| prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
|---|---|
<|file_name|>guest1_test.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
u"""Test auth.guest
:copyright: Copyright (c) 2019 RadiaSoft LLC. All Rights Reserved.
:license: http://www.apache.org/licenses/LICENSE-2.0.html
"""
from __future__ import absolute_import, division, print_function
import pytest
def test_happy_path(auth_fc):
fc = auth_fc
from pykern import pkconfig, pkunit, pkio
from pykern.pkunit import pkok, pkre, pkeq
from pykern.pkdebug import pkdp
import re
fc.sr_get('authGuestLogin', {'simulation_type': fc.sr_sim_type})
fc.sr_post('listSimulations', {'simulationType': fc.sr_sim_type})
fc.sr_auth_state(
avatarUrl=None,
displayName='Guest User',
guestIsOnlyMethod=False,
isGuestUser=True,
isLoggedIn=True,
isLoginExpired=False,
method='guest',
needCompleteRegistration=False,
userName=None,
visibleMethods=['email'],
)
def test_timeout(auth_fc):
fc = auth_fc
from pykern import pkconfig, pkunit, pkio
from pykern import pkjson
from pykern.pkdebug import pkdp
from pykern.pkunit import pkok, pkre, pkeq, pkexcept
import re
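# with redirect=False the guest-login route is expected to return its JSON payload directly instead of a 302, so the response can be inspected below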
r = fc.sr_get('authGuestLogin', {'simulation_type': fc.sr_sim_type}, redirect=False)
pkeq(200, r.status_code)
d = pkjson.load_any(r.data)<|fim▁hole|> fc.sr_post('listSimulations', {'simulationType': fc.sr_sim_type})
fc.sr_auth_state(
isGuestUser=True,
isLoggedIn=True,
isLoginExpired=False,
)
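# adjustTime is assumed to be a test-only hook that advances the server clock; two days should push the guest login past expiry (see isLoginExpired=True below)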
fc.sr_get_json('adjustTime', params={'days': '2'})
fc.sr_auth_state(
isGuestUser=True,
isLoggedIn=True,
isLoginExpired=True,
)
with pkexcept('SRException.*guest-expired'):
fc.sr_post('listSimulations', {'simulationType': fc.sr_sim_type})<|fim▁end|>
|
pkeq(True, d.authState.isLoggedIn)
|
<|file_name|>persistenthashmap.test.ts<|end_file_name|><|fim▁begin|>/// <reference path="../../d.ts/DefinitelyTyped/jasmine/jasmine.d.ts"/>
import basilisk = require('../basilisk');
import Hash = basilisk.HashMap;
var freeze = (obj:any):any => { return (Object.freeze) ? Object.freeze(obj) : obj; };
// given a map of strings -> numbers, return a hash function that looks each key up in that map
function fixedStringHash(values:any):(key:string) => number {
var safe = {};
for (var key in values) {
if (values.hasOwnProperty(key)) {
if (typeof values[key] !== 'number') {
throw "Must only provide numbers as hashcodes";
}
safe[key] = values[key];
}
}
safe = freeze(safe);
return function (key:string):number {
if (!safe.hasOwnProperty(key)) {
throw "Must only check for keys which are in the provided set.";
}
return safe[key];
}
}
describe('PersistentHashMap', function () {
describe('.from', function () {
it("Should require a function as first parameter.", function () {
var fn = fixedStringHash({ 'a': 0, 'b': 1, 'c': 2 }),
map = Hash.from(fn);
expect(() => { Hash.from(null) }).toThrow();
});
});
// These tests check that the *internal* behaviour is correct: the external contract is tested above.
describe('Internal behaviour', function () {
it("Should still be possible to retrieve correct values, even if all keys map to the same value.", function () {
var map = Hash.from<string, string>((k:any) => { return 0; });
map = map.set('a', 'a').set('b', 'b').set('c', 'c');
expect(map.get('a')).toBe('a');
expect(map.get('b')).toBe('b');
expect(map.get('c')).toBe('c');
});<|fim▁hole|> // 97 is one deep, and the top bit is 1. This should thus generate a nested tree, with an interior node
// in the middle and a collision node further down.
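// (Assuming the usual 5-bit/32-way HAMT partitioning: 1 = 0b00001 and 97 = 0b1100001 share
// their low 5 bits, so 'a' and 'b' split into an interior node one level down, while 'b' and
// 'c' share the full hash 97 and can only be distinguished by a collision node.)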
var map = Hash.from<string, string>(fixedStringHash({'a': 1, 'b': 97, 'c': 97 }));
map = map.set('a', 'a');
expect(map['root'] instanceof basilisk.hamt.Leaf).toBe(true);
map = map.set('b', 'b');
expect(map['root'] instanceof basilisk.hamt.Interior).toBe(true);
expect(map['root']['contents'][1] instanceof basilisk.hamt.Interior).toBe(true);
var nested = map['root']['contents'][1];
expect(nested['contents'][0] instanceof basilisk.hamt.Leaf).toBe(true);
expect(nested['contents'][3] instanceof basilisk.hamt.Leaf).toBe(true);
map = map.set('c', 'c');
expect(map['root'] instanceof basilisk.hamt.Interior).toBe(true);
expect(map['root']['contents'][1] instanceof basilisk.hamt.Interior).toBe(true);
nested = map['root']['contents'][1];
expect(nested['contents'][0] instanceof basilisk.hamt.Leaf).toBe(true);
expect(nested['contents'][3] instanceof basilisk.hamt.Collision).toBe(true);
expect(map.get('c')).toBe('c');
expect(map.get('b')).toBe('b');
expect(map.get('a')).toBe('a');
// now unset the items.
map = map.remove('c');
expect(map['root'] instanceof basilisk.hamt.Interior).toBe(true);
expect(map['root']['contents'][1] instanceof basilisk.hamt.Interior).toBe(true);
nested = map['root']['contents'][1];
expect(nested['contents'][0] instanceof basilisk.hamt.Leaf).toBe(true);
expect(nested['contents'][3] instanceof basilisk.hamt.Leaf).toBe(true);
map = map.remove('b');
expect(map['root'] instanceof basilisk.hamt.Leaf).toBe(true);
});
var count:number = 5000;
it("Adding " + count + " elements should not be prohibitive (+- 0.5s).", function () {
var map = Hash.from<number, number>((key:number):number => { return (key >= 0) ? key : -1 * key; });
for (var i=0; i < count; i++) {
map = map.set(i, i);
}
var final = map;
});
});
describe('.size', function () {
var count:number = 76;
it("Should work for different keys.", function () {
var map = Hash.from<number, number>((key:number):number => { return Math.abs(key); }),
correct = true;
for (var i=0; i < count; i++) {
map = map.set(i, i);
correct = correct && (map.size == i + 1);
}
expect(correct).toBe(true);
});
it("Should work for collisions.", function () {
var map = Hash.from<number, number>((key:number):number => { return 0; }),
correct = true;
for (var i=0; i < count; i++) {
map = map.set(i, i);
correct = correct && (map.size == i + 1)
}
expect(correct).toBe(true);
});
});
describe('.keys', function () {
var count:number = 76;
it("Should work for different keys.", function () {
var map = Hash.from<number, number>((key:number):number => { return Math.abs(key); }),
correct = true,
seen = {};
for (var i=0; i < count; i++) {
map = map.set(i * 2, i * 4);
seen['s' + (i * 2)] = false;
}
map.keys().forEach(function (key:number) {
seen['s' + (key)] = true;
});
for (var k in seen) {
if (seen.hasOwnProperty(k)) {
correct = correct && seen[k];
}
}
expect(correct).toBe(true);
});
it("Should work for collisions.", function () {
var map = Hash.from<number, number>((key:number):number => { return 0; }),
correct = true,
seen = {};
for (var i=0; i < count; i++) {
map = map.set(i * 2, i * 4);
seen['s' + (i * 2)] = false;
}
map.keys().forEach(function (key:number) {
seen['s' + (key)] = true;
});
for (var k in seen) {
if (seen.hasOwnProperty(k)) {
correct = correct && seen[k];
}
}
expect(correct).toBe(true);
});
});
describe('.values', function () {
var count:number = 76;
it("Should work for different keys.", function () {
var map = Hash.from<number, number>((key:number):number => { return Math.abs(key); }),
correct = true,
seen = {};
for (var i=0; i < count; i++) {
map = map.set(i * 2, i * 4);
seen['s' + (i * 4)] = false;
}
map.values().forEach(function (key:number) {
seen['s' + (key)] = true;
});
for (var k in seen) {
if (seen.hasOwnProperty(k)) {
correct = correct && seen[k];
}
}
expect(correct).toBe(true);
});
it("Should work for collisions.", function () {
var map = Hash.from<number, number>((key:number):number => { return 0; }),
correct = true,
seen = {};
for (var i=0; i < count; i++) {
map = map.set(i * 2, i * 4);
seen['s' + (i * 4)] = false;
}
map.values().forEach(function (key:number) {
seen['s' + (key)] = true;
});
for (var k in seen) {
if (seen.hasOwnProperty(k)) {
correct = correct && seen[k];
}
}
expect(correct).toBe(true);
});
});
});<|fim▁end|>
|
it("Collisions one deep should function correctly.", function () {
|
<|file_name|>app.py<|end_file_name|><|fim▁begin|>"""
Nonlinear cartoon+texture decomposition ipol demo web app
"""
from lib import base_app, build, http, image
from lib.misc import ctime
from lib.misc import prod
from lib.base_app import init_app
import shutil
import cherrypy
from cherrypy import TimeoutError
import os.path
import time
from math import ceil
class app(base_app):
""" nonlinear cartoon+texture decomposition """
title = "Cartoon+Texture Image Decomposition"
xlink_article = 'http://www.ipol.im/pub/art/2011/blmv_ct/'
input_nb = 1
input_max_pixels = 700 * 700 # max size (in pixels) of an input image
input_max_weight = 10 * 1024 * 1024 # max size (in bytes) of an input file
input_dtype = '3x8i' # input image expected data type
input_ext = '.png' # input image expected extension (ie file format)
is_test = False
def __init__(self):
"""
app setup
"""
# setup the parent class
base_dir = os.path.dirname(os.path.abspath(__file__))
base_app.__init__(self, base_dir)
# select the base_app steps to expose
# index() and input_xxx() are generic
base_app.index.im_func.exposed = True
base_app.input_select.im_func.exposed = True
base_app.input_upload.im_func.exposed = True
# params() is modified from the template
base_app.params.im_func.exposed = True
# result() is modified from the template
base_app.result.im_func.exposed = True
def build(self):<|fim▁hole|> # store common file path in variables
tgz_url = "http://www.ipol.im/pub/art/2011/blmv_ct/srcB.tar.gz"
tgz_file = self.dl_dir + "srcB.tar.gz"
progs = ["cartoonIpol"]
src_bin = dict([(self.src_dir + os.path.join("srcB", prog),
self.bin_dir + prog)
for prog in progs])
log_file = self.base_dir + "build.log"
# get the latest source archive
build.download(tgz_url, tgz_file)
# test if any dest file is missing, or too old
if all([(os.path.isfile(bin_file)
and ctime(tgz_file) < ctime(bin_file))
for bin_file in src_bin.values()]):
cherrypy.log("not rebuild needed",
context='BUILD', traceback=False)
else:
# extract the archive
build.extract(tgz_file, self.src_dir)
# build the programs
build.run("make -j4 -C %s %s"
% (self.src_dir + "srcB", " ".join(progs)),
stdout=log_file)
# save into bin dir
if os.path.isdir(self.bin_dir):
shutil.rmtree(self.bin_dir)
os.mkdir(self.bin_dir)
for (src, dst) in src_bin.items():
shutil.copy(src, dst)
# cleanup the source dir
shutil.rmtree(self.src_dir)
return
#
# PARAMETER HANDLING
#
def select_subimage(self, x0, y0, x1, y1):
"""
cut subimage from original image
"""
# draw selected rectangle on the image
imgS = image(self.work_dir + 'input_0.png')
imgS.draw_line([(x0, y0), (x1, y0), (x1, y1), (x0, y1), (x0, y0)],
color="red")
imgS.draw_line([(x0+1, y0+1), (x1-1, y0+1), (x1-1, y1-1), (x0+1, y1-1),
(x0+1, y0+1)], color="white")
imgS.save(self.work_dir + 'input_0s.png')
# crop the image
# try cropping from the original input image (if different from input_0)
im0 = image(self.work_dir + 'input_0.orig.png')
dx0 = im0.size[0]
img = image(self.work_dir + 'input_0.png')
dx = img.size[0]
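# input_0.png may be a downscaled copy of the upload; z maps the selection back to the original image's coordinate system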
if (dx != dx0) :
z = float(dx0)/float(dx)
im0.crop((int(x0*z), int(y0*z), int(x1*z), int(y1*z)))
# resize if cropped image is too big
if self.input_max_pixels and prod(im0.size) > self.input_max_pixels:
im0.resize(self.input_max_pixels, method="antialias")
img = im0
else :
img.crop((x0, y0, x1, y1))
# save result
img.save(self.work_dir + 'input_0.sel.png')
return
@cherrypy.expose
@init_app
def params(self, newrun=False, msg=None, x0=None, y0=None,
x1=None, y1=None, scale="3.0"):
"""
configure the algo execution
"""
if newrun:
self.clone_input()
if x0:
self.select_subimage(int(x0), int(y0), int(x1), int(y1))
return self.tmpl_out("params.html", msg=msg, x0=x0, y0=y0,
x1=x1, y1=y1, scale=scale)
@cherrypy.expose
@init_app
def rectangle(self, action=None, scale=None,
x=None, y=None, x0=None, y0=None):
"""
select a rectangle in the image
"""
if action == 'run':
if x is None:
#save parameter
try:
self.cfg['param'] = {'scale' : scale}
except ValueError:
return self.error(errcode='badparams',
errmsg="Incorrect scale parameter.")
else:
#save parameters
try:
self.cfg['param'] = {'scale' : scale,
'x0' : int(x0),
'y0' : int(y0),
'x1' : int(x),
'y1' : int(y)}
except ValueError:
return self.error(errcode='badparams',
errmsg="Incorrect parameters.")
# use the whole image if no subimage is available
try:
img = image(self.work_dir + 'input_0.sel.png')
except IOError:
img = image(self.work_dir + 'input_0.png')
img.save(self.work_dir + 'input_0.sel.png')
# go to the wait page, with the key
http.redir_303(self.base_url + "wait?key=%s" % self.key)
return
else:
# use a part of the image
if x0 is None:
# first corner selection
x = int(x)
y = int(y)
# draw a cross at the first corner
img = image(self.work_dir + 'input_0.png')
img.draw_cross((x, y), size=4, color="white")
img.draw_cross((x, y), size=2, color="red")
img.save(self.work_dir + 'input.png')
return self.tmpl_out("params.html", scale=scale, x0=x, y0=y)
else:
# second corner selection
x0 = int(x0)
y0 = int(y0)
x1 = int(x)
y1 = int(y)
# reorder the corners
(x0, x1) = (min(x0, x1), max(x0, x1))
(y0, y1) = (min(y0, y1), max(y0, y1))
assert (x1 - x0) > 0
assert (y1 - y0) > 0
#save parameters
try:
self.cfg['param'] = {'scale' : scale,
'x0' : x0,
'y0' : y0,
'x1' : x1,
'y1' : y1}
except ValueError:
return self.error(errcode='badparams',
errmsg="Incorrect parameters.")
#select subimage
self.select_subimage(x0, y0, x1, y1)
# go to the wait page, with the key
http.redir_303(self.base_url + "wait?key=%s" % self.key)
return
@cherrypy.expose
@init_app
def wait(self):
"""
run redirection
"""
http.refresh(self.base_url + 'run?key=%s' % self.key)
return self.tmpl_out("wait.html")
@cherrypy.expose
@init_app
def run(self):
"""
algorithm execution
"""
# read the parameters
scale = self.cfg['param']['scale']
# run the algorithm
stdout = open(self.work_dir + 'stdout.txt', 'w')
try:
run_time = time.time()
self.run_algo(scale, stdout=stdout)
self.cfg['info']['run_time'] = time.time() - run_time
except TimeoutError:
return self.error(errcode='timeout')
except RuntimeError:
return self.error(errcode='runtime')
stdout.close()
http.redir_303(self.base_url + 'result?key=%s' % self.key)
# archive
if self.cfg['meta']['original']:
ar = self.make_archive()
ar.add_file("input_0.orig.png", info="uploaded image")
# save processed image (if different from uploaded)
im0 = image(self.work_dir + 'input_0.orig.png')
dx0 = im0.size[0]
img = image(self.work_dir + 'input_0.png')
dx = img.size[0]
imgsel = image(self.work_dir + 'input_0.sel.png')
dxsel = imgsel.size[0]
if (dx != dx0) or (dxsel != dx):
ar.add_file("input_0.sel.png", info="original input image")
ar.add_file("cartoon.png", info="cartoon image")
ar.add_file("texture.png", info="texture image")
ar.add_info({"scale": scale})
ar.save()
return self.tmpl_out("run.html")
def run_algo(self, scale, stdout=None, timeout=False):
"""
the core algo runner
could also be called by a batch processor
this one only needs the scale parameter
"""
#cartoon-texture images
p = self.run_proc(['cartoonIpol', 'input_0.sel.png', str(scale),
'cartoon.png', 'texture.png'],
stdout=None, stderr=None)
self.wait_proc(p, timeout)
@cherrypy.expose
@init_app
def result(self):
"""
display the algo results
"""
# read the parameters
scale = self.cfg['param']['scale']
try:
x0 = self.cfg['param']['x0']
except KeyError:
x0 = None
try:
y0 = self.cfg['param']['y0']
except KeyError:
y0 = None
try:
x1 = self.cfg['param']['x1']
except KeyError:
x1 = None
try:
y1 = self.cfg['param']['y1']
except KeyError:
y1 = None
(sizeX, sizeY)=image(self.work_dir + 'input_0.sel.png').size
# Resize for visualization (new size of the smallest dimension = 200)
zoom_factor = None
if (sizeX < 200) or (sizeY < 200):
if sizeX > sizeY:
zoom_factor = int(ceil(200.0/sizeY))
else:
zoom_factor = int(ceil(200.0/sizeX))
sizeX = sizeX*zoom_factor
sizeY = sizeY*zoom_factor
im = image(self.work_dir + 'input_0.sel.png')
im.resize((sizeX, sizeY), method="pixeldup")
im.save(self.work_dir + 'input_0_zoom.sel.png')
im = image(self.work_dir + 'cartoon.png')
im.resize((sizeX, sizeY), method="pixeldup")
im.save(self.work_dir + 'cartoon_zoom.png')
im = image(self.work_dir + 'texture.png')
im.resize((sizeX, sizeY), method="pixeldup")
im.save(self.work_dir + 'texture_zoom.png')
return self.tmpl_out("result.html", scale=scale,
x0=x0, y0=y0, x1=x1, y1=y1,
sizeY=sizeY, zoom_factor=zoom_factor)<|fim▁end|>
|
"""
program build/update
"""
|
<|file_name|>getTemp.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
import re, os, time
# function: read and parse sensor data file
def read_sensor(path):
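# a DS18B20 w1_slave file holds two lines: a CRC status line ending in YES/NO,
# then a line ending in t=<temperature in millidegrees Celsius>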
value = "U"
try:
f = open(path, "r")
line = f.readline()
if re.match(r"([0-9a-f]{2} ){9}: crc=[0-9a-f]{2} YES", line):
line = f.readline()
m = re.match(r"([0-9a-f]{2} ){9}t=([+-]?[0-9]+)", line)
if m:
value = str(round(float(m.group(2)) / 1000.0,1))
f.close()
except (IOError), e:
print time.strftime("%x %X"), "Error reading", path, ": ", e
return value
# define paths to 1-wire sensor data (the trailing comma keeps this a one-element tuple)
pathes = (
"/sys/bus/w1/devices/28-0314640daeff/w1_slave",
)
# read sensor data
#for path in pathes:
# path = "/sys/bus/w1/devices/28-0314640daeff/w1_slave"
# print read_sensor(path)
# time.sleep(30)
<|fim▁hole|>while (flag):
temp2 = temp
temp = read_sensor("/sys/bus/w1/devices/28-0314640daeff/w1_slave")
if temp2 != temp:
print temp
time.sleep(11)<|fim▁end|>
|
flag = 1
temp = 0
temp2 = 0
|
<|file_name|>stdlib_demo.rs<|end_file_name|><|fim▁begin|>// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use diem_framework::{encode_peer_to_peer_with_metadata_script, ScriptCall};
use diem_types::{AccountAddress, Identifier, StructTag, TypeTag};
use serde_bytes::ByteBuf as Bytes;
fn main() {
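// 0x...01 is the Diem core-framework address, where framework modules such as XDX are published.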
let token = TypeTag::Struct(StructTag {
address: AccountAddress([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1]),
module: Identifier("XDX".into()),
name: Identifier("XDX".into()),
type_params: Vec::new(),
});
let payee = AccountAddress([
0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22, 0x22,
0x22,
]);
let amount = 1234567;
let script = encode_peer_to_peer_with_metadata_script(
token,
payee.clone(),
amount,
Bytes::from(Vec::new()),
Bytes::from(Vec::new()),
);
let call = ScriptCall::decode(&script);
match call {
Some(ScriptCall::PeerToPeerWithMetadata {
amount: a,
payee: p,
..
}) => {
assert_eq!(a, amount);<|fim▁hole|> assert_eq!(p, payee);
}
_ => panic!("unexpected type of script"),
}
let output = bcs::to_bytes(&script).unwrap();
for o in output {
print!("{} ", o);
}
println!();
}<|fim▁end|>
| |
<|file_name|>integration_account_map_paged.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.paging import Paged
class IntegrationAccountMapPaged(Paged):
"""
A paging container for iterating over a list of IntegrationAccountMap objects.
"""
_attribute_map = {
'next_link': {'key': 'nextLink', 'type': 'str'},
'current_page': {'key': 'value', 'type': '[IntegrationAccountMap]'}<|fim▁hole|> }
def __init__(self, *args, **kwargs):
super(IntegrationAccountMapPaged, self).__init__(*args, **kwargs)<|fim▁end|>
| |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|># Django settings for mcjsms project.
import os
DEBUG = False
TEMPLATE_DEBUG = DEBUG
SITE_ROOT = os.path.realpath(os.path.dirname(__file__))
ADMINS = []
MANAGERS = ADMINS
if DEBUG:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': os.path.join(SITE_ROOT, 'data/dev.sqlite'), # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'mcj_sms', # Or path to database file if using sqlite3.
'USER': 'mcj_sms', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '127.0.0.1', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '3306', # Set to empty string for default. Not used with sqlite3.
'OPTIONS': {
'init_command': 'SET storage_engine=INNODB',
}
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Montreal'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html<|fim▁hole|>LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
if DEBUG:
MEDIA_ROOT = os.path.join(SITE_ROOT, 'media/')
else:
MEDIA_ROOT = '/home/ramisayar/public/mcj/mcj2011/media/'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/site_media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
if DEBUG:
STATIC_ROOT = os.path.join(SITE_ROOT, 'static')
else:
STATIC_ROOT = '/home/ramisayar/public/mcj/mcj2011/static/'
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(SITE_ROOT, 'global_static'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '3+pefpl6rsg&#smr*4$f(18nasrr0u)wp_4q=lkn50n-qz0rjt'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'sms.urls'
TEMPLATE_DIRS = (os.path.join(SITE_ROOT, 'templates'),)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
#'django.contrib.admin',
'django.contrib.localflavor',
'django_twilio',
'sms.twilio_sms'
)
TWILIO_ACCOUNT_SID = ''
TWILIO_AUTH_TOKEN = ''
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}<|fim▁end|>
| |
<|file_name|>client_test.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package client
import (
"context"
"errors"
"fmt"
"io"
"io/ioutil"
"math/rand"
"net/http"
"net/url"
"reflect"
"sort"
"strings"
"testing"
"time"
"github.com/coreos/etcd/pkg/testutil"
"github.com/coreos/etcd/version"
)
type actionAssertingHTTPClient struct {
t *testing.T
num int
act httpAction<|fim▁hole|> resp http.Response
body []byte
err error
}
func (a *actionAssertingHTTPClient) Do(_ context.Context, act httpAction) (*http.Response, []byte, error) {
if !reflect.DeepEqual(a.act, act) {
a.t.Errorf("#%d: unexpected httpAction: want=%#v got=%#v", a.num, a.act, act)
}
return &a.resp, a.body, a.err
}
type staticHTTPClient struct {
resp http.Response
body []byte
err error
}
func (s *staticHTTPClient) Do(context.Context, httpAction) (*http.Response, []byte, error) {
return &s.resp, s.body, s.err
}
type staticHTTPAction struct {
request http.Request
}
func (s *staticHTTPAction) HTTPRequest(url.URL) *http.Request {
return &s.request
}
type staticHTTPResponse struct {
resp http.Response
body []byte
err error
}
type multiStaticHTTPClient struct {
responses []staticHTTPResponse
cur int
}
func (s *multiStaticHTTPClient) Do(context.Context, httpAction) (*http.Response, []byte, error) {
r := s.responses[s.cur]
s.cur++
return &r.resp, r.body, r.err
}
func newStaticHTTPClientFactory(responses []staticHTTPResponse) httpClientFactory {
var cur int
return func(url.URL) httpClient {
r := responses[cur]
cur++
return &staticHTTPClient{resp: r.resp, body: r.body, err: r.err}
}
}
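// fakeTransport stands in for the real HTTP transport in these tests: canned responses and
// errors are fed through respchan/errchan, and CancelRequest signals startCancel so the
// cancellation paths can be exercised deterministically.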
type fakeTransport struct {
respchan chan *http.Response
errchan chan error
startCancel chan struct{}
finishCancel chan struct{}
}
func newFakeTransport() *fakeTransport {
return &fakeTransport{
respchan: make(chan *http.Response, 1),
errchan: make(chan error, 1),
startCancel: make(chan struct{}, 1),
finishCancel: make(chan struct{}, 1),
}
}
func (t *fakeTransport) CancelRequest(*http.Request) {
t.startCancel <- struct{}{}
}
type fakeAction struct{}
func (a *fakeAction) HTTPRequest(url.URL) *http.Request {
return &http.Request{}
}
func TestSimpleHTTPClientDoSuccess(t *testing.T) {
tr := newFakeTransport()
c := &simpleHTTPClient{transport: tr}
tr.respchan <- &http.Response{
StatusCode: http.StatusTeapot,
Body: ioutil.NopCloser(strings.NewReader("foo")),
}
resp, body, err := c.Do(context.Background(), &fakeAction{})
if err != nil {
t.Fatalf("incorrect error value: want=nil got=%v", err)
}
wantCode := http.StatusTeapot
if wantCode != resp.StatusCode {
t.Fatalf("invalid response code: want=%d got=%d", wantCode, resp.StatusCode)
}
wantBody := []byte("foo")
if !reflect.DeepEqual(wantBody, body) {
t.Fatalf("invalid response body: want=%q got=%q", wantBody, body)
}
}
func TestSimpleHTTPClientDoError(t *testing.T) {
tr := newFakeTransport()
c := &simpleHTTPClient{transport: tr}
tr.errchan <- errors.New("fixture")
_, _, err := c.Do(context.Background(), &fakeAction{})
if err == nil {
t.Fatalf("expected non-nil error, got nil")
}
}
func TestSimpleHTTPClientDoCancelContext(t *testing.T) {
tr := newFakeTransport()
c := &simpleHTTPClient{transport: tr}
tr.startCancel <- struct{}{}
tr.finishCancel <- struct{}{}
_, _, err := c.Do(context.Background(), &fakeAction{})
if err == nil {
t.Fatalf("expected non-nil error, got nil")
}
}
type checkableReadCloser struct {
io.ReadCloser
closed bool
}
func (c *checkableReadCloser) Close() error {
if !c.closed {
c.closed = true
return c.ReadCloser.Close()
}
return nil
}
func TestSimpleHTTPClientDoCancelContextResponseBodyClosed(t *testing.T) {
tr := newFakeTransport()
c := &simpleHTTPClient{transport: tr}
// create an already-cancelled context
ctx, cancel := context.WithCancel(context.Background())
cancel()
body := &checkableReadCloser{ReadCloser: ioutil.NopCloser(strings.NewReader("foo"))}
go func() {
// wait that simpleHTTPClient knows the context is already timed out,
// and calls CancelRequest
testutil.WaitSchedule()
// response is returned before cancel effects
tr.respchan <- &http.Response{Body: body}
}()
_, _, err := c.Do(ctx, &fakeAction{})
if err == nil {
t.Fatalf("expected non-nil error, got nil")
}
if !body.closed {
t.Fatalf("expected closed body")
}
}
type blockingBody struct {
c chan struct{}
}
func (bb *blockingBody) Read(p []byte) (n int, err error) {
<-bb.c
return 0, errors.New("closed")
}
func (bb *blockingBody) Close() error {
close(bb.c)
return nil
}
func TestSimpleHTTPClientDoCancelContextResponseBodyClosedWithBlockingBody(t *testing.T) {
tr := newFakeTransport()
c := &simpleHTTPClient{transport: tr}
ctx, cancel := context.WithCancel(context.Background())
body := &checkableReadCloser{ReadCloser: &blockingBody{c: make(chan struct{})}}
go func() {
tr.respchan <- &http.Response{Body: body}
time.Sleep(2 * time.Millisecond)
// cancel after the body is received
cancel()
}()
_, _, err := c.Do(ctx, &fakeAction{})
if err != context.Canceled {
t.Fatalf("expected %+v, got %+v", context.Canceled, err)
}
if !body.closed {
t.Fatalf("expected closed body")
}
}
func TestSimpleHTTPClientDoCancelContextWaitForRoundTrip(t *testing.T) {
tr := newFakeTransport()
c := &simpleHTTPClient{transport: tr}
donechan := make(chan struct{})
ctx, cancel := context.WithCancel(context.Background())
go func() {
c.Do(ctx, &fakeAction{})
close(donechan)
}()
// This should call CancelRequest and begin the cancellation process
cancel()
select {
case <-donechan:
t.Fatalf("simpleHTTPClient.Do should not have exited yet")
default:
}
tr.finishCancel <- struct{}{}
select {
case <-donechan:
//expected behavior
return
case <-time.After(time.Second):
t.Fatalf("simpleHTTPClient.Do did not exit within 1s")
}
}
func TestSimpleHTTPClientDoHeaderTimeout(t *testing.T) {
tr := newFakeTransport()
tr.finishCancel <- struct{}{}
c := &simpleHTTPClient{transport: tr, headerTimeout: time.Millisecond}
errc := make(chan error)
go func() {
_, _, err := c.Do(context.Background(), &fakeAction{})
errc <- err
}()
select {
case err := <-errc:
if err == nil {
t.Fatalf("expected non-nil error, got nil")
}
case <-time.After(time.Second):
t.Fatalf("unexpected timeout when waiting for the test to finish")
}
}
func TestHTTPClusterClientDo(t *testing.T) {
fakeErr := errors.New("fake!")
fakeURL := url.URL{}
tests := []struct {
client *httpClusterClient
ctx context.Context
wantCode int
wantErr error
wantPinned int
}{
// first good response short-circuits Do
{
client: &httpClusterClient{
endpoints: []url.URL{fakeURL, fakeURL},
clientFactory: newStaticHTTPClientFactory(
[]staticHTTPResponse{
{resp: http.Response{StatusCode: http.StatusTeapot}},
{err: fakeErr},
},
),
rand: rand.New(rand.NewSource(0)),
},
wantCode: http.StatusTeapot,
},
// fall through to good endpoint if err is arbitrary
{
client: &httpClusterClient{
endpoints: []url.URL{fakeURL, fakeURL},
clientFactory: newStaticHTTPClientFactory(
[]staticHTTPResponse{
{err: fakeErr},
{resp: http.Response{StatusCode: http.StatusTeapot}},
},
),
rand: rand.New(rand.NewSource(0)),
},
wantCode: http.StatusTeapot,
wantPinned: 1,
},
// context.Canceled short-circuits Do
{
client: &httpClusterClient{
endpoints: []url.URL{fakeURL, fakeURL},
clientFactory: newStaticHTTPClientFactory(
[]staticHTTPResponse{
{err: context.Canceled},
{resp: http.Response{StatusCode: http.StatusTeapot}},
},
),
rand: rand.New(rand.NewSource(0)),
},
wantErr: context.Canceled,
},
// return err if there are no endpoints
{
client: &httpClusterClient{
endpoints: []url.URL{},
clientFactory: newHTTPClientFactory(nil, nil, 0),
rand: rand.New(rand.NewSource(0)),
},
wantErr: ErrNoEndpoints,
},
// return err if all endpoints return arbitrary errors
{
client: &httpClusterClient{
endpoints: []url.URL{fakeURL, fakeURL},
clientFactory: newStaticHTTPClientFactory(
[]staticHTTPResponse{
{err: fakeErr},
{err: fakeErr},
},
),
rand: rand.New(rand.NewSource(0)),
},
wantErr: &ClusterError{Errors: []error{fakeErr, fakeErr}},
},
// 500-level errors cause Do to fallthrough to next endpoint
{
client: &httpClusterClient{
endpoints: []url.URL{fakeURL, fakeURL},
clientFactory: newStaticHTTPClientFactory(
[]staticHTTPResponse{
{resp: http.Response{StatusCode: http.StatusBadGateway}},
{resp: http.Response{StatusCode: http.StatusTeapot}},
},
),
rand: rand.New(rand.NewSource(0)),
},
wantCode: http.StatusTeapot,
wantPinned: 1,
},
// 500-level errors cause one shot Do to fallthrough to next endpoint
{
client: &httpClusterClient{
endpoints: []url.URL{fakeURL, fakeURL},
clientFactory: newStaticHTTPClientFactory(
[]staticHTTPResponse{
{resp: http.Response{StatusCode: http.StatusBadGateway}},
{resp: http.Response{StatusCode: http.StatusTeapot}},
},
),
rand: rand.New(rand.NewSource(0)),
},
ctx: context.WithValue(context.Background(), &oneShotCtxValue, &oneShotCtxValue),
wantErr: fmt.Errorf("client: etcd member returns server error [Bad Gateway]"),
wantPinned: 1,
},
}
for i, tt := range tests {
if tt.ctx == nil {
tt.ctx = context.Background()
}
resp, _, err := tt.client.Do(tt.ctx, nil)
if !reflect.DeepEqual(tt.wantErr, err) {
t.Errorf("#%d: got err=%v, want=%v", i, err, tt.wantErr)
continue
}
if resp == nil {
if tt.wantCode != 0 {
t.Errorf("#%d: resp is nil, want=%d", i, tt.wantCode)
continue
}
} else if resp.StatusCode != tt.wantCode {
t.Errorf("#%d: resp code=%d, want=%d", i, resp.StatusCode, tt.wantCode)
continue
}
if tt.client.pinned != tt.wantPinned {
t.Errorf("#%d: pinned=%d, want=%d", i, tt.client.pinned, tt.wantPinned)
}
}
}
func TestHTTPClusterClientDoDeadlineExceedContext(t *testing.T) {
fakeURL := url.URL{}
tr := newFakeTransport()
tr.finishCancel <- struct{}{}
c := &httpClusterClient{
clientFactory: newHTTPClientFactory(tr, DefaultCheckRedirect, 0),
endpoints: []url.URL{fakeURL},
}
errc := make(chan error)
go func() {
ctx, cancel := context.WithTimeout(context.Background(), time.Millisecond)
defer cancel()
_, _, err := c.Do(ctx, &fakeAction{})
errc <- err
}()
select {
case err := <-errc:
if err != context.DeadlineExceeded {
t.Errorf("err = %+v, want %+v", err, context.DeadlineExceeded)
}
case <-time.After(time.Second):
t.Fatalf("unexpected timeout when waiting for request to deadline exceed")
}
}
type fakeCancelContext struct{}
var fakeCancelContextError = errors.New("fake context canceled")
func (f fakeCancelContext) Deadline() (time.Time, bool) { return time.Time{}, false }
func (f fakeCancelContext) Done() <-chan struct{} {
d := make(chan struct{}, 1)
d <- struct{}{}
return d
}
func (f fakeCancelContext) Err() error { return fakeCancelContextError }
func (f fakeCancelContext) Value(key interface{}) interface{} { return 1 }
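// withTimeout mirrors context.WithTimeout's signature but never enforces the timeout, so the
// fake context's canned Err() is what callers observe.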
func withTimeout(parent context.Context, timeout time.Duration) (
ctx context.Context,
cancel context.CancelFunc) {
ctx = parent
cancel = func() {
ctx = nil
}
return ctx, cancel
}
func TestHTTPClusterClientDoCanceledContext(t *testing.T) {
fakeURL := url.URL{}
tr := newFakeTransport()
tr.finishCancel <- struct{}{}
c := &httpClusterClient{
clientFactory: newHTTPClientFactory(tr, DefaultCheckRedirect, 0),
endpoints: []url.URL{fakeURL},
}
errc := make(chan error)
go func() {
ctx, cancel := withTimeout(fakeCancelContext{}, time.Millisecond)
cancel()
_, _, err := c.Do(ctx, &fakeAction{})
errc <- err
}()
select {
case err := <-errc:
if err != fakeCancelContextError {
t.Errorf("err = %+v, want %+v", err, fakeCancelContextError)
}
case <-time.After(time.Second):
t.Fatalf("unexpected timeout when waiting for request to fake context canceled")
}
}
func TestRedirectedHTTPAction(t *testing.T) {
act := &redirectedHTTPAction{
action: &staticHTTPAction{
request: http.Request{
Method: "DELETE",
URL: &url.URL{
Scheme: "https",
Host: "foo.example.com",
Path: "/ping",
},
},
},
location: url.URL{
Scheme: "https",
Host: "bar.example.com",
Path: "/pong",
},
}
want := &http.Request{
Method: "DELETE",
URL: &url.URL{
Scheme: "https",
Host: "bar.example.com",
Path: "/pong",
},
}
got := act.HTTPRequest(url.URL{Scheme: "http", Host: "baz.example.com", Path: "/pang"})
if !reflect.DeepEqual(want, got) {
t.Fatalf("HTTPRequest is %#v, want %#v", want, got)
}
}
func TestRedirectFollowingHTTPClient(t *testing.T) {
tests := []struct {
checkRedirect CheckRedirectFunc
client httpClient
wantCode int
wantErr error
}{
// errors bubbled up
{
checkRedirect: func(int) error { return ErrTooManyRedirects },
client: &multiStaticHTTPClient{
responses: []staticHTTPResponse{
{
err: errors.New("fail!"),
},
},
},
wantErr: errors.New("fail!"),
},
// no need to follow redirect if none given
{
checkRedirect: func(int) error { return ErrTooManyRedirects },
client: &multiStaticHTTPClient{
responses: []staticHTTPResponse{
{
resp: http.Response{
StatusCode: http.StatusTeapot,
},
},
},
},
wantCode: http.StatusTeapot,
},
// redirects if less than max
{
checkRedirect: func(via int) error {
if via >= 2 {
return ErrTooManyRedirects
}
return nil
},
client: &multiStaticHTTPClient{
responses: []staticHTTPResponse{
{
resp: http.Response{
StatusCode: http.StatusTemporaryRedirect,
Header: http.Header{"Location": []string{"http://example.com"}},
},
},
{
resp: http.Response{
StatusCode: http.StatusTeapot,
},
},
},
},
wantCode: http.StatusTeapot,
},
// succeed after reaching max redirects
{
checkRedirect: func(via int) error {
if via >= 3 {
return ErrTooManyRedirects
}
return nil
},
client: &multiStaticHTTPClient{
responses: []staticHTTPResponse{
{
resp: http.Response{
StatusCode: http.StatusTemporaryRedirect,
Header: http.Header{"Location": []string{"http://example.com"}},
},
},
{
resp: http.Response{
StatusCode: http.StatusTemporaryRedirect,
Header: http.Header{"Location": []string{"http://example.com"}},
},
},
{
resp: http.Response{
StatusCode: http.StatusTeapot,
},
},
},
},
wantCode: http.StatusTeapot,
},
// fail if too many redirects
{
checkRedirect: func(via int) error {
if via >= 2 {
return ErrTooManyRedirects
}
return nil
},
client: &multiStaticHTTPClient{
responses: []staticHTTPResponse{
{
resp: http.Response{
StatusCode: http.StatusTemporaryRedirect,
Header: http.Header{"Location": []string{"http://example.com"}},
},
},
{
resp: http.Response{
StatusCode: http.StatusTemporaryRedirect,
Header: http.Header{"Location": []string{"http://example.com"}},
},
},
{
resp: http.Response{
StatusCode: http.StatusTeapot,
},
},
},
},
wantErr: ErrTooManyRedirects,
},
// fail if Location header not set
{
checkRedirect: func(int) error { return ErrTooManyRedirects },
client: &multiStaticHTTPClient{
responses: []staticHTTPResponse{
{
resp: http.Response{
StatusCode: http.StatusTemporaryRedirect,
},
},
},
},
wantErr: errors.New("Location header not set"),
},
// fail if Location header is invalid
{
checkRedirect: func(int) error { return ErrTooManyRedirects },
client: &multiStaticHTTPClient{
responses: []staticHTTPResponse{
{
resp: http.Response{
StatusCode: http.StatusTemporaryRedirect,
Header: http.Header{"Location": []string{":"}},
},
},
},
},
wantErr: errors.New("Location header not valid URL: :"),
},
// fail if redirects checked way too many times
{
checkRedirect: func(int) error { return nil },
client: &staticHTTPClient{
resp: http.Response{
StatusCode: http.StatusTemporaryRedirect,
Header: http.Header{"Location": []string{"http://example.com"}},
},
},
wantErr: errTooManyRedirectChecks,
},
}
for i, tt := range tests {
client := &redirectFollowingHTTPClient{client: tt.client, checkRedirect: tt.checkRedirect}
resp, _, err := client.Do(context.Background(), nil)
if !reflect.DeepEqual(tt.wantErr, err) {
t.Errorf("#%d: got err=%v, want=%v", i, err, tt.wantErr)
continue
}
if resp == nil {
if tt.wantCode != 0 {
t.Errorf("#%d: resp is nil, want=%d", i, tt.wantCode)
}
continue
}
if resp.StatusCode != tt.wantCode {
t.Errorf("#%d: resp code=%d, want=%d", i, resp.StatusCode, tt.wantCode)
continue
}
}
}
func TestDefaultCheckRedirect(t *testing.T) {
tests := []struct {
num int
err error
}{
{0, nil},
{5, nil},
{10, nil},
{11, ErrTooManyRedirects},
{29, ErrTooManyRedirects},
}
for i, tt := range tests {
err := DefaultCheckRedirect(tt.num)
if !reflect.DeepEqual(tt.err, err) {
t.Errorf("#%d: want=%#v got=%#v", i, tt.err, err)
}
}
}
func TestHTTPClusterClientSync(t *testing.T) {
cf := newStaticHTTPClientFactory([]staticHTTPResponse{
{
resp: http.Response{StatusCode: http.StatusOK, Header: http.Header{"Content-Type": []string{"application/json"}}},
body: []byte(`{"members":[{"id":"2745e2525fce8fe","peerURLs":["http://127.0.0.1:7003"],"name":"node3","clientURLs":["http://127.0.0.1:4003"]},{"id":"42134f434382925","peerURLs":["http://127.0.0.1:2380","http://127.0.0.1:7001"],"name":"node1","clientURLs":["http://127.0.0.1:2379","http://127.0.0.1:4001"]},{"id":"94088180e21eb87b","peerURLs":["http://127.0.0.1:7002"],"name":"node2","clientURLs":["http://127.0.0.1:4002"]}]}`),
},
})
hc := &httpClusterClient{
clientFactory: cf,
rand: rand.New(rand.NewSource(0)),
}
err := hc.SetEndpoints([]string{"http://127.0.0.1:2379"})
if err != nil {
t.Fatalf("unexpected error during setup: %#v", err)
}
want := []string{"http://127.0.0.1:2379"}
got := hc.Endpoints()
if !reflect.DeepEqual(want, got) {
t.Fatalf("incorrect endpoints: want=%#v got=%#v", want, got)
}
err = hc.Sync(context.Background())
if err != nil {
t.Fatalf("unexpected error during Sync: %#v", err)
}
want = []string{"http://127.0.0.1:2379", "http://127.0.0.1:4001", "http://127.0.0.1:4002", "http://127.0.0.1:4003"}
got = hc.Endpoints()
sort.Sort(sort.StringSlice(got))
if !reflect.DeepEqual(want, got) {
t.Fatalf("incorrect endpoints post-Sync: want=%#v got=%#v", want, got)
}
err = hc.SetEndpoints([]string{"http://127.0.0.1:4009"})
if err != nil {
t.Fatalf("unexpected error during reset: %#v", err)
}
want = []string{"http://127.0.0.1:4009"}
got = hc.Endpoints()
if !reflect.DeepEqual(want, got) {
t.Fatalf("incorrect endpoints post-reset: want=%#v got=%#v", want, got)
}
}
func TestHTTPClusterClientSyncFail(t *testing.T) {
cf := newStaticHTTPClientFactory([]staticHTTPResponse{
{err: errors.New("fail!")},
})
hc := &httpClusterClient{
clientFactory: cf,
rand: rand.New(rand.NewSource(0)),
}
err := hc.SetEndpoints([]string{"http://127.0.0.1:2379"})
if err != nil {
t.Fatalf("unexpected error during setup: %#v", err)
}
want := []string{"http://127.0.0.1:2379"}
got := hc.Endpoints()
if !reflect.DeepEqual(want, got) {
t.Fatalf("incorrect endpoints: want=%#v got=%#v", want, got)
}
err = hc.Sync(context.Background())
if err == nil {
t.Fatalf("got nil error during Sync")
}
got = hc.Endpoints()
if !reflect.DeepEqual(want, got) {
t.Fatalf("incorrect endpoints after failed Sync: want=%#v got=%#v", want, got)
}
}
func TestHTTPClusterClientAutoSyncCancelContext(t *testing.T) {
cf := newStaticHTTPClientFactory([]staticHTTPResponse{
{
resp: http.Response{StatusCode: http.StatusOK, Header: http.Header{"Content-Type": []string{"application/json"}}},
body: []byte(`{"members":[{"id":"2745e2525fce8fe","peerURLs":["http://127.0.0.1:7003"],"name":"node3","clientURLs":["http://127.0.0.1:4003"]},{"id":"42134f434382925","peerURLs":["http://127.0.0.1:2380","http://127.0.0.1:7001"],"name":"node1","clientURLs":["http://127.0.0.1:2379","http://127.0.0.1:4001"]},{"id":"94088180e21eb87b","peerURLs":["http://127.0.0.1:7002"],"name":"node2","clientURLs":["http://127.0.0.1:4002"]}]}`),
},
})
hc := &httpClusterClient{
clientFactory: cf,
rand: rand.New(rand.NewSource(0)),
}
err := hc.SetEndpoints([]string{"http://127.0.0.1:2379"})
if err != nil {
t.Fatalf("unexpected error during setup: %#v", err)
}
ctx, cancel := context.WithCancel(context.Background())
cancel()
err = hc.AutoSync(ctx, time.Hour)
if err != context.Canceled {
t.Fatalf("incorrect error value: want=%v got=%v", context.Canceled, err)
}
}
func TestHTTPClusterClientAutoSyncFail(t *testing.T) {
cf := newStaticHTTPClientFactory([]staticHTTPResponse{
{err: errors.New("fail!")},
})
hc := &httpClusterClient{
clientFactory: cf,
rand: rand.New(rand.NewSource(0)),
}
err := hc.SetEndpoints([]string{"http://127.0.0.1:2379"})
if err != nil {
t.Fatalf("unexpected error during setup: %#v", err)
}
err = hc.AutoSync(context.Background(), time.Hour)
if !strings.HasPrefix(err.Error(), ErrClusterUnavailable.Error()) {
t.Fatalf("incorrect error value: want=%v got=%v", ErrClusterUnavailable, err)
}
}
func TestHTTPClusterClientGetVersion(t *testing.T) {
body := []byte(`{"etcdserver":"2.3.2","etcdcluster":"2.3.0"}`)
cf := newStaticHTTPClientFactory([]staticHTTPResponse{
{
resp: http.Response{StatusCode: http.StatusOK, Header: http.Header{"Content-Length": []string{"44"}}},
body: body,
},
})
hc := &httpClusterClient{
clientFactory: cf,
rand: rand.New(rand.NewSource(0)),
}
err := hc.SetEndpoints([]string{"http://127.0.0.1:4003", "http://127.0.0.1:2379", "http://127.0.0.1:4001", "http://127.0.0.1:4002"})
if err != nil {
t.Fatalf("unexpected error during setup: %#v", err)
}
actual, err := hc.GetVersion(context.Background())
if err != nil {
t.Errorf("non-nil error: %#v", err)
}
expected := version.Versions{Server: "2.3.2", Cluster: "2.3.0"}
if !reflect.DeepEqual(&expected, actual) {
t.Errorf("incorrect Response: want=%#v got=%#v", expected, actual)
}
}
// TestHTTPClusterClientSyncPinEndpoint tests that Sync() pins the endpoint when
// it gets exactly the same member list as before.
func TestHTTPClusterClientSyncPinEndpoint(t *testing.T) {
cf := newStaticHTTPClientFactory([]staticHTTPResponse{
{
resp: http.Response{StatusCode: http.StatusOK, Header: http.Header{"Content-Type": []string{"application/json"}}},
body: []byte(`{"members":[{"id":"2745e2525fce8fe","peerURLs":["http://127.0.0.1:7003"],"name":"node3","clientURLs":["http://127.0.0.1:4003"]},{"id":"42134f434382925","peerURLs":["http://127.0.0.1:2380","http://127.0.0.1:7001"],"name":"node1","clientURLs":["http://127.0.0.1:2379","http://127.0.0.1:4001"]},{"id":"94088180e21eb87b","peerURLs":["http://127.0.0.1:7002"],"name":"node2","clientURLs":["http://127.0.0.1:4002"]}]}`),
},
{
resp: http.Response{StatusCode: http.StatusOK, Header: http.Header{"Content-Type": []string{"application/json"}}},
body: []byte(`{"members":[{"id":"2745e2525fce8fe","peerURLs":["http://127.0.0.1:7003"],"name":"node3","clientURLs":["http://127.0.0.1:4003"]},{"id":"42134f434382925","peerURLs":["http://127.0.0.1:2380","http://127.0.0.1:7001"],"name":"node1","clientURLs":["http://127.0.0.1:2379","http://127.0.0.1:4001"]},{"id":"94088180e21eb87b","peerURLs":["http://127.0.0.1:7002"],"name":"node2","clientURLs":["http://127.0.0.1:4002"]}]}`),
},
{
resp: http.Response{StatusCode: http.StatusOK, Header: http.Header{"Content-Type": []string{"application/json"}}},
body: []byte(`{"members":[{"id":"2745e2525fce8fe","peerURLs":["http://127.0.0.1:7003"],"name":"node3","clientURLs":["http://127.0.0.1:4003"]},{"id":"42134f434382925","peerURLs":["http://127.0.0.1:2380","http://127.0.0.1:7001"],"name":"node1","clientURLs":["http://127.0.0.1:2379","http://127.0.0.1:4001"]},{"id":"94088180e21eb87b","peerURLs":["http://127.0.0.1:7002"],"name":"node2","clientURLs":["http://127.0.0.1:4002"]}]}`),
},
})
hc := &httpClusterClient{
clientFactory: cf,
rand: rand.New(rand.NewSource(0)),
}
err := hc.SetEndpoints([]string{"http://127.0.0.1:4003", "http://127.0.0.1:2379", "http://127.0.0.1:4001", "http://127.0.0.1:4002"})
if err != nil {
t.Fatalf("unexpected error during setup: %#v", err)
}
pinnedEndpoint := hc.endpoints[hc.pinned]
for i := 0; i < 3; i++ {
err = hc.Sync(context.Background())
if err != nil {
t.Fatalf("#%d: unexpected error during Sync: %#v", i, err)
}
if g := hc.endpoints[hc.pinned]; g != pinnedEndpoint {
t.Errorf("#%d: pinned endpoint = %v, want %v", i, g, pinnedEndpoint)
}
}
}
// TestHTTPClusterClientSyncUnpinEndpoint tests that Sync() unpins the endpoint when
// it gets a different member list than before.
func TestHTTPClusterClientSyncUnpinEndpoint(t *testing.T) {
cf := newStaticHTTPClientFactory([]staticHTTPResponse{
{
resp: http.Response{StatusCode: http.StatusOK, Header: http.Header{"Content-Type": []string{"application/json"}}},
body: []byte(`{"members":[{"id":"2745e2525fce8fe","peerURLs":["http://127.0.0.1:7003"],"name":"node3","clientURLs":["http://127.0.0.1:4003"]},{"id":"42134f434382925","peerURLs":["http://127.0.0.1:2380","http://127.0.0.1:7001"],"name":"node1","clientURLs":["http://127.0.0.1:2379","http://127.0.0.1:4001"]},{"id":"94088180e21eb87b","peerURLs":["http://127.0.0.1:7002"],"name":"node2","clientURLs":["http://127.0.0.1:4002"]}]}`),
},
{
resp: http.Response{StatusCode: http.StatusOK, Header: http.Header{"Content-Type": []string{"application/json"}}},
body: []byte(`{"members":[{"id":"42134f434382925","peerURLs":["http://127.0.0.1:2380","http://127.0.0.1:7001"],"name":"node1","clientURLs":["http://127.0.0.1:2379","http://127.0.0.1:4001"]},{"id":"94088180e21eb87b","peerURLs":["http://127.0.0.1:7002"],"name":"node2","clientURLs":["http://127.0.0.1:4002"]}]}`),
},
{
resp: http.Response{StatusCode: http.StatusOK, Header: http.Header{"Content-Type": []string{"application/json"}}},
body: []byte(`{"members":[{"id":"2745e2525fce8fe","peerURLs":["http://127.0.0.1:7003"],"name":"node3","clientURLs":["http://127.0.0.1:4003"]},{"id":"42134f434382925","peerURLs":["http://127.0.0.1:2380","http://127.0.0.1:7001"],"name":"node1","clientURLs":["http://127.0.0.1:2379","http://127.0.0.1:4001"]},{"id":"94088180e21eb87b","peerURLs":["http://127.0.0.1:7002"],"name":"node2","clientURLs":["http://127.0.0.1:4002"]}]}`),
},
})
hc := &httpClusterClient{
clientFactory: cf,
rand: rand.New(rand.NewSource(0)),
}
err := hc.SetEndpoints([]string{"http://127.0.0.1:4003", "http://127.0.0.1:2379", "http://127.0.0.1:4001", "http://127.0.0.1:4002"})
if err != nil {
t.Fatalf("unexpected error during setup: %#v", err)
}
wants := []string{"http://127.0.0.1:2379", "http://127.0.0.1:4001", "http://127.0.0.1:4002"}
for i := 0; i < 3; i++ {
err = hc.Sync(context.Background())
if err != nil {
t.Fatalf("#%d: unexpected error during Sync: %#v", i, err)
}
if g := hc.endpoints[hc.pinned]; g.String() != wants[i] {
t.Errorf("#%d: pinned endpoint = %v, want %v", i, g, wants[i])
}
}
}
// TestHTTPClusterClientSyncPinLeaderEndpoint tests that Sync() pins the leader
// when the selection mode is EndpointSelectionPrioritizeLeader
func TestHTTPClusterClientSyncPinLeaderEndpoint(t *testing.T) {
cf := newStaticHTTPClientFactory([]staticHTTPResponse{
{
resp: http.Response{StatusCode: http.StatusOK, Header: http.Header{"Content-Type": []string{"application/json"}}},
body: []byte(`{"members":[{"id":"2745e2525fce8fe","peerURLs":["http://127.0.0.1:7003"],"name":"node3","clientURLs":["http://127.0.0.1:4003"]},{"id":"42134f434382925","peerURLs":["http://127.0.0.1:2380","http://127.0.0.1:7001"],"name":"node1","clientURLs":["http://127.0.0.1:2379","http://127.0.0.1:4001"]},{"id":"94088180e21eb87b","peerURLs":["http://127.0.0.1:7002"],"name":"node2","clientURLs":["http://127.0.0.1:4002"]}]}`),
},
{
resp: http.Response{StatusCode: http.StatusOK, Header: http.Header{"Content-Type": []string{"application/json"}}},
body: []byte(`{"id":"2745e2525fce8fe","peerURLs":["http://127.0.0.1:7003"],"name":"node3","clientURLs":["http://127.0.0.1:4003"]}`),
},
{
resp: http.Response{StatusCode: http.StatusOK, Header: http.Header{"Content-Type": []string{"application/json"}}},
body: []byte(`{"members":[{"id":"2745e2525fce8fe","peerURLs":["http://127.0.0.1:7003"],"name":"node3","clientURLs":["http://127.0.0.1:4003"]},{"id":"42134f434382925","peerURLs":["http://127.0.0.1:2380","http://127.0.0.1:7001"],"name":"node1","clientURLs":["http://127.0.0.1:2379","http://127.0.0.1:4001"]},{"id":"94088180e21eb87b","peerURLs":["http://127.0.0.1:7002"],"name":"node2","clientURLs":["http://127.0.0.1:4002"]}]}`),
},
{
resp: http.Response{StatusCode: http.StatusOK, Header: http.Header{"Content-Type": []string{"application/json"}}},
body: []byte(`{"id":"94088180e21eb87b","peerURLs":["http://127.0.0.1:7002"],"name":"node2","clientURLs":["http://127.0.0.1:4002"]}`),
},
})
hc := &httpClusterClient{
clientFactory: cf,
rand: rand.New(rand.NewSource(0)),
selectionMode: EndpointSelectionPrioritizeLeader,
endpoints: []url.URL{{}}, // Need somewhere to pretend to send to initially
}
wants := []string{"http://127.0.0.1:4003", "http://127.0.0.1:4002"}
for i, want := range wants {
err := hc.Sync(context.Background())
if err != nil {
t.Fatalf("#%d: unexpected error during Sync: %#v", i, err)
}
pinned := hc.endpoints[hc.pinned].String()
if pinned != want {
t.Errorf("#%d: pinned endpoint = %v, want %v", i, pinned, want)
}
}
}
func TestHTTPClusterClientResetFail(t *testing.T) {
tests := [][]string{
// need at least one endpoint
{},
// urls must be valid
{":"},
}
for i, tt := range tests {
hc := &httpClusterClient{rand: rand.New(rand.NewSource(0))}
err := hc.SetEndpoints(tt)
if err == nil {
t.Errorf("#%d: expected non-nil error", i)
}
}
}
func TestHTTPClusterClientResetPinRandom(t *testing.T) {
round := 2000
pinNum := 0
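// with three endpoints and a fair random pin, ~1/3 of rounds should pin the first endpoint; the check below allows ±5% slack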
for i := 0; i < round; i++ {
hc := &httpClusterClient{rand: rand.New(rand.NewSource(int64(i)))}
err := hc.SetEndpoints([]string{"http://127.0.0.1:4001", "http://127.0.0.1:4002", "http://127.0.0.1:4003"})
if err != nil {
t.Fatalf("#%d: reset error (%v)", i, err)
}
if hc.endpoints[hc.pinned].String() == "http://127.0.0.1:4001" {
pinNum++
}
}
min := 1.0/3.0 - 0.05
max := 1.0/3.0 + 0.05
if ratio := float64(pinNum) / float64(round); ratio > max || ratio < min {
t.Errorf("pinned ratio = %v, want [%v, %v]", ratio, min, max)
}
}<|fim▁end|>
| |
<|file_name|>help.option.ts<|end_file_name|><|fim▁begin|>import {Option} from "./option";
<|fim▁hole|>export class HelpOption extends Option {
/**
* Builds the standard help flag: short name -h, long name --help, no argument.
*/
constructor() {
super();
this.shortName = 'h';
this.longName = 'help';
this.argument = '';
}
}<|fim▁end|>
| |
<|file_name|>compile.js<|end_file_name|><|fim▁begin|>/*
* oskari-compile
*/
module.exports = function(grunt) {
grunt.registerMultiTask('compile', 'Compile appsetup js', function() {
var starttime = (new Date()).getTime();
var options = this.data.options;
// Run some sync stuff.
grunt.log.writeln('Compiling...');
// Catch if required fields are not provided.
if ( !options.appSetupFile ) {
grunt.fail.warn('No path provided for Compile to scan.');
}
if ( !options.dest ) {
grunt.fail.warn('No destination path provided for Compile to use.');<|fim▁hole|> parser = require('../parser.js'),
processedAppSetup = parser.getComponents(options.appSetupFile);
grunt.log.writeln('Parsed appSetup:' + options.appSetupFile);
// internal minify i18n files function
this.minifyLocalization = function(langfiles, path) {
for (var id in langfiles) {
//console.log('Minifying loc:' + id + '/' + langfiles[id]);
this.minifyJS(langfiles[id], path + 'oskari_lang_' + id + '.js', options.concat);
}
}
// internal minify JS function
this.minifyJS = function(files, outputFile, concat) {
var okFiles = [],
fileMap = {},
result = null;
for (var i = 0; i < files.length; ++i) {
if (!fs.existsSync(files[i])) {
var msg = 'Couldnt locate ' + files[i];
throw msg;
}
// do not put duplicates on compiled code
if(!fileMap[files[i]]) {
fileMap[files[i]] = true;
okFiles.push(files[i]);
} else {
grunt.log.writeln('File already added:' + files[i]);
}
}
// minify or concatenate the files
if (!concat) {
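// (uglify-js 2.x API, as used here: minify() takes an array of file paths and reads them from disk itself)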
result = UglifyJS.minify(okFiles, {
//outSourceMap : "out.js.map",
warnings : true,
compress : true
});
} else {
// emulate the result uglify creates, but only concatenating
result = {"code" : ""};
for (var j = 0, jlen = okFiles.length; j < jlen; j +=1) {
result.code += fs.readFileSync(okFiles[j], 'utf8');
}
}
// write result to disk
fs.writeFileSync(outputFile, result.code, 'utf8');
}
// validate parsed appsetup
var compiledDir = options.dest;
if (!fs.existsSync(compiledDir)) {
fs.mkdirSync(compiledDir);
}
var files = [];
for (var j = 0; j < processedAppSetup.length; ++j) {
var array = parser.getFilesForComponent(processedAppSetup[j], 'javascript');
files = files.concat(array);
}
this.minifyJS(files, compiledDir + 'oskari.min.js', options.concat);
var langfiles = {};
for (var j = 0; j < processedAppSetup.length; ++j) {
var deps = processedAppSetup[j].dependencies;
for (var i = 0; i < deps.length; ++i) {
for (var lang in deps[i].locales) {
if (!langfiles[lang]) {
langfiles[lang] = [];
}
langfiles[lang] = langfiles[lang].concat(deps[i].locales[lang]);
}
}
}
this.minifyLocalization(langfiles, compiledDir);
var unknownfiles = [];
for(var j = 0; j < processedAppSetup.length; ++j) {
unknownfiles = unknownfiles.concat(parser.getFilesForComponent(processedAppSetup[j], 'unknown'));
}
if(unknownfiles.length != 0) {
console.log('Appsetup referenced types of files that couldn\'t be handled: ' + unknownfiles);
}
var endtime = (new Date()).getTime();
grunt.log.writeln('Compile completed in ' + ((endtime - starttime) / 1000) + ' seconds');
});
};<|fim▁end|>
}
var fs = require('fs'),
UglifyJS = require('uglify-js'),
<|file_name|>exception.rs<|end_file_name|><|fim▁begin|>//! Exceptions
use asm;
<|fim▁hole|> /// Non-maskable interrupt
pub fn __nmi();
/// Hard fault
pub fn __hard_fault();
}
/// Default exception handler
#[no_mangle]
pub unsafe extern "C" fn __default_handler() {
asm::bkpt();
}
/// Program entry point: The reset function.
#[link_section = ".reset"]
#[no_mangle]
pub static __RESET: Option<unsafe extern "C" fn() -> !> = Some(start);
/// Cortex-M processor exceptions
#[link_section = ".exceptions"]
#[no_mangle]
pub static __EXCEPTIONS: [Option<unsafe extern "C" fn()>; 14] = [None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None];<|fim▁end|>
extern "C" {
/// Reset
pub fn start() -> !;
<|file_name|>list sort strings.java<|end_file_name|><|fim▁begin|>//<|fim▁hole|>import java.util.Comparator;
abc.sort(Comparator.naturalOrder());
//<|fim▁end|>
<|file_name|>account.py<|end_file_name|><|fim▁begin|>from django.contrib.auth.models import AnonymousUser
from core.models import Identity
<|fim▁hole|>from api.v2.views.base import AdminAuthViewSet
class AccountViewSet(AdminAuthViewSet):
"""
API endpoint that allows providers to be viewed or edited.
"""
lookup_fields = ("id", "uuid")
queryset = Identity.objects.all()
serializer_class = AccountSerializer
http_method_names = ['post', 'head', 'options', 'trace']
def get_queryset(self):
"""
Filter providers by current user
"""
user = self.request.user
if (type(user) == AnonymousUser):
return Identity.objects.none()
identities = user.current_identities()
return identities<|fim▁end|>
from api.v2.serializers.post import AccountSerializer
<|file_name|>resampled_source.cc<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2007-2017 Paul Davis <[email protected]>
* Copyright (C) 2008-2011 David Robillard <[email protected]>
* Copyright (C) 2010 Carl Hetherington <[email protected]>
* Copyright (C) 2013-2016 John Emmas <[email protected]>
* Copyright (C) 2015-2016 Robin Gareus <[email protected]>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "pbd/error.h"
#include "ardour/resampled_source.h"
#include "pbd/failed_constructor.h"
#include "pbd/i18n.h"
using namespace ARDOUR;
using namespace PBD;
#ifdef PLATFORM_WINDOWS
const uint32_t ResampledImportableSource::blocksize = 524288U;
#else
const uint32_t ResampledImportableSource::blocksize = 16384U;
#endif
ResampledImportableSource::ResampledImportableSource (boost::shared_ptr<ImportableSource> src, samplecnt_t rate, SrcQuality srcq)
: source (src)
, _src_state (0)
{
_src_type = SRC_SINC_BEST_QUALITY;
switch (srcq) {
case SrcBest:
_src_type = SRC_SINC_BEST_QUALITY;
break;
case SrcGood:
_src_type = SRC_SINC_MEDIUM_QUALITY;
break;
case SrcQuick:
_src_type = SRC_SINC_FASTEST;
break;
case SrcFast:
_src_type = SRC_ZERO_ORDER_HOLD;
break;
case SrcFastest:
_src_type = SRC_LINEAR;
break;
}
_input = new float[blocksize];
seek (0);
_src_data.src_ratio = ((float) rate) / source->samplerate();
}
ResampledImportableSource::~ResampledImportableSource ()
{
_src_state = src_delete (_src_state) ;
delete [] _input;
}
samplecnt_t
ResampledImportableSource::read (Sample* output, samplecnt_t nframes)
{
int err;
size_t bs = floor ((float)(blocksize / source->channels())) * source->channels();
/* If the input buffer is empty, refill it. */
if (_src_data.input_frames == 0) {
_src_data.input_frames = source->read (_input, bs);
/* The last read will not be a full buffer, so set end_of_input. */
if ((size_t) _src_data.input_frames < bs) {
_end_of_input = true;
}
_src_data.input_frames /= source->channels();
_src_data.data_in = _input;
}
_src_data.data_out = output;
_src_data.output_frames = nframes / source->channels();
if (_end_of_input && _src_data.input_frames * _src_data.src_ratio <= _src_data.output_frames) {
/* only set src_data.end_of_input for the last cycle.
*
* The flag only affects writing out remaining data in the
* internal buffer of src_state.
	 * SRC is not aware of data buffered here in _src_data.input
* which needs to be processed first.
*/
_src_data.end_of_input = true;
}
if ((err = src_process (_src_state, &_src_data))) {
error << string_compose(_("Import: %1"), src_strerror (err)) << endmsg ;
return 0 ;
}
/* Terminate if at end */
if (_src_data.end_of_input && _src_data.output_frames_gen == 0) {
return 0;
}
_src_data.data_in += _src_data.input_frames_used * source->channels();
_src_data.input_frames -= _src_data.input_frames_used ;
return _src_data.output_frames_gen * source->channels();
}
void
ResampledImportableSource::seek (samplepos_t pos)
{
source->seek (pos);
/* and reset things so that we start from scratch with the conversion */
if (_src_state) {
src_delete (_src_state);
}
int err;
if ((_src_state = src_new (_src_type, source->channels(), &err)) == 0) {
error << string_compose(_("Import: src_new() failed : %1"), src_strerror (err)) << endmsg ;
throw failed_constructor ();
}
<|fim▁hole|> _src_data.end_of_input = 0;
_end_of_input = false;
}
samplepos_t
ResampledImportableSource::natural_position () const
{
return source->natural_position() * ratio ();
}<|fim▁end|>
_src_data.input_frames = 0;
_src_data.data_in = _input;
<|file_name|>Gruntfile.js<|end_file_name|><|fim▁begin|>module.exports = function(grunt) {
grunt.initConfig({
// !! This is the name of the task 'requirejs' (grunt-contrib-requirejs module)
requirejs: {
dist: {
// !! You can drop your app.build.js config wholesale into 'options'
options: {
appDir: "bl_scoreboard/static",
baseUrl: "js",
dir: "bl_scoreboard/dist",
keepBuildDir: true, // cleanup of 'dist' is supposed to be done by 'purge:dist' task
mainConfigFile: "bl_scoreboard/static/js/main.js",
name: "main", // points to 'static/js/main.js'
optimize: 'uglify2',
uglify2: {
compress: {}
},
skipDirOptimize: true,
preserveLicenseComments: false,
findNestedDependencies: false,
removeCombined: true,
inlineText: true,
optimizeCss: 'standard',
logLevel: 0,
fileExclusionRegExp: /^\./
}
}
},
compress: {
dist: {
options: {
mode: 'gzip',
level: 9
},
files: [
{expand: true, src: [
'bl_scoreboard/dist/js/**/*.js',
'bl_scoreboard/dist/lib/requirejs/require.js'
], ext: '.js.gz'}
]
}
},
uglify: {
rjs_dist: {
options: { },
files: {
'bl_scoreboard/dist/lib/requirejs/require.js': ['bl_scoreboard/dist/lib/requirejs/require.js']<|fim▁hole|> }
},
// !! This is the name of the task 'clean' (grunt-contrib-clean module) after renaming to 'purge'
purge: {
dist: {
src: [
'bl_scoreboard/dist/*', '!bl_scoreboard/dist/.gitignore'
]
},
dev: {
src: [
'bl_scoreboard/static/lib/*', '!bl_scoreboard/static/lib/.gitignore',
]
}
},
// !! Bower's 'install' task
bower: {
dev: {
options: {
// targetDir: "bl_scoreboard/static/lib",
// layout: "byComponent",
verbose: true
}
}
},
bower_rjs: {
all: {
rjsConfig: "bl_scoreboard/static/js/main.js"
}
}
});
// This loads the requirejs plugin into grunt
grunt.loadNpmTasks('grunt-contrib-requirejs');
grunt.loadNpmTasks('grunt-contrib-compress');
grunt.loadNpmTasks('grunt-contrib-uglify');
// Must be renamed because 'clean' is very common name
grunt.loadNpmTasks('grunt-contrib-clean');
grunt.renameTask('clean', 'purge'); // because 'clean' is very common name
// Must be renamed because of task name conflict between 'grunt-bower-requirejs' and 'grunt-bower-task'.
// They both are use 'bower' task name.
// Also, 'bower-requirejs' npm module can be used. See '.bowerrc.inactive' at the root directory for some details.
grunt.loadNpmTasks('grunt-bower-requirejs');
grunt.renameTask('bower', 'bower_rjs');
grunt.loadNpmTasks('grunt-bower-task');
// Register tasks
grunt.registerTask('clean', ['purge:dist', 'purge:dev']);
grunt.registerTask('clean-dist', ['purge:dist']);
grunt.registerTask('clean-dev', ['purge:dev']);
grunt.registerTask('build-dev', ['clean-dev', 'bower:dev', 'bower_rjs']);
grunt.registerTask('build-dist', ['clean-dist', 'requirejs:dist', 'uglify:rjs_dist', 'compress:dist']);
};<|fim▁end|>
}
<|file_name|>joystick_controller.py<|end_file_name|><|fim▁begin|>import sdl2
from mgl2d.input.game_controller import GameController
class JoystickController(GameController):
_DEBUG_CONTROLLER = False
AXIS_DEAD_ZONE = 4000
AXIS_MIN_VALUE = -32768
AXIS_MAX_VALUE = 32767
def __init__(self):
super().__init__()
self._sdl_controller = None
self._sdl_joystick = None
self._sdl_joystick_id = None
def open(self, device_index):
self._sdl_controller = sdl2.SDL_GameControllerOpen(device_index)
self._sdl_joystick = sdl2.SDL_GameControllerGetJoystick(self._sdl_controller)
self._sdl_joystick_id = sdl2.SDL_JoystickInstanceID(self._sdl_joystick)
self._controller_name = sdl2.SDL_JoystickName(self._sdl_joystick)
        self._num_axis = sdl2.SDL_JoystickNumAxes(self._sdl_joystick)
self._num_buttons = sdl2.SDL_JoystickNumButtons(self._sdl_joystick)
self._num_balls = sdl2.SDL_JoystickNumBalls(self._sdl_joystick)
for btn_index in range(0, self.MAX_BUTTONS):
self._button_down[btn_index] = 0
self._button_pressed[btn_index] = 0
self._button_released[btn_index] = 0
if self._sdl_joystick_id != -1:
self._connected = True
def close(self):<|fim▁hole|> self._connected = False
def update(self):
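        # Refresh button state for this frame, flagging buttons that have just
        # gone down (edge detection against the previous frame's state).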
if not self._connected:
return
for btn_index in range(0, self._num_buttons):
self._button_pressed[btn_index] = 0
is_down = sdl2.SDL_GameControllerGetButton(self._sdl_controller, btn_index)
if is_down and not self._button_down[btn_index]:
self._button_pressed[btn_index] = True
self._button_down[btn_index] = is_down
def get_axis(self, axis_index):
axis_value = sdl2.SDL_GameControllerGetAxis(self._sdl_controller, axis_index)
# Sticks have a dead zone
if axis_index != self.AXIS_TRIGGER_LEFT and axis_index != self.AXIS_TRIGGER_RIGHT:
if abs(axis_value) < self.AXIS_DEAD_ZONE:
return 0
# Return scaled value
return axis_value / self.AXIS_MAX_VALUE if axis_value > 0 else -axis_value / self.AXIS_MIN_VALUE
def get_axis_digital_value(self, axis_name):
# Not implemented
return 0
def to_string(self):
return f'[\'{self._controller_name}\',axis:{self._num_axis},buttons:{self._num_buttons}]'<|fim▁end|>
sdl2.SDL_GameControllerClose(self._sdl_controller)
self._sdl_controller = None
self._sdl_joystick = None
self._sdl_joystick_id = None
<|file_name|>push-animator.js<|end_file_name|><|fim▁begin|>/*
Copyright 2013-2015 ASIAL CORPORATION
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import animit from '../../ons/animit';
import SplitterAnimator from './animator.js';
export default class PushSplitterAnimator extends SplitterAnimator {
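  // Everything that must slide together: the side menu, the content and,
  // when the opposite side is in split mode, that side as well.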
_getSlidingElements() {
const slidingElements = [this._side, this._content];
if (this._oppositeSide && this._oppositeSide.mode === 'split') {
slidingElements.push(this._oppositeSide);
}
return slidingElements;
}
translate(distance) {
if (!this._slidingElements) {
this._slidingElements = this._getSlidingElements();
}
animit(this._slidingElements)
.queue({
transform: `translate3d(${this.minus + distance}px, 0px, 0px)`
})
.play();
}
/**
* @param {Function} done
*/
  open(done) {
const max = this._side.offsetWidth;
this._slidingElements = this._getSlidingElements();
animit.runAll(
animit(this._slidingElements)
.wait(this.delay)
.queue({
transform: `translate3d(${this.minus + max}px, 0px, 0px)`
}, {
duration: this.duration,
timing: this.timing
})
.queue(callback => {
this._slidingElements = null;
callback();
done && done();
}),
animit(this._mask)
.wait(this.delay)
.queue({
display: 'block'
})
);
}
/**
* @param {Function} done
*/
close(done) {
this._slidingElements = this._getSlidingElements();
animit.runAll(
animit(this._slidingElements)
.wait(this.delay)
.queue({
transform: 'translate3d(0px, 0px, 0px)'
}, {
duration: this.duration,
timing: this.timing
})
.queue(callback => {
this._slidingElements = null;
super.clearTransition();
done && done();
callback();
}),
animit(this._mask)<|fim▁hole|> })
);
}
}<|fim▁end|>
.wait(this.delay)
.queue({
display: 'none'
<|file_name|>worker.js<|end_file_name|><|fim▁begin|>// The worker we construct depends on the environment we're in (nodejs or webbrowser);
module.exports = function makeChessWorker() {
try {
// Reference error if in nodejs
Worker;
const worker = new Worker('scalachessworker.js');
return worker;<|fim▁hole|> return new ChessWorker(path);
}
}<|fim▁end|>
} catch (e) {
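    // `Worker` is undefined, so assume a nodejs environment and fall back
    // to tiny-worker running the bundled scalachess build.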
const rootDir = `${__dirname.replace('ctrl', '')}/`;
    const path = `${rootDir}vendor/scalachessjs/scalachess.js`;
const ChessWorker = require('tiny-worker');
<|file_name|>position.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! Generic types for CSS handling of specified and computed values of
//! [`position`](https://drafts.csswg.org/css-backgrounds-3/#position)
/// A generic type for representing a CSS [position](https://drafts.csswg.org/css-values/#position).
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToAnimatedValue,<|fim▁hole|> ToComputedValue,
ToResolvedValue,
ToShmem,
)]
#[repr(C)]
pub struct GenericPosition<H, V> {
/// The horizontal component of position.
pub horizontal: H,
/// The vertical component of position.
pub vertical: V,
}
pub use self::GenericPosition as Position;
impl<H, V> Position<H, V> {
/// Returns a new position.
pub fn new(horizontal: H, vertical: V) -> Self {
Self {
horizontal,
vertical,
}
}
}
/// A generic type for representing an `Auto | <position>`.
/// This is used by <offset-anchor> for now.
/// https://drafts.fxtf.org/motion-1/#offset-anchor-property
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
Parse,
PartialEq,
SpecifiedValueInfo,
ToAnimatedZero,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
#[repr(C, u8)]
pub enum GenericPositionOrAuto<Pos> {
/// The <position> value.
Position(Pos),
/// The keyword `auto`.
Auto,
}
pub use self::GenericPositionOrAuto as PositionOrAuto;
impl<Pos> PositionOrAuto<Pos> {
/// Return `auto`.
#[inline]
pub fn auto() -> Self {
PositionOrAuto::Auto
}
}
/// A generic value for the `z-index` property.
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
PartialEq,
Parse,
SpecifiedValueInfo,
ToAnimatedZero,
ToComputedValue,
ToCss,
ToResolvedValue,
ToShmem,
)]
#[repr(C, u8)]
pub enum GenericZIndex<I> {
/// An integer value.
Integer(I),
/// The keyword `auto`.
Auto,
}
pub use self::GenericZIndex as ZIndex;
impl<Integer> ZIndex<Integer> {
/// Returns `auto`
#[inline]
pub fn auto() -> Self {
ZIndex::Auto
}
/// Returns whether `self` is `auto`.
#[inline]
pub fn is_auto(self) -> bool {
matches!(self, ZIndex::Auto)
}
/// Returns the integer value if it is an integer, or `auto`.
#[inline]
pub fn integer_or(self, auto: Integer) -> Integer {
match self {
ZIndex::Integer(n) => n,
ZIndex::Auto => auto,
}
}
}<|fim▁end|>
ToAnimatedZero,
<|file_name|>options.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# * Copyright (C) 2012-2014 Croissance Commune
# * Authors:
# * Arezki Feth <[email protected]>;
# * Miotte Julien <[email protected]>;
# * TJEBBES Gaston <[email protected]>
#
# This file is part of Autonomie: management software for CAEs (business and
# employment cooperatives).
#
# Autonomie is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Autonomie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Autonomie. If not, see <http://www.gnu.org/licenses/>.
"""
Base tools for administrable options
"""
from sqlalchemy import (
Column,
Integer,
String,
Boolean,
ForeignKey,
)
from sqlalchemy.util import classproperty
from sqlalchemy.sql.expression import func
from autonomie_base.utils.ascii import camel_case_to_name
from autonomie_base.models.base import (
DBBASE,
default_table_args,
DBSESSION,
)
from autonomie.forms import (
get_hidden_field_conf,
EXCLUDED,
)
class ConfigurableOption(DBBASE):
"""
Base class for options
"""
__table_args__ = default_table_args
id = Column(
Integer,
primary_key=True,
info={'colanderalchemy': get_hidden_field_conf()}
)
label = Column(
String(100),
info={'colanderalchemy': {'title': u'Libellé'}},
nullable=False,
)
active = Column(
Boolean(),
default=True,
info={'colanderalchemy': EXCLUDED}
)
order = Column(
Integer,
default=0,
info={'colanderalchemy': get_hidden_field_conf()}
)
type_ = Column(
'type_',
String(30),
nullable=False,
info={'colanderalchemy': EXCLUDED}
)
@classproperty
def __mapper_args__(cls):
name = cls.__name__
if name == 'ConfigurableOption':
return {
'polymorphic_on': 'type_',
'polymorphic_identity': 'configurable_option'
}
else:<|fim▁hole|>
@classmethod
def query(cls, *args):
query = super(ConfigurableOption, cls).query(*args)
query = query.filter(ConfigurableOption.active == True)
query = query.order_by(ConfigurableOption.order)
return query
def __json__(self, request):
return dict(
id=self.id,
label=self.label,
active=self.active,
)
def move_up(self):
"""
Move the current instance up in the category's order
"""
order = self.order
if order > 0:
new_order = order - 1
self.__class__.insert(self, new_order)
def move_down(self):
"""
Move the current instance down in the category's order
"""
order = self.order
new_order = order + 1
self.__class__.insert(self, new_order)
@classmethod
def get_next_order(cls):
"""
:returns: The next available order
:rtype: int
"""
query = DBSESSION().query(func.max(cls.order)).filter_by(active=True)
query = query.filter_by(
type_=cls.__mapper_args__['polymorphic_identity']
)
query = query.first()
if query is not None and query[0] is not None:
result = query[0] + 1
else:
result = 0
return result
@classmethod
def _query_active_items(cls):
"""
Build a query to collect active items of the current class
:rtype: :class:`sqlalchemy.Query`
"""
return DBSESSION().query(cls).filter_by(
type_=cls.__mapper_args__['polymorphic_identity']
).filter_by(active=True)
@classmethod
def insert(cls, item, new_order):
"""
Place the item at the given index
:param obj item: The item to move
:param int new_order: The new index of the item
"""
query = cls._query_active_items()
items = query.filter(cls.id != item.id).order_by(cls.order).all()
items.insert(new_order, item)
for index, item in enumerate(items):
item.order = index
DBSESSION().merge(item)
@classmethod
def reorder(cls):
"""
Regenerate order attributes
"""
items = cls._query_active_items().order_by(cls.order).all()
for index, item in enumerate(items):
item.order = index
DBSESSION().merge(item)
def get_id_foreignkey_col(foreignkey_str):
"""
Return an id column as a foreignkey with correct colander configuration
foreignkey_str
The foreignkey our id is pointing to
"""
column = Column(
"id",
Integer,
ForeignKey(foreignkey_str),
primary_key=True,
info={'colanderalchemy': get_hidden_field_conf()},
)
return column<|fim▁end|>
return {'polymorphic_identity': camel_case_to_name(name)}
<|file_name|>4-first-mesh.js<|end_file_name|><|fim▁begin|>var Mesh = require('../../lib/mesh');
var config = {
name: 'remoteMesh',
dataLayer: {
port: 3001,
authTokenSecret: 'a256a2fd43bf441483c5177fc85fd9d3',
systemSecret: 'mesh',
secure: true,
adminPassword: 'guessme',
},
endpoints: {},
modules: {
"remoteComponent": {
path: __dirname + "/4-remote-component",
constructor: {
type: "sync",
parameters: []
}
}
},
components: {
"remoteComponent": {
moduleName: "remoteComponent",
schema: {
"exclusive": false,
"methods": {
"remoteFunction": {
parameters: [
{name: 'one', required: true},
{name: 'two', required: true},
{name: 'three', required: true},
{name: 'callback', type: 'callback', required: true}
]
}
,
"causeError": {
parameters: [
{name: 'callback', type: 'callback', required: true}
]
}
}
}
}
}
};
(new Mesh()).initialize(config, function (err) {
if (err) {
console.log(err);
process.exit(err.code || 1);
return;
}<|fim▁hole|><|fim▁end|>
console.log('READY');
});
<|file_name|>simplysupplements.py<|end_file_name|><|fim▁begin|><|fim▁hole|>import os
from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from scrapy.http import Request, HtmlResponse
from scrapy.utils.response import get_base_url
from scrapy.utils.url import urljoin_rfc
from urllib import urlencode
import hashlib
import csv
from product_spiders.items import Product, ProductLoaderWithNameStrip\
as ProductLoader
from scrapy import log
HERE = os.path.abspath(os.path.dirname(__file__))
class SimplySupplementsSpider(BaseSpider):
name = 'simplysupplements.net-merckgroup'
allowed_domains = ['www.simplysupplements.net', 'simplysupplements.net']
start_urls = ('http://www.simplysupplements.net/product-a-to-z/',)
def parse(self, response):
if not isinstance(response, HtmlResponse):
return
hxs = HtmlXPathSelector(response)
# getting product links from A-Z product list
links = hxs.select('//ul[@id="product-a-to-z"]/li/a/@href').extract()
for prod_url in links:
url = urljoin_rfc(get_base_url(response), prod_url)
yield Request(url)
# products
for product in self.parse_product(response):
yield product
def parse_product(self, response):
if not isinstance(response, HtmlResponse):
return
hxs = HtmlXPathSelector(response)
name = hxs.select('//div[@class="innercol"]/h1/text()').extract()
if name:
url = response.url
url = urljoin_rfc(get_base_url(response), url)
skus = hxs.select('//td[@class="size"]/strong/text()').extract()
prices = hxs.select('//td[@class="price"]/text()').extract()
skus_prices = zip(skus, prices)
for sku, price in skus_prices:
loader = ProductLoader(item=Product(), selector=hxs)
loader.add_value('url', url)
loader.add_value('name', name[0].strip() + ' ' + sku.strip(':'))
#loader.add_value('sku', sku)
loader.add_value('price', price)
yield loader.load_item()<|fim▁end|>
import re
<|file_name|>sftp.py<|end_file_name|><|fim▁begin|>import os
import stat
import socket
import paramiko
from transfert.statresult import stat_result
from transfert.resources._resource import _Resource
from transfert.exceptions import TransfertFileExistsError, TransfertFileNotFoundError
class SftpResource(_Resource):
    KNOWN_HOSTS_FILE = '~/.ssh/known_hosts'
GSS_AUTH = False
GSS_KEX = False
_DEFAULT_PORT = 22
def __init__(self, url):
_Resource.__init__(self, url)
self.__client = None
self._transport = None
self._fd = None
def exists(self):
try:
return self.isfile() or self.isdir()
except FileNotFoundError:<|fim▁hole|>
def _get_hostkey(self):
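        # Look up this host's key in the local known_hosts file; fall back to
        # None (no host key verification) when the file or entry is missing.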
try:
            host_keys = paramiko.util.load_host_keys(os.path.expanduser(self.KNOWN_HOSTS_FILE))
htype = host_keys[self.url.host].keys()[0]
return host_keys[self.url.host][htype]
except (IOError, KeyError):
return None
def _connect(self):
self._transport = paramiko.Transport((self.url.host, self.url.port or self._DEFAULT_PORT))
self._transport.connect(self._get_hostkey(),
self.url.user,
self.url.password,
gss_host=socket.getfqdn(self.url.host),
gss_auth=self.GSS_AUTH,
gss_kex=self.GSS_KEX)
self.__client = paramiko.SFTPClient.from_transport(self._transport)
self.__client.chdir()
def open(self, flags):
self._fd = self._client.open(self.url.path[1:], flags)
@property
def _client(self):
if self.__client is None:
self._connect()
return self.__client
def isfile(self):
try:
with self('r'):
return stat.S_ISREG(self.stat().st_mode)
except IOError:
return False
def isdir(self):
try:
with self('r'):
return stat.S_ISDIR(self.stat().st_mode)
except IOError:
return False
def listdir(self, path=None):
if self.isfile():
yield self
elif self.isdir():
for entry in self._client.listdir(self.url.path[1:] + '/'):
yield self.join(entry)
else:
raise FileNotFoundError(self)
def close(self):
if self._fd:
self._fd.close()
self._fd = None
if self._transport:
self._transport.close()
self._transport = None
if self.__client is not None:
self.__client.close()
self.__client = None
def stat(self):
stat_res = self._client.stat(self.url.path[1:])
return stat_result(
st_atime=stat_res.st_atime,
st_gid=stat_res.st_gid,
st_mode=stat_res.st_mode,
st_mtime=stat_res.st_mtime,
st_size=stat_res.st_size,
st_uid=stat_res.st_uid,
)
def size(self):
return self.stat().st_size
def delete(self):
if self.isfile():
self._client.remove(self.url.path[1:])
elif self.isdir():
self._client.rmdir(self.url.path[1:])
else:
raise TransfertFileNotFoundError(self)
def chmod(self, mode):
self._client.chmod(self.url.path[1:], mode)
def read(self, size):
return iter(lambda: self._fd.read(size), b'')
def write(self, data):
self._fd.write(data)
def mkdir(self, name=None):
# Can be optimized after connection pool setup
if name is None:
if self.isfile():
raise TransfertFileExistsError(self)
elif not self.isdir():
self._client.mkdir(self.url.path[1:])
return self
else:
dire = self.join(name)
if dire.isfile():
raise TransfertFileExistsError(self)
elif not dire.isdir():
return dire.mkdir()
return dire
def __del__(self):
self.close()<|fim▁end|>
return False
<|file_name|>soy2html.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# This script is designed to be invoked by soy2html.sh.
# Usage:
#
# buck/docs$ python soy2html.py <output_dir>
#
# This will write all of the static content to the specified output directory.
# You may want to verify that things worked correctly by running:
#
# python -m SimpleHTTPServer <output_dir>
#
# and then navigating to http://localhost:8000/.
#
# When this script is run, soyweb should already be running locally on port
# 9814 via ./docs/soyweb-prod.sh.
import os
import subprocess
import sys
import time
URL_ROOT = 'http://localhost:9814/'
def main(output_dir):
# Iterate over the files in the docs directory and copy them, as
# appropriate.
for root, dirs, files in os.walk('.'):
for file_name in files:
if file_name.endswith('.soy') and not file_name.startswith('__'):
# Strip the './' prefix, if appropriate.
if root.startswith('./'):
root = root[2:]
# Construct the URL where the .soy file is being served.
soy_file = file_name
html_file = root + '/' + soy_file[:-len('.soy')] + '.html'
url = URL_ROOT + html_file
copy_dest = ensure_dir(html_file, output_dir)
subprocess.check_call([
"curl", "--fail", "--output", copy_dest, url
])
elif (file_name == ".nojekyll" or
file_name == "CNAME" or
file_name.endswith('.css') or
file_name.endswith('.jpg') or
file_name.endswith('.js') or
file_name.endswith('.png') or
file_name.endswith('.gif') or
file_name.endswith('.html') or
file_name.endswith('.md') or
file_name.endswith('.svg') or
file_name.endswith('.ttf') or
file_name.endswith('.txt')):
# Copy the static resource to output_dir.
relative_path = os.path.join(root, file_name)
with open(relative_path) as resource_file:
resource = resource_file.read()
copy_to_output_dir(relative_path, output_dir, resource)
def ensure_dir(path, output_dir):
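    # Create the file's parent directory under output_dir if needed and
    # return the full destination path.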
last_slash = path.rfind('/')
if last_slash != -1:<|fim▁hole|>
return os.path.join(output_dir, path)
def copy_to_output_dir(path, output_dir, content):
output_file = ensure_dir(path, output_dir)
with open(output_file, 'w') as f:
f.write(content)
def pollForServerReady():
SERVER_START_POLL = 5
print 'Waiting for server to start.'
for _ in range(0, SERVER_START_POLL):
result = subprocess.call(['curl', '--fail', '-I', URL_ROOT])
if result == 0:
return
time.sleep(1)
print 'Server failed to start after %s seconds.' % SERVER_START_POLL
if __name__ == '__main__':
output_dir = sys.argv[1]
pollForServerReady()
main(output_dir)<|fim▁end|>
output_subdir = os.path.join(output_dir, path[:last_slash])
if not os.path.exists(output_subdir):
os.makedirs(output_subdir)
<|file_name|>1_generateImage.py<|end_file_name|><|fim▁begin|># coding: utf-8
import random
from PIL import Image
from PIL import ImageDraw
from PIL import ImageFont
import sys
import os
# how many pictures to generate
num = 10
if len(sys.argv) > 1:
num = int(sys.argv[1])
<|fim▁hole|> '''
    generate one image containing a single line of text, saving the image
    as <filename>.jpg and the text label as <filename>.txt
'''
w, h = font.getsize(text)
image = Image.new('RGB', (w + 15, h + 15), 'white')
brush = ImageDraw.Draw(image)
brush.text((8, 5), text, font=font, fill=(0, 0, 0))
image.save(filename + '.jpg')
with open(filename + '.txt', 'w') as f:
f.write(text)
f.close()
if __name__ == '__main__':
if not os.path.isdir('./lines/'):
os.mkdir('./lines/')
for i in range(num):
fontname = './fonts/simkai.ttf'
fontsize = 24
font = ImageFont.truetype(fontname, fontsize)
text = str(random.randint(1000000000, 9999999999))
text = text + str(random.randint(1000000000, 9999999999))
#text = str(random.randint(1000, 9999))
filename = './lines/' + str(i + 1)
genline(text, font, filename)
pass<|fim▁end|>
def genline(text, font, filename):
<|file_name|>CertificateExpiredExceptionTest.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Vera Y. Petrashkova
*/
package org.apache.harmony.security.tests.java.security.cert;
import java.security.cert.CertificateExpiredException;
import junit.framework.TestCase;
/**
 * Tests for <code>CertificateExpiredException</code> class constructors and methods.
*
*/
public class CertificateExpiredExceptionTest extends TestCase {
public static void main(String[] args) {
}
/**
 * Constructor for CertificateExpiredExceptionTest.
*
* @param arg0
*/<|fim▁hole|> }
static String[] msgs = {
"",
"Check new message",
"Check new message Check new message Check new message Check new message Check new message" };
static Throwable tCause = new Throwable("Throwable for exception");
/**
* Test for <code>CertificateExpiredException()</code> constructor
* Assertion: constructs CertificateExpiredException with no detail message
*/
public void testCertificateExpiredException01() {
CertificateExpiredException tE = new CertificateExpiredException();
assertNull("getMessage() must return null.", tE.getMessage());
assertNull("getCause() must return null", tE.getCause());
}
/**
* Test for <code>CertificateExpiredException(String)</code> constructor
* Assertion: constructs CertificateExpiredException with detail message
* msg. Parameter <code>msg</code> is not null.
*/
public void testCertificateExpiredException02() {
CertificateExpiredException tE;
for (int i = 0; i < msgs.length; i++) {
tE = new CertificateExpiredException(msgs[i]);
assertEquals("getMessage() must return: ".concat(msgs[i]), tE
.getMessage(), msgs[i]);
assertNull("getCause() must return null", tE.getCause());
}
}
/**
* Test for <code>CertificateExpiredException(String)</code> constructor
* Assertion: constructs CertificateExpiredException when <code>msg</code>
* is null
*/
public void testCertificateExpiredException03() {
String msg = null;
CertificateExpiredException tE = new CertificateExpiredException(msg);
assertNull("getMessage() must return null.", tE.getMessage());
assertNull("getCause() must return null", tE.getCause());
}
}<|fim▁end|>
public CertificateExpiredExceptionTest(String arg0) {
super(arg0);
<|file_name|>data-source.js<|end_file_name|><|fim▁begin|>const escapeStringRegexp = require('escape-string-regexp');
const query = require('../query/query');
const Sort = require('../helpers/sort');
const StringScore = require('../helpers/string-score');
const DataSource = {
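  // Prefix-search cell lines by name, scoring each match against the query
  // string and returning the results best-first.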
cells: (name) => {
return new Promise((resolve) => {
// calculate text search score
const textMatch = (matches) => {
const arrWithScores = matches.map((matchedTerm) => {
return Object.assign(
{},
matchedTerm,
{
score: StringScore(matchedTerm.name, name),
}
);
});
const sorted = Sort.arrayOfObjectByKeyNumber(arrWithScores, 'score', 'desc');
// add _id to name because some cell lines have the same name
return sorted.map((cell) => {
return Object.assign(
{},
cell,
{
name: `${cell.name}; ${cell._id}`,
}
);
});
};
if (!name) {
resolve({
status: 200,
clientResponse: {
status: 200,
data: [],
            message: 'Cell string searched',
},
});
} else {
const escapedString = escapeStringRegexp(name);
const re = new RegExp(`^${escapedString}`, 'i');
query.get('cells', { name: { $regex: re } }, { })
.then((matches) => {
const sortedResults = textMatch(matches);
resolve({
status: 200,
clientResponse: {
status: 200,
data: sortedResults,
message: 'Cell string searched',
},
});
})
.catch((error) => {
resolve({
status: 500,
clientResponse: {
status: 500,
message: `There was an error querying the text string ${name}: ${error}`,
},
});
})<|fim▁hole|> species: (name) => {
return new Promise((resolve) => {
// calculate text search score
const textMatch = (matches) => {
const arrWithScores = matches.map((matchedTerm) => {
return Object.assign(
{},
matchedTerm,
{
score: StringScore(matchedTerm.name, name),
}
);
});
return Sort.arrayOfObjectByKeyNumber(arrWithScores, 'score', 'desc');
};
if (!name) {
resolve({
status: 200,
clientResponse: {
status: 200,
data: [],
message: 'Species string searched',
},
});
} else {
const escapedString = escapeStringRegexp(name);
const re = new RegExp(`^${escapedString}`, 'i');
query.get('species', { name: { $regex: re } }, { })
.then((matches) => {
const sortedResults = textMatch(matches);
resolve({
status: 200,
clientResponse: {
status: 200,
data: sortedResults,
message: 'Species string searched',
},
});
})
.catch((error) => {
resolve({
status: 500,
clientResponse: {
status: 500,
message: `There was an error querying the text string ${name}: ${error}`,
},
});
})
;
}
});
},
};
module.exports = DataSource;<|fim▁end|>
;
}
});
},
<|file_name|>ProductCreate.java<|end_file_name|><|fim▁begin|>package weixin.popular.bean.scan.crud;
import weixin.popular.bean.scan.base.ProductGet;<|fim▁hole|>public class ProductCreate extends ProductGet {
private BrandInfo brand_info;
public BrandInfo getBrand_info() {
return brand_info;
}
public void setBrand_info(BrandInfo brand_info) {
this.brand_info = brand_info;
}
}<|fim▁end|>
import weixin.popular.bean.scan.info.BrandInfo;
<|file_name|>services.js<|end_file_name|><|fim▁begin|>const https = require('https')
const cookie = require('cookie');
var exports = module.exports = {}
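// Perform an HTTPS request (adding multipart form headers when form data is
// supplied) and resolve with the response headers once the body is consumed.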
exports.getResponseHeaders = function(httpOptions, formData) {
if (formData) {
httpOptions.headers = formData.getHeaders()
}
return new Promise((resolve, reject) => {
const request = https.request(httpOptions, (response) => {
// handle http errors
if (response.statusCode < 200 || response.statusCode > 299) {
reject(new Error('Failed to load page, status code: ' + response.statusCode));
}
      // drain the response body; only the headers are needed here
      response.on('data', () => {});
      // once the body has been consumed, resolve with the response headers
      response.on('end', () => resolve(response.headers));
});
// handle connection errors of the request
request.on('error', (err) => reject(err))<|fim▁hole|>
request.end()
})
}
exports.getCookies = function(headers) {
  let cookies = {}
headers['set-cookie'].forEach((element) => {
cookies = Object.assign(cookies, cookie.parse(element))
})
return cookies
}<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from zope.i18nmessageid import MessageFactory
PloneMessageFactory = MessageFactory('plone')<|fim▁hole|>
from Products.CMFCore.permissions import setDefaultRoles
setDefaultRoles('signature.portlets.gdsignature: Add GroupDocs Signature portlet',
('Manager', 'Site Administrator', 'Owner',))<|fim▁end|>
<|file_name|>compat.py<|end_file_name|><|fim▁begin|># flake8: noqa
###############################################################################
# Compat file to import the correct modules for each platform and python
# version.
#
# author: Thomas Moreau and Olivier grisel
#
import sys<|fim▁hole|> import Queue as queue
from pickle import PicklingError
if sys.version_info >= (3, 4):
from multiprocessing.process import BaseProcess
else:
from multiprocessing.process import Process as BaseProcess
# Platform specific compat
if sys.platform == "win32":
from .compat_win32 import *
else:
from .compat_posix import *<|fim▁end|>
if sys.version_info[:2] >= (3, 3):
import queue
else:
<|file_name|>events.py<|end_file_name|><|fim▁begin|>#
# Copyright 2015-2016 Red Hat, Inc.
#<|fim▁hole|># This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import absolute_import
import collections
import logging
import threading
Callback = collections.namedtuple('Callback',
['conn', 'dom', 'body', 'opaque'])
def _null_cb(*args, **kwargs):
pass
_NULL = Callback(None, None, _null_cb, tuple())
class Handler(object):
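    """Thread-safe registry mapping event ids to callbacks, with an
    optional fallback to a parent handler for unknown events."""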
_log = logging.getLogger('convirt.event')
_null = [_NULL]
def __init__(self, name=None, parent=None):
self._name = id(self) if name is None else name
self._parent = parent
self._lock = threading.Lock()
self.events = collections.defaultdict(list)
def register(self, event_id, conn, dom, func, opaque=None):
with self._lock:
# TODO: weakrefs?
cb = Callback(conn, dom, func, opaque)
# TODO: debug?
self._log.info('[%s] %i -> %s', self._name, event_id, cb)
self.events[event_id].append(cb)
def fire(self, event_id, dom, *args):
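        # Invoke the callbacks registered for event_id; note that the return
        # inside the loop means only the first callback's result is used.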
for cb in self.get_callbacks(event_id):
arguments = list(args)
if cb.opaque is not None:
arguments.append(cb.opaque)
domain = cb.dom
if dom is not None:
domain = dom
self._log.debug('firing: %s(%s, %s, %s)',
cb.body, cb.conn, domain, arguments)
return cb.body(cb.conn, domain, *arguments)
def get_callbacks(self, event_id):
with self._lock:
callback = self.events.get(event_id, None)
if callback is not None:
return callback
if self._parent is not None:
self._log.warning('[%s] unknown event %r',
self._name, event_id)
return self._parent.get_callbacks(event_id)
# TODO: debug?
self._log.warning('[%s] unhandled event %r', self._name, event_id)
return self._null
@property
def registered(self):
with self._lock:
return tuple(self.events.keys())
# for testing purposes
def clear(self):
with self._lock:
self.events.clear()
root = Handler(name='root')
def fire(event_id, dom, *args):
global root
root.fire(event_id, dom, *args)<|fim▁end|>
<|file_name|>address-create.component.spec.ts<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2014-2021 Bjoern Kimminich.
* SPDX-License-Identifier: MIT
*/
import { TranslateModule, TranslateService } from '@ngx-translate/core'
import { HttpClientTestingModule } from '@angular/common/http/testing'
import { MatCardModule } from '@angular/material/card'
import { MatFormFieldModule } from '@angular/material/form-field'
import { ComponentFixture, fakeAsync, TestBed, waitForAsync } from '@angular/core/testing'
import { AddressCreateComponent } from './address-create.component'
import { MatInputModule } from '@angular/material/input'
import { ReactiveFormsModule } from '@angular/forms'
import { BrowserAnimationsModule } from '@angular/platform-browser/animations'
import { of, throwError } from 'rxjs'
import { RouterTestingModule } from '@angular/router/testing'
import { AddressService } from '../Services/address.service'
import { MatGridListModule } from '@angular/material/grid-list'
import { EventEmitter } from '@angular/core'
import { MatIconModule } from '@angular/material/icon'
import { MatSnackBar, MatSnackBarModule } from '@angular/material/snack-bar'
describe('AddressCreateComponent', () => {
let component: AddressCreateComponent
let fixture: ComponentFixture<AddressCreateComponent>
let addressService
let translateService
let snackBar: any
beforeEach(waitForAsync(() => {
addressService = jasmine.createSpyObj('AddressService', ['getById', 'put', 'save'])
addressService.save.and.returnValue(of({}))
addressService.getById.and.returnValue(of({}))
addressService.put.and.returnValue(of({}))
translateService = jasmine.createSpyObj('TranslateService', ['get'])
translateService.get.and.returnValue(of({}))
translateService.onLangChange = new EventEmitter()
translateService.onTranslationChange = new EventEmitter()
translateService.onDefaultLangChange = new EventEmitter()
snackBar = jasmine.createSpyObj('MatSnackBar', ['open'])
snackBar.open.and.returnValue(null)
TestBed.configureTestingModule({
imports: [
RouterTestingModule,
TranslateModule.forRoot(),
HttpClientTestingModule,
ReactiveFormsModule,
BrowserAnimationsModule,
MatCardModule,
MatFormFieldModule,
MatInputModule,
MatGridListModule,
MatIconModule,
MatSnackBarModule
],
declarations: [AddressCreateComponent],
providers: [
{ provide: AddressService, useValue: addressService },
{ provide: TranslateService, useValue: translateService },
{ provide: MatSnackBar, useValue: snackBar }
]
})
.compileComponents()
}))
beforeEach(() => {
fixture = TestBed.createComponent(AddressCreateComponent)
component = fixture.componentInstance<|fim▁hole|> expect(component).toBeTruthy()
})
it('should reinitizalise forms by calling resetForm', () => {
component.countryControl.setValue('US')
component.nameControl.setValue('jim')
component.numberControl.setValue(9800000000)
component.pinControl.setValue('NX 101')
component.addressControl.setValue('Bakers Street')
component.cityControl.setValue('NYC')
component.stateControl.setValue('NY')
component.resetForm()
expect(component.countryControl.value).toBe('')
expect(component.countryControl.pristine).toBe(true)
expect(component.countryControl.untouched).toBe(true)
expect(component.nameControl.value).toBe('')
expect(component.nameControl.pristine).toBe(true)
expect(component.nameControl.untouched).toBe(true)
expect(component.numberControl.value).toBe('')
expect(component.numberControl.pristine).toBe(true)
expect(component.numberControl.untouched).toBe(true)
expect(component.pinControl.value).toBe('')
expect(component.pinControl.pristine).toBe(true)
expect(component.pinControl.untouched).toBe(true)
expect(component.addressControl.value).toBe('')
expect(component.addressControl.pristine).toBe(true)
expect(component.addressControl.untouched).toBe(true)
expect(component.cityControl.value).toBe('')
expect(component.cityControl.pristine).toBe(true)
expect(component.cityControl.untouched).toBe(true)
expect(component.stateControl.value).toBe('')
expect(component.stateControl.pristine).toBe(true)
expect(component.stateControl.untouched).toBe(true)
})
it('should be compulsory to provide country', () => {
component.countryControl.setValue('')
expect(component.countryControl.valid).toBeFalsy()
})
it('should be compulsory to provide name', () => {
component.nameControl.setValue('')
expect(component.nameControl.valid).toBeFalsy()
})
it('should be compulsory to provide number', () => {
component.numberControl.setValue('')
expect(component.numberControl.valid).toBeFalsy()
})
it('should be compulsory to provide pin', () => {
component.pinControl.setValue('')
expect(component.pinControl.valid).toBeFalsy()
})
it('should be compulsory to provide address', () => {
component.addressControl.setValue('')
expect(component.addressControl.valid).toBeFalsy()
})
it('should be compulsory to provide city', () => {
component.cityControl.setValue('')
expect(component.cityControl.valid).toBeFalsy()
})
it('should not be compulsory to provide state', () => {
component.stateControl.setValue('')
expect(component.stateControl.valid).toBe(true)
})
it('pin code should not be more than 8 characters', () => {
let str: string = ''
for (let i = 0; i < 9; ++i) {
str += 'a'
}
component.pinControl.setValue(str)
expect(component.pinControl.valid).toBeFalsy()
str = str.slice(1)
component.pinControl.setValue(str)
expect(component.pinControl.valid).toBe(true)
})
it('address should not be more than 160 characters', () => {
let str: string = ''
for (let i = 0; i < 161; ++i) {
str += 'a'
}
component.addressControl.setValue(str)
expect(component.addressControl.valid).toBeFalsy()
str = str.slice(1)
component.addressControl.setValue(str)
expect(component.addressControl.valid).toBe(true)
})
it('number should be in the range [1111111, 9999999999]', () => {
component.numberControl.setValue(1111110)
expect(component.numberControl.valid).toBeFalsy()
component.numberControl.setValue(10000000000)
expect(component.numberControl.valid).toBeFalsy()
component.numberControl.setValue(9999999999)
expect(component.numberControl.valid).toBe(true)
component.numberControl.setValue(1111111)
expect(component.numberControl.valid).toBe(true)
})
it('should reset the form on updating address and show confirmation', () => {
addressService.put.and.returnValue(of({ city: 'NY' }))
translateService.get.and.returnValue(of('ADDRESS_UPDATED'))
component.mode = 'edit'
spyOn(component, 'resetForm')
spyOn(component, 'ngOnInit')
component.save()
expect(translateService.get).toHaveBeenCalledWith('ADDRESS_UPDATED', { city: 'NY' })
expect(component.ngOnInit).toHaveBeenCalled()
expect(component.resetForm).toHaveBeenCalled()
})
it('should reset the form on adding address and show confirmation', () => {
addressService.save.and.returnValue(of({ city: 'NY' }))
translateService.get.and.returnValue(of('ADDRESS_ADDED'))
spyOn(component, 'resetForm')
spyOn(component, 'ngOnInit')
component.save()
expect(translateService.get).toHaveBeenCalledWith('ADDRESS_ADDED', { city: 'NY' })
expect(component.ngOnInit).toHaveBeenCalled()
expect(component.resetForm).toHaveBeenCalled()
})
it('should clear the form and display error if saving address fails', fakeAsync(() => {
addressService.save.and.returnValue(throwError({ error: 'Error' }))
spyOn(component, 'resetForm')
component.save()
expect(component.resetForm).toHaveBeenCalled()
expect(snackBar.open).toHaveBeenCalled()
}))
it('should clear the form and display error if updating address fails', fakeAsync(() => {
addressService.put.and.returnValue(throwError({ error: 'Error' }))
component.mode = 'edit'
spyOn(component, 'resetForm')
component.save()
expect(component.resetForm).toHaveBeenCalled()
expect(snackBar.open).toHaveBeenCalled()
}))
it('should populate the form on calling initializeForm', () => {
component.initializeForm({ country: 'US', fullName: 'jim', mobileNum: 9800000000, zipCode: 'NX 101', streetAddress: 'Bakers Street', city: 'NYC', state: 'NY' })
expect(component.countryControl.value).toBe('US')
expect(component.nameControl.value).toBe('jim')
expect(component.numberControl.value).toBe(9800000000)
expect(component.pinControl.value).toBe('NX 101')
expect(component.addressControl.value).toBe('Bakers Street')
expect(component.cityControl.value).toBe('NYC')
expect(component.stateControl.value).toBe('NY')
})
})<|fim▁end|>
fixture.detectChanges()
})
it('should create', () => {
<|file_name|>Destructor.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
# TODO: implement the destructor agent
<|file_name|>platform.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
// --- THIS FILE IS TEMPORARY UNTIL ENV.TS IS CLEANED UP. IT CAN SAFELY BE USED IN ALL TARGET EXECUTION ENVIRONMENTS (node & dom) ---
let _isWindows = false;
let _isMacintosh = false;
let _isLinux = false;
let _isRootUser = false;
let _isNative = false;
let _isWeb = false;
let _isQunit = false;
let _locale = undefined;
let _language = undefined;
interface NLSConfig {
locale: string;
availableLanguages: { [key: string]: string; };
}
export interface IProcessEnvironment {
[key: string]: string;
}
interface INodeProcess {
platform: string;
env: IProcessEnvironment;<|fim▁hole|> getuid(): number;
}
declare let process: INodeProcess;
declare let global: any;
interface INavigator {
userAgent: string;
language: string;
}
declare let navigator: INavigator;
declare let self: any;
export const LANGUAGE_DEFAULT = 'en';
// OS detection
if (typeof process === 'object') {
_isWindows = (process.platform === 'win32');
_isMacintosh = (process.platform === 'darwin');
_isLinux = (process.platform === 'linux');
_isRootUser = !_isWindows && (process.getuid() === 0);
let rawNlsConfig = process.env['VSCODE_NLS_CONFIG'];
if (rawNlsConfig) {
try {
let nlsConfig: NLSConfig = JSON.parse(rawNlsConfig);
let resolved = nlsConfig.availableLanguages['*'];
_locale = nlsConfig.locale;
// VSCode's default language is 'en'
_language = resolved ? resolved : LANGUAGE_DEFAULT;
} catch (e) {
}
}
_isNative = true;
} else if (typeof navigator === 'object') {
let userAgent = navigator.userAgent;
_isWindows = userAgent.indexOf('Windows') >= 0;
_isMacintosh = userAgent.indexOf('Macintosh') >= 0;
_isLinux = userAgent.indexOf('Linux') >= 0;
_isWeb = true;
_locale = navigator.language;
_language = _locale;
_isQunit = !!(<any>self).QUnit;
}
export enum Platform {
Web,
Mac,
Linux,
Windows
}
export let _platform: Platform = Platform.Web;
if (_isNative) {
if (_isMacintosh) {
_platform = Platform.Mac;
} else if (_isWindows) {
_platform = Platform.Windows;
} else if (_isLinux) {
_platform = Platform.Linux;
}
}
export const isWindows = _isWindows;
export const isMacintosh = _isMacintosh;
export const isLinux = _isLinux;
export const isRootUser = _isRootUser;
export const isNative = _isNative;
export const isWeb = _isWeb;
export const isQunit = _isQunit;
export const platform = _platform;
/**
* The language used for the user interface. The format of
* the string is all lower case (e.g. zh-tw for Traditional
* Chinese)
*/
export const language = _language;
/**
* The OS locale or the locale specified by --locale. The format of
* the string is all lower case (e.g. zh-tw for Traditional
* Chinese). The UI is not necessarily shown in the provided locale.
*/
export const locale = _locale;
export interface TimeoutToken {
}
export interface IntervalToken {
}
interface IGlobals {
Worker?: any;
setTimeout(callback: (...args: any[]) => void, delay: number, ...args: any[]): TimeoutToken;
clearTimeout(token: TimeoutToken): void;
setInterval(callback: (...args: any[]) => void, delay: number, ...args: any[]): IntervalToken;
clearInterval(token: IntervalToken);
}
const _globals = <IGlobals>(typeof self === 'object' ? self : global);
export const globals: any = _globals;
export function hasWebWorkerSupport(): boolean {
return typeof _globals.Worker !== 'undefined';
}
export const setTimeout = _globals.setTimeout.bind(_globals);
export const clearTimeout = _globals.clearTimeout.bind(_globals);
export const setInterval = _globals.setInterval.bind(_globals);
export const clearInterval = _globals.clearInterval.bind(_globals);<|fim▁end|>
<|file_name|>p1.rs<|end_file_name|><|fim▁begin|>use rust_utils::*;
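// Decode the hex input, re-encode it as base64 and compare the result
// against the expected string.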
pub fn main() {
let decoded = to_bytes::from_hex("49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f69736f6e6f7573206d757368726f6f6d");
let encoded = from_bytes::to_b64(&decoded);
let matches = encoded == "SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t";
<|fim▁hole|><|fim▁end|>
println!("{}\nEncoded string matches: {}", encoded, matches);
}
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'CryptoKnocker.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^$', include("mainpage.urls")),
url(r"^login/$", "mainpage.views.login_form"),<|fim▁hole|> url(r'^management/keys/$', "management.views.manageKeys"),
url(r'^management/keys/changeKey$', "management.views.changeKey"),
url(r'^management/getPorts/$', "management.views.getPorts"),
url(r'^(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT})
)<|fim▁end|>
url(r'^management/$', "management.views.index"),
url(r'^management/login$', "management.views.user_login"),
url(r'^logout/$', "management.views.user_logout"),
url(r'^management/registration/$', "management.views.registration"),
<|file_name|>TestMriServer.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
import unittest
from mri import MriServer
from mri.dispatch import MriServerDispatch<|fim▁hole|>
class TestMriServer(unittest.TestCase):
def test_new_dispatch(self):
server = MriServer("http://www.httpbin.com", "testuser", "testpass")
task = {"title": "TEST", "id": "000112233"}
dispatch = server.new_dispatch(task)
test_against = MriServerDispatch(task, "http://www.httpbin.com", "testuser", "testpass")
self.assertEqual(dispatch, test_against)
if __name__ == '__main__':
unittest.main()<|fim▁end|>
<|file_name|>superclass.py<|end_file_name|><|fim▁begin|>class Foo:
def __init__(self):
pass
class Bar(Foo):
pass<|fim▁hole|>
Bar()
#<ref><|fim▁end|>
<|file_name|>msvs_emulation.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This module helps emulate Visual Studio 2008 behavior on top of other
build systems, primarily ninja.
"""
import os
import re
import subprocess
import sys
from gyp.common import OrderedSet
import gyp.MSVSUtil
import gyp.MSVSVersion
windows_quoter_regex = re.compile(r'(\\*)"')
def QuoteForRspFile(arg):
"""Quote a command line argument so that it appears as one argument when
processed via cmd.exe and parsed by CommandLineToArgvW (as is typical for
Windows programs)."""
# See http://goo.gl/cuFbX and http://goo.gl/dhPnp including the comment
# threads. This is actually the quoting rules for CommandLineToArgvW, not
# for the shell, because the shell doesn't do anything in Windows. This
# works more or less because most programs (including the compiler, etc.)
# use that function to handle command line arguments.
# For a literal quote, CommandLineToArgvW requires 2n+1 backslashes
# preceding it, and results in n backslashes + the quote. So we substitute
# in 2* what we match, +1 more, plus the quote.
arg = windows_quoter_regex.sub(lambda mo: 2 * mo.group(1) + '\\"', arg)
# %'s also need to be doubled otherwise they're interpreted as batch
# positional arguments. Also make sure to escape the % so that they're
# passed literally through escaping so they can be singled to just the
# original %. Otherwise, trying to pass the literal representation that
# looks like an environment variable to the shell (e.g. %PATH%) would fail.
arg = arg.replace('%', '%%')
# These commands are used in rsp files, so no escaping for the shell (via ^)
# is necessary.
# Finally, wrap the whole thing in quotes so that the above quote rule
# applies and whitespace isn't a word break.
return '"' + arg + '"'
def EncodeRspFileList(args):
"""Process a list of arguments using QuoteCmdExeArgument."""
# Note that the first argument is assumed to be the command. Don't add
# quotes around it because then built-ins like 'echo', etc. won't work.
# Take care to normpath only the path in the case of 'call ../x.bat' because
# otherwise the whole thing is incorrectly interpreted as a path and not
# normalized correctly.
if not args: return ''
if args[0].startswith('call '):
call, program = args[0].split(' ', 1)
program = call + ' ' + os.path.normpath(program)
else:
program = os.path.normpath(args[0])
return program + ' ' + ' '.join(QuoteForRspFile(arg) for arg in args[1:])
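# For example (illustrative, hypothetical arguments):
#   EncodeRspFileList(['cl.exe', r'/Fo out dir\x.obj'])
#   returns:  cl.exe "/Fo out dir\x.obj"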
def _GenericRetrieve(root, default, path):
"""Given a list of dictionary keys |path| and a tree of dicts |root|, find
value at path, or return |default| if any of the path doesn't exist."""
if not root:
return default
if not path:
return root
return _GenericRetrieve(root.get(path[0]), default, path[1:])
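# For example:
#   _GenericRetrieve({'a': {'b': 1}}, 0, ['a', 'b'])  -> 1
#   _GenericRetrieve({'a': {'b': 1}}, 0, ['a', 'x'])  -> 0  (the default)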
def _AddPrefix(element, prefix):
"""Add |prefix| to |element| or each subelement if element is iterable."""
if element is None:
return element
# Note, not Iterable because we don't want to handle strings like that.
if isinstance(element, list) or isinstance(element, tuple):
return [prefix + e for e in element]
else:
return prefix + element
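# For example, _AddPrefix(['foo', 'bar'], '/I') -> ['/Ifoo', '/Ibar'], while
# _AddPrefix('foo', '/I') -> '/Ifoo'.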
def _DoRemapping(element, map):
"""If |element| then remap it through |map|. If |element| is iterable then
each item will be remapped. Any elements not found will be removed."""
if map is not None and element is not None:
if not callable(map):
map = map.get # Assume it's a dict, otherwise a callable to do the remap.
if isinstance(element, list) or isinstance(element, tuple):
element = filter(None, [map(elem) for elem in element])
else:
element = map(element)
return element
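# For example, with a dict map, unmatched items are dropped:
#   _DoRemapping(['1', '9'], {'1': '/O1'}) -> ['/O1']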
def _AppendOrReturn(append, element):
"""If |append| is None, simply return |element|. If |append| is not None,
then add |element| to it, adding each item in |element| if it's a list or
tuple."""
if append is not None and element is not None:
if isinstance(element, list) or isinstance(element, tuple):
append.extend(element)
else:
append.append(element)
else:
return element
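# For example, _AppendOrReturn(None, ['a']) returns ['a'], while
# _AppendOrReturn(some_list, ['a']) extends some_list in place instead.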
def _FindDirectXInstallation():
"""Try to find an installation location for the DirectX SDK. Check for the
standard environment variable, and if that doesn't exist, try to find
via the registry. May return None if not found in either location."""
# Return previously calculated value, if there is one
if hasattr(_FindDirectXInstallation, 'dxsdk_dir'):
return _FindDirectXInstallation.dxsdk_dir
dxsdk_dir = os.environ.get('DXSDK_DIR')
if not dxsdk_dir:
# Setup params to pass to and attempt to launch reg.exe.
cmd = ['reg.exe', 'query', r'HKLM\Software\Microsoft\DirectX', '/s']
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
for line in p.communicate()[0].splitlines():
if 'InstallPath' in line:
dxsdk_dir = line.split(' ')[3] + "\\"
# Cache return value
_FindDirectXInstallation.dxsdk_dir = dxsdk_dir
return dxsdk_dir
def GetGlobalVSMacroEnv(vs_version):
"""Get a dict of variables mapping internal VS macro names to their gyp
equivalents. Returns all variables that are independent of the target."""
env = {}
# '$(VSInstallDir)' and '$(VCInstallDir)' are available when and only when
# Visual Studio is actually installed.
if vs_version.Path():
env['$(VSInstallDir)'] = vs_version.Path()
env['$(VCInstallDir)'] = os.path.join(vs_version.Path(), 'VC') + '\\'
# Chromium uses DXSDK_DIR in include/lib paths, but it may or may not be
# set. This happens when the SDK is sync'd via src-internal, rather than
# by typical end-user installation of the SDK. If it's not set, we don't
# want to leave the unexpanded variable in the path, so simply strip it.
dxsdk_dir = _FindDirectXInstallation()
env['$(DXSDK_DIR)'] = dxsdk_dir if dxsdk_dir else ''
# Try to find an installation location for the Windows DDK by checking
# the WDK_DIR environment variable, may be None.
env['$(WDK_DIR)'] = os.environ.get('WDK_DIR', '')
return env
def ExtractSharedMSVSSystemIncludes(configs, generator_flags):
"""Finds msvs_system_include_dirs that are common to all targets, removes
them from all targets, and returns an OrderedSet containing them."""
all_system_includes = OrderedSet(
configs[0].get('msvs_system_include_dirs', []))
for config in configs[1:]:
system_includes = config.get('msvs_system_include_dirs', [])
all_system_includes = all_system_includes & OrderedSet(system_includes)
if not all_system_includes:
return None
# Expand macros in all_system_includes.
env = GetGlobalVSMacroEnv(GetVSVersion(generator_flags))
expanded_system_includes = OrderedSet([ExpandMacros(include, env)
for include in all_system_includes])
if any(['$' in include for include in expanded_system_includes]):
# Some path relies on target-specific variables, bail.
return None
# Remove system includes shared by all targets from the targets.
for config in configs:
includes = config.get('msvs_system_include_dirs', [])
if includes: # Don't insert a msvs_system_include_dirs key if not needed.
# This must check the unexpanded includes list:
new_includes = [i for i in includes if i not in all_system_includes]
config['msvs_system_include_dirs'] = new_includes
return expanded_system_includes
class MsvsSettings(object):
"""A class that understands the gyp 'msvs_...' values (especially the
  msvs_settings field). They largely correspond to the VS2008 IDE DOM. This
class helps map those settings to command line options."""
def __init__(self, spec, generator_flags):
self.spec = spec
self.vs_version = GetVSVersion(generator_flags)
supported_fields = [
('msvs_configuration_attributes', dict),
('msvs_settings', dict),
('msvs_system_include_dirs', list),
('msvs_disabled_warnings', list),
('msvs_precompiled_header', str),
('msvs_precompiled_source', str),
('msvs_configuration_platform', str),
('msvs_target_platform', str),
]
configs = spec['configurations']
for field, default in supported_fields:
setattr(self, field, {})
for configname, config in configs.iteritems():
getattr(self, field)[configname] = config.get(field, default())
self.msvs_cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])
unsupported_fields = [
'msvs_prebuild',
'msvs_postbuild',
]
unsupported = []
for field in unsupported_fields:
for config in configs.values():
if field in config:
unsupported += ["%s not supported (target %s)." %
(field, spec['target_name'])]
if unsupported:
raise Exception('\n'.join(unsupported))
def GetExtension(self):
"""Returns the extension for the target, with no leading dot.
Uses 'product_extension' if specified, otherwise uses MSVS defaults based on
the target type.
"""
ext = self.spec.get('product_extension', None)
if ext:
return ext
return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec['type'], '')
def GetVSMacroEnv(self, base_to_build=None, config=None):
"""Get a dict of variables mapping internal VS macro names to their gyp
equivalents."""
target_platform = 'Win32' if self.GetArch(config) == 'x86' else 'x64'
target_name = self.spec.get('product_prefix', '') + \
self.spec.get('product_name', self.spec['target_name'])
target_dir = base_to_build + '\\' if base_to_build else ''
target_ext = '.' + self.GetExtension()
target_file_name = target_name + target_ext
replacements = {
'$(InputName)': '${root}',
'$(InputPath)': '${source}',
'$(IntDir)': '$!INTERMEDIATE_DIR',
'$(OutDir)\\': target_dir,
'$(PlatformName)': target_platform,
'$(ProjectDir)\\': '',
'$(ProjectName)': self.spec['target_name'],
'$(TargetDir)\\': target_dir,
'$(TargetExt)': target_ext,
'$(TargetFileName)': target_file_name,
'$(TargetName)': target_name,
'$(TargetPath)': os.path.join(target_dir, target_file_name),
}
replacements.update(GetGlobalVSMacroEnv(self.vs_version))
return replacements
def ConvertVSMacros(self, s, base_to_build=None, config=None):
"""Convert from VS macro names to something equivalent."""
env = self.GetVSMacroEnv(base_to_build, config=config)
return ExpandMacros(s, env)
def AdjustLibraries(self, libraries):
"""Strip -l from library if it's specified with that."""
libs = [lib[2:] if lib.startswith('-l') else lib for lib in libraries]
return [lib + '.lib' if not lib.endswith('.lib') else lib for lib in libs]
def _GetAndMunge(self, field, path, default, prefix, append, map):
"""Retrieve a value from |field| at |path| or return |default|. If
|append| is specified, and the item is found, it will be appended to that
object instead of returned. If |map| is specified, results will be
remapped through |map| before being returned or appended."""
result = _GenericRetrieve(field, default, path)
result = _DoRemapping(result, map)
result = _AddPrefix(result, prefix)
return _AppendOrReturn(append, result)
class _GetWrapper(object):
def __init__(self, parent, field, base_path, append=None):
self.parent = parent
self.field = field
self.base_path = [base_path]
self.append = append
def __call__(self, name, map=None, prefix='', default=None):
return self.parent._GetAndMunge(self.field, self.base_path + [name],
default=default, prefix=prefix, append=self.append, map=map)
def GetArch(self, config):
"""Get architecture based on msvs_configuration_platform and
msvs_target_platform. Returns either 'x86' or 'x64'."""
configuration_platform = self.msvs_configuration_platform.get(config, '')
platform = self.msvs_target_platform.get(config, '')
if not platform: # If no specific override, use the configuration's.
platform = configuration_platform
# Map from platform to architecture.
return {'Win32': 'x86', 'x64': 'x64'}.get(platform, 'x86')
def _TargetConfig(self, config):
"""Returns the target-specific configuration."""
# There's two levels of architecture/platform specification in VS. The
# first level is globally for the configuration (this is what we consider
# "the" config at the gyp level, which will be something like 'Debug' or
# 'Release_x64'), and a second target-specific configuration, which is an
# override for the global one. |config| is remapped here to take into
# account the local target-specific overrides to the global configuration.
arch = self.GetArch(config)
if arch == 'x64' and not config.endswith('_x64'):
config += '_x64'
if arch == 'x86' and config.endswith('_x64'):
config = config.rsplit('_', 1)[0]
return config
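  # Example for _TargetConfig above: if GetArch(config) resolves to 'x64',
  # a config named 'Debug' is remapped to 'Debug_x64'.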
def _Setting(self, path, config,
default=None, prefix='', append=None, map=None):
"""_GetAndMunge for msvs_settings."""
return self._GetAndMunge(
self.msvs_settings[config], path, default, prefix, append, map)
def _ConfigAttrib(self, path, config,
default=None, prefix='', append=None, map=None):
"""_GetAndMunge for msvs_configuration_attributes."""
return self._GetAndMunge(
self.msvs_configuration_attributes[config],
path, default, prefix, append, map)
def AdjustIncludeDirs(self, include_dirs, config):
"""Updates include_dirs to expand VS specific paths, and adds the system
include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = include_dirs + self.msvs_system_include_dirs[config]
includes.extend(self._Setting(
('VCCLCompilerTool', 'AdditionalIncludeDirectories'), config, default=[]))
return [self.ConvertVSMacros(p, config=config) for p in includes]
def AdjustMidlIncludeDirs(self, midl_include_dirs, config):
"""Updates midl_include_dirs to expand VS specific paths, and adds the
system include dirs used for platform SDK and similar."""
config = self._TargetConfig(config)
includes = midl_include_dirs + self.msvs_system_include_dirs[config]
includes.extend(self._Setting(
('VCMIDLTool', 'AdditionalIncludeDirectories'), config, default=[]))
return [self.ConvertVSMacros(p, config=config) for p in includes]
def GetComputedDefines(self, config):
"""Returns the set of defines that are injected to the defines list based
on other VS settings."""
config = self._TargetConfig(config)
defines = []
if self._ConfigAttrib(['CharacterSet'], config) == '1':
defines.extend(('_UNICODE', 'UNICODE'))
if self._ConfigAttrib(['CharacterSet'], config) == '2':
defines.append('_MBCS')
defines.extend(self._Setting(
('VCCLCompilerTool', 'PreprocessorDefinitions'), config, default=[]))
return defines
def GetCompilerPdbName(self, config, expand_special):
"""Get the pdb file name that should be used for compiler invocations, or
None if there's no explicit name specified."""
config = self._TargetConfig(config)
pdbname = self._Setting(
('VCCLCompilerTool', 'ProgramDataBaseFileName'), config)
if pdbname:
pdbname = expand_special(self.ConvertVSMacros(pdbname))
return pdbname
def GetMapFileName(self, config, expand_special):
"""Gets the explicitly overriden map file name for a target or returns None
if it's not set."""
config = self._TargetConfig(config)
map_file = self._Setting(('VCLinkerTool', 'MapFileName'), config)
if map_file:
map_file = expand_special(self.ConvertVSMacros(map_file, config=config))
return map_file
def GetOutputName(self, config, expand_special):
"""Gets the explicitly overridden output name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
type = self.spec['type']
root = 'VCLibrarianTool' if type == 'static_library' else 'VCLinkerTool'
# TODO(scottmg): Handle OutputDirectory without OutputFile.
output_file = self._Setting((root, 'OutputFile'), config)
if output_file:
output_file = expand_special(self.ConvertVSMacros(
output_file, config=config))
return output_file
def GetPDBName(self, config, expand_special, default):
"""Gets the explicitly overridden pdb name for a target or returns
default if it's not overridden, or if no pdb will be generated."""
config = self._TargetConfig(config)
output_file = self._Setting(('VCLinkerTool', 'ProgramDatabaseFile'), config)
generate_debug_info = self._Setting(
('VCLinkerTool', 'GenerateDebugInformation'), config)
if generate_debug_info == 'true':
if output_file:
return expand_special(self.ConvertVSMacros(output_file, config=config))
else:
return default
<|fim▁hole|> """If NoImportLibrary: true, ninja will not expect the output to include
an import library."""
config = self._TargetConfig(config)
noimplib = self._Setting(('NoImportLibrary',), config)
return noimplib == 'true'
def GetAsmflags(self, config):
"""Returns the flags that need to be added to ml invocations."""
config = self._TargetConfig(config)
asmflags = []
safeseh = self._Setting(('MASM', 'UseSafeExceptionHandlers'), config)
if safeseh == 'true':
asmflags.append('/safeseh')
return asmflags
def GetCflags(self, config):
"""Returns the flags that need to be added to .c and .cc compilations."""
config = self._TargetConfig(config)
cflags = []
cflags.extend(['/wd' + w for w in self.msvs_disabled_warnings[config]])
cl = self._GetWrapper(self, self.msvs_settings[config],
'VCCLCompilerTool', append=cflags)
cl('Optimization',
map={'0': 'd', '1': '1', '2': '2', '3': 'x'}, prefix='/O', default='2')
cl('InlineFunctionExpansion', prefix='/Ob')
cl('DisableSpecificWarnings', prefix='/wd')
cl('StringPooling', map={'true': '/GF'})
cl('EnableFiberSafeOptimizations', map={'true': '/GT'})
cl('OmitFramePointers', map={'false': '-', 'true': ''}, prefix='/Oy')
cl('EnableIntrinsicFunctions', map={'false': '-', 'true': ''}, prefix='/Oi')
cl('FavorSizeOrSpeed', map={'1': 't', '2': 's'}, prefix='/O')
cl('FloatingPointModel',
map={'0': 'precise', '1': 'strict', '2': 'fast'}, prefix='/fp:',
default='0')
cl('CompileAsManaged', map={'false': '', 'true': '/clr'})
cl('WholeProgramOptimization', map={'true': '/GL'})
cl('WarningLevel', prefix='/W')
cl('WarnAsError', map={'true': '/WX'})
cl('CallingConvention',
map={'0': 'd', '1': 'r', '2': 'z', '3': 'v'}, prefix='/G')
cl('DebugInformationFormat',
map={'1': '7', '3': 'i', '4': 'I'}, prefix='/Z')
cl('RuntimeTypeInfo', map={'true': '/GR', 'false': '/GR-'})
cl('EnableFunctionLevelLinking', map={'true': '/Gy', 'false': '/Gy-'})
cl('MinimalRebuild', map={'true': '/Gm'})
cl('BufferSecurityCheck', map={'true': '/GS', 'false': '/GS-'})
cl('BasicRuntimeChecks', map={'1': 's', '2': 'u', '3': '1'}, prefix='/RTC')
cl('RuntimeLibrary',
map={'0': 'T', '1': 'Td', '2': 'D', '3': 'Dd'}, prefix='/M')
cl('ExceptionHandling', map={'1': 'sc','2': 'a'}, prefix='/EH')
cl('DefaultCharIsUnsigned', map={'true': '/J'})
cl('TreatWChar_tAsBuiltInType',
map={'false': '-', 'true': ''}, prefix='/Zc:wchar_t')
cl('EnablePREfast', map={'true': '/analyze'})
cl('AdditionalOptions', prefix='')
cl('EnableEnhancedInstructionSet',
map={'1': 'SSE', '2': 'SSE2', '3': 'AVX', '4': 'IA32', '5': 'AVX2'},
prefix='/arch:')
cflags.extend(['/FI' + f for f in self._Setting(
('VCCLCompilerTool', 'ForcedIncludeFiles'), config, default=[])])
if self.vs_version.short_name in ('2013', '2013e', '2015'):
# New flag required in 2013 to maintain previous PDB behavior.
cflags.append('/FS')
# ninja handles parallelism by itself, don't have the compiler do it too.
cflags = filter(lambda x: not x.startswith('/MP'), cflags)
return cflags
def _GetPchFlags(self, config, extension):
"""Get the flags to be added to the cflags for precompiled header support.
"""
config = self._TargetConfig(config)
# The PCH is only built once by a particular source file. Usage of PCH must
# only be for the same language (i.e. C vs. C++), so only include the pch
# flags when the language matches.
if self.msvs_precompiled_header[config]:
source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
if _LanguageMatchesForPch(source_ext, extension):
pch = os.path.split(self.msvs_precompiled_header[config])[1]
return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pch + '.pch']
return []
def GetCflagsC(self, config):
"""Returns the flags that need to be added to .c compilations."""
config = self._TargetConfig(config)
return self._GetPchFlags(config, '.c')
def GetCflagsCC(self, config):
"""Returns the flags that need to be added to .cc compilations."""
config = self._TargetConfig(config)
return ['/TP'] + self._GetPchFlags(config, '.cc')
def _GetAdditionalLibraryDirectories(self, root, config, gyp_to_build_path):
"""Get and normalize the list of paths in AdditionalLibraryDirectories
setting."""
config = self._TargetConfig(config)
libpaths = self._Setting((root, 'AdditionalLibraryDirectories'),
config, default=[])
libpaths = [os.path.normpath(
gyp_to_build_path(self.ConvertVSMacros(p, config=config)))
for p in libpaths]
return ['/LIBPATH:"' + p + '"' for p in libpaths]
def GetLibFlags(self, config, gyp_to_build_path):
"""Returns the flags that need to be added to lib commands."""
config = self._TargetConfig(config)
libflags = []
lib = self._GetWrapper(self, self.msvs_settings[config],
'VCLibrarianTool', append=libflags)
libflags.extend(self._GetAdditionalLibraryDirectories(
'VCLibrarianTool', config, gyp_to_build_path))
lib('LinkTimeCodeGeneration', map={'true': '/LTCG'})
lib('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
prefix='/MACHINE:')
lib('AdditionalOptions')
return libflags
def GetDefFile(self, gyp_to_build_path):
"""Returns the .def file from sources, if any. Otherwise returns None."""
spec = self.spec
if spec['type'] in ('shared_library', 'loadable_module', 'executable'):
def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
if len(def_files) == 1:
return gyp_to_build_path(def_files[0])
elif len(def_files) > 1:
raise Exception("Multiple .def files")
return None
def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
""".def files get implicitly converted to a ModuleDefinitionFile for the
linker in the VS generator. Emulate that behaviour here."""
def_file = self.GetDefFile(gyp_to_build_path)
if def_file:
ldflags.append('/DEF:"%s"' % def_file)
def GetPGDName(self, config, expand_special):
"""Gets the explicitly overridden pgd name for a target or returns None
if it's not overridden."""
config = self._TargetConfig(config)
output_file = self._Setting(
('VCLinkerTool', 'ProfileGuidedDatabase'), config)
if output_file:
output_file = expand_special(self.ConvertVSMacros(
output_file, config=config))
return output_file
def GetLdflags(self, config, gyp_to_build_path, expand_special,
manifest_base_name, output_name, is_executable, build_dir):
"""Returns the flags that need to be added to link commands, and the
manifest files."""
config = self._TargetConfig(config)
ldflags = []
ld = self._GetWrapper(self, self.msvs_settings[config],
'VCLinkerTool', append=ldflags)
self._GetDefFileAsLdflags(ldflags, gyp_to_build_path)
ld('GenerateDebugInformation', map={'true': '/DEBUG'})
ld('TargetMachine', map={'1': 'X86', '17': 'X64', '3': 'ARM'},
prefix='/MACHINE:')
ldflags.extend(self._GetAdditionalLibraryDirectories(
'VCLinkerTool', config, gyp_to_build_path))
ld('DelayLoadDLLs', prefix='/DELAYLOAD:')
ld('TreatLinkerWarningAsErrors', prefix='/WX',
map={'true': '', 'false': ':NO'})
out = self.GetOutputName(config, expand_special)
if out:
ldflags.append('/OUT:' + out)
pdb = self.GetPDBName(config, expand_special, output_name + '.pdb')
if pdb:
ldflags.append('/PDB:' + pdb)
pgd = self.GetPGDName(config, expand_special)
if pgd:
ldflags.append('/PGD:' + pgd)
map_file = self.GetMapFileName(config, expand_special)
ld('GenerateMapFile', map={'true': '/MAP:' + map_file if map_file
else '/MAP'})
ld('MapExports', map={'true': '/MAPINFO:EXPORTS'})
ld('AdditionalOptions', prefix='')
minimum_required_version = self._Setting(
('VCLinkerTool', 'MinimumRequiredVersion'), config, default='')
if minimum_required_version:
minimum_required_version = ',' + minimum_required_version
ld('SubSystem',
map={'1': 'CONSOLE%s' % minimum_required_version,
'2': 'WINDOWS%s' % minimum_required_version},
prefix='/SUBSYSTEM:')
stack_reserve_size = self._Setting(
('VCLinkerTool', 'StackReserveSize'), config, default='')
if stack_reserve_size:
stack_commit_size = self._Setting(
('VCLinkerTool', 'StackCommitSize'), config, default='')
if stack_commit_size:
stack_commit_size = ',' + stack_commit_size
ldflags.append('/STACK:%s%s' % (stack_reserve_size, stack_commit_size))
ld('TerminalServerAware', map={'1': ':NO', '2': ''}, prefix='/TSAWARE')
ld('LinkIncremental', map={'1': ':NO', '2': ''}, prefix='/INCREMENTAL')
ld('BaseAddress', prefix='/BASE:')
ld('FixedBaseAddress', map={'1': ':NO', '2': ''}, prefix='/FIXED')
ld('RandomizedBaseAddress',
map={'1': ':NO', '2': ''}, prefix='/DYNAMICBASE')
ld('DataExecutionPrevention',
map={'1': ':NO', '2': ''}, prefix='/NXCOMPAT')
ld('OptimizeReferences', map={'1': 'NOREF', '2': 'REF'}, prefix='/OPT:')
ld('ForceSymbolReferences', prefix='/INCLUDE:')
ld('EnableCOMDATFolding', map={'1': 'NOICF', '2': 'ICF'}, prefix='/OPT:')
ld('LinkTimeCodeGeneration',
map={'1': '', '2': ':PGINSTRUMENT', '3': ':PGOPTIMIZE',
'4': ':PGUPDATE'},
prefix='/LTCG')
ld('IgnoreDefaultLibraryNames', prefix='/NODEFAULTLIB:')
ld('ResourceOnlyDLL', map={'true': '/NOENTRY'})
ld('EntryPointSymbol', prefix='/ENTRY:')
ld('Profile', map={'true': '/PROFILE'})
ld('LargeAddressAware',
map={'1': ':NO', '2': ''}, prefix='/LARGEADDRESSAWARE')
# TODO(scottmg): This should sort of be somewhere else (not really a flag).
ld('AdditionalDependencies', prefix='')
if self.GetArch(config) == 'x86':
safeseh_default = 'true'
else:
safeseh_default = None
ld('ImageHasSafeExceptionHandlers',
map={'false': ':NO', 'true': ''}, prefix='/SAFESEH',
default=safeseh_default)
# If the base address is not specifically controlled, DYNAMICBASE should
# be on by default.
base_flags = filter(lambda x: 'DYNAMICBASE' in x or x == '/FIXED',
ldflags)
if not base_flags:
ldflags.append('/DYNAMICBASE')
# If the NXCOMPAT flag has not been specified, default to on. Despite the
# documentation that says this only defaults to on when the subsystem is
# Vista or greater (which applies to the linker), the IDE defaults it on
# unless it's explicitly off.
if not filter(lambda x: 'NXCOMPAT' in x, ldflags):
ldflags.append('/NXCOMPAT')
have_def_file = filter(lambda x: x.startswith('/DEF:'), ldflags)
manifest_flags, intermediate_manifest, manifest_files = \
self._GetLdManifestFlags(config, manifest_base_name, gyp_to_build_path,
is_executable and not have_def_file, build_dir)
ldflags.extend(manifest_flags)
return ldflags, intermediate_manifest, manifest_files
def _GetLdManifestFlags(self, config, name, gyp_to_build_path,
allow_isolation, build_dir):
"""Returns a 3-tuple:
- the set of flags that need to be added to the link to generate
a default manifest
- the intermediate manifest that the linker will generate that should be
used to assert it doesn't add anything to the merged one.
- the list of all the manifest files to be merged by the manifest tool and
included into the link."""
generate_manifest = self._Setting(('VCLinkerTool', 'GenerateManifest'),
config,
default='true')
if generate_manifest != 'true':
# This means not only that the linker should not generate the intermediate
# manifest but also that the manifest tool should do nothing even when
# additional manifests are specified.
return ['/MANIFEST:NO'], [], []
output_name = name + '.intermediate.manifest'
flags = [
'/MANIFEST',
'/ManifestFile:' + output_name,
]
# Instead of using the MANIFESTUAC flags, we generate a .manifest to
# include into the list of manifests. This allows us to avoid the need to
# do two passes during linking. The /MANIFEST flag and /ManifestFile are
# still used, and the intermediate manifest is used to assert that the
# final manifest we get from merging all the additional manifest files
# (plus the one we generate here) isn't modified by merging the
# intermediate into it.
# Always NO, because we generate a manifest file that has what we want.
flags.append('/MANIFESTUAC:NO')
config = self._TargetConfig(config)
enable_uac = self._Setting(('VCLinkerTool', 'EnableUAC'), config,
default='true')
manifest_files = []
generated_manifest_outer = \
"<?xml version='1.0' encoding='UTF-8' standalone='yes'?>" \
"<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>%s" \
"</assembly>"
if enable_uac == 'true':
execution_level = self._Setting(('VCLinkerTool', 'UACExecutionLevel'),
config, default='0')
execution_level_map = {
'0': 'asInvoker',
'1': 'highestAvailable',
'2': 'requireAdministrator'
}
ui_access = self._Setting(('VCLinkerTool', 'UACUIAccess'), config,
default='false')
inner = '''
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level='%s' uiAccess='%s' />
</requestedPrivileges>
</security>
</trustInfo>''' % (execution_level_map[execution_level], ui_access)
else:
inner = ''
generated_manifest_contents = generated_manifest_outer % inner
generated_name = name + '.generated.manifest'
# Need to join with the build_dir here as we're writing it during
# generation time, but we return the un-joined version because the build
# will occur in that directory. We only write the file if the contents
# have changed so that simply regenerating the project files doesn't
# cause a relink.
build_dir_generated_name = os.path.join(build_dir, generated_name)
gyp.common.EnsureDirExists(build_dir_generated_name)
f = gyp.common.WriteOnDiff(build_dir_generated_name)
f.write(generated_manifest_contents)
f.close()
manifest_files = [generated_name]
if allow_isolation:
flags.append('/ALLOWISOLATION')
manifest_files += self._GetAdditionalManifestFiles(config,
gyp_to_build_path)
return flags, output_name, manifest_files
def _GetAdditionalManifestFiles(self, config, gyp_to_build_path):
"""Gets additional manifest files that are added to the default one
generated by the linker."""
files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config,
default=[])
if isinstance(files, str):
files = files.split(';')
return [os.path.normpath(
gyp_to_build_path(self.ConvertVSMacros(f, config=config)))
for f in files]
def IsUseLibraryDependencyInputs(self, config):
"""Returns whether the target should be linked via Use Library Dependency
Inputs (using component .objs of a given .lib)."""
config = self._TargetConfig(config)
uldi = self._Setting(('VCLinkerTool', 'UseLibraryDependencyInputs'), config)
return uldi == 'true'
def IsEmbedManifest(self, config):
"""Returns whether manifest should be linked into binary."""
config = self._TargetConfig(config)
embed = self._Setting(('VCManifestTool', 'EmbedManifest'), config,
default='true')
return embed == 'true'
def IsLinkIncremental(self, config):
"""Returns whether the target should be linked incrementally."""
config = self._TargetConfig(config)
link_inc = self._Setting(('VCLinkerTool', 'LinkIncremental'), config)
return link_inc != '1'
def GetRcflags(self, config, gyp_to_ninja_path):
"""Returns the flags that need to be added to invocations of the resource
compiler."""
config = self._TargetConfig(config)
rcflags = []
rc = self._GetWrapper(self, self.msvs_settings[config],
'VCResourceCompilerTool', append=rcflags)
rc('AdditionalIncludeDirectories', map=gyp_to_ninja_path, prefix='/I')
rcflags.append('/I' + gyp_to_ninja_path('.'))
rc('PreprocessorDefinitions', prefix='/d')
# /l arg must be in hex without leading '0x'
rc('Culture', prefix='/l', map=lambda x: hex(int(x))[2:])
return rcflags
def BuildCygwinBashCommandLine(self, args, path_to_base):
"""Build a command line that runs args via cygwin bash. We assume that all
incoming paths are in Windows normpath'd form, so they need to be
converted to posix style for the part of the command line that's passed to
bash. We also have to do some Visual Studio macro emulation here because
various rules use magic VS names for things. Also note that rules that
contain ninja variables cannot be fixed here (for example ${source}), so
the outer generator needs to make sure that the paths that are written out
are in posix style, if the command line will be used here."""
cygwin_dir = os.path.normpath(
os.path.join(path_to_base, self.msvs_cygwin_dirs[0]))
cd = ('cd %s' % path_to_base).replace('\\', '/')
args = [a.replace('\\', '/').replace('"', '\\"') for a in args]
args = ["'%s'" % a.replace("'", "'\\''") for a in args]
bash_cmd = ' '.join(args)
cmd = (
'call "%s\\setup_env.bat" && set CYGWIN=nontsec && ' % cygwin_dir +
'bash -c "%s ; %s"' % (cd, bash_cmd))
return cmd
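  # A sketch of the resulting command (hypothetical paths, assuming the
  # default msvs_cygwin_dirs of ['.']), for args ['cp', 'a.txt', 'b.txt']
  # and path_to_base r'path\to\base':
  #   call "path\to\base\setup_env.bat" && set CYGWIN=nontsec &&
  #   bash -c "cd path/to/base ; 'cp' 'a.txt' 'b.txt'"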
def IsRuleRunUnderCygwin(self, rule):
"""Determine if an action should be run under cygwin. If the variable is
unset, or set to 1 we use cygwin."""
return int(rule.get('msvs_cygwin_shell',
self.spec.get('msvs_cygwin_shell', 1))) != 0
def _HasExplicitRuleForExtension(self, spec, extension):
"""Determine if there's an explicit rule for a particular extension."""
for rule in spec.get('rules', []):
if rule['extension'] == extension:
return True
return False
def _HasExplicitIdlActions(self, spec):
"""Determine if an action should not run midl for .idl files."""
return any([action.get('explicit_idl_action', 0)
for action in spec.get('actions', [])])
def HasExplicitIdlRulesOrActions(self, spec):
"""Determine if there's an explicit rule or action for idl files. When
there isn't we need to generate implicit rules to build MIDL .idl files."""
return (self._HasExplicitRuleForExtension(spec, 'idl') or
self._HasExplicitIdlActions(spec))
def HasExplicitAsmRules(self, spec):
"""Determine if there's an explicit rule for asm files. When there isn't we
need to generate implicit rules to assemble .asm files."""
return self._HasExplicitRuleForExtension(spec, 'asm')
def GetIdlBuildData(self, source, config):
"""Determine the implicit outputs for an idl file. Returns output
directory, outputs, and variables and flags that are required."""
config = self._TargetConfig(config)
midl_get = self._GetWrapper(self, self.msvs_settings[config], 'VCMIDLTool')
def midl(name, default=None):
return self.ConvertVSMacros(midl_get(name, default=default),
config=config)
tlb = midl('TypeLibraryName', default='${root}.tlb')
header = midl('HeaderFileName', default='${root}.h')
dlldata = midl('DLLDataFileName', default='dlldata.c')
iid = midl('InterfaceIdentifierFileName', default='${root}_i.c')
proxy = midl('ProxyFileName', default='${root}_p.c')
# Note that .tlb is not included in the outputs as it is not always
# generated depending on the content of the input idl file.
outdir = midl('OutputDirectory', default='')
output = [header, dlldata, iid, proxy]
variables = [('tlb', tlb),
('h', header),
('dlldata', dlldata),
('iid', iid),
('proxy', proxy)]
# TODO(scottmg): Are there configuration settings to set these flags?
target_platform = 'win32' if self.GetArch(config) == 'x86' else 'x64'
flags = ['/char', 'signed', '/env', target_platform, '/Oicf']
return outdir, output, variables, flags
def _LanguageMatchesForPch(source_ext, pch_source_ext):
c_exts = ('.c',)
cc_exts = ('.cc', '.cxx', '.cpp')
return ((source_ext in c_exts and pch_source_ext in c_exts) or
(source_ext in cc_exts and pch_source_ext in cc_exts))
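# For example, _LanguageMatchesForPch('.cc', '.cpp') is True, while
# _LanguageMatchesForPch('.c', '.cpp') is False.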
class PrecompiledHeader(object):
"""Helper to generate dependencies and build rules to handle generation of
precompiled headers. Interface matches the GCH handler in xcode_emulation.py.
"""
def __init__(
self, settings, config, gyp_to_build_path, gyp_to_unique_output, obj_ext):
self.settings = settings
self.config = config
pch_source = self.settings.msvs_precompiled_source[self.config]
self.pch_source = gyp_to_build_path(pch_source)
filename, _ = os.path.splitext(pch_source)
self.output_obj = gyp_to_unique_output(filename + obj_ext).lower()
def _PchHeader(self):
"""Get the header that will appear in an #include line for all source
files."""
return os.path.split(self.settings.msvs_precompiled_header[self.config])[1]
def GetObjDependencies(self, sources, objs, arch):
"""Given a list of sources files and the corresponding object files,
returns a list of the pch files that should be depended upon. The
additional wrapping in the return value is for interface compatibility
with make.py on Mac, and xcode_emulation.py."""
assert arch is None
if not self._PchHeader():
return []
pch_ext = os.path.splitext(self.pch_source)[1]
for source in sources:
if _LanguageMatchesForPch(os.path.splitext(source)[1], pch_ext):
return [(None, None, self.output_obj)]
return []
def GetPchBuildCommands(self, arch):
"""Not used on Windows as there are no additional build steps required
(instead, existing steps are modified in GetFlagsModifications below)."""
return []
def GetFlagsModifications(self, input, output, implicit, command,
cflags_c, cflags_cc, expand_special):
"""Get the modified cflags and implicit dependencies that should be used
for the pch compilation step."""
if input == self.pch_source:
pch_output = ['/Yc' + self._PchHeader()]
if command == 'cxx':
return ([('cflags_cc', map(expand_special, cflags_cc + pch_output))],
self.output_obj, [])
elif command == 'cc':
return ([('cflags_c', map(expand_special, cflags_c + pch_output))],
self.output_obj, [])
return [], output, implicit
vs_version = None
def GetVSVersion(generator_flags):
global vs_version
if not vs_version:
vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
generator_flags.get('msvs_version', 'auto'),
allow_fallback=False)
return vs_version
def _GetVsvarsSetupArgs(generator_flags, arch):
vs = GetVSVersion(generator_flags)
return vs.SetupScript()
def ExpandMacros(string, expansions):
"""Expand $(Variable) per expansions dict. See MsvsSettings.GetVSMacroEnv
for the canonical way to retrieve a suitable dict."""
if '$' in string:
for old, new in expansions.iteritems():
assert '$(' not in new, new
string = string.replace(old, new)
return string
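# For example:
#   ExpandMacros('$(TargetName).pdb', {'$(TargetName)': 'foo'}) -> 'foo.pdb'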
def _ExtractImportantEnvironment(output_of_set):
"""Extracts environment variables required for the toolchain to run from
a textual dump output by the cmd.exe 'set' command."""
envvars_to_save = (
'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
'include',
'lib',
'libpath',
'path',
'pathext',
'systemroot',
'temp',
'tmp',
)
env = {}
for line in output_of_set.splitlines():
for envvar in envvars_to_save:
if re.match(envvar + '=', line.lower()):
var, setting = line.split('=', 1)
if envvar == 'path':
# Our own rules (for running gyp-win-tool) and other actions in
# Chromium rely on python being in the path. Add the path to this
# python here so that if it's not in the path when ninja is run
# later, python will still be found.
setting = os.path.dirname(sys.executable) + os.pathsep + setting
env[var.upper()] = setting
break
for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
if required not in env:
raise Exception('Environment variable "%s" '
'required to be set to valid path' % required)
return env
def _FormatAsEnvironmentBlock(envvar_dict):
"""Format as an 'environment block' directly suitable for CreateProcess.
Briefly this is a list of key=value\0, terminated by an additional \0. See
CreateProcess documentation for more details."""
block = ''
nul = '\0'
for key, value in envvar_dict.iteritems():
block += key + '=' + value + nul
block += nul
return block
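# For example, _FormatAsEnvironmentBlock({'TMP': 'C:\\temp'}) produces
# 'TMP=C:\\temp' followed by a NUL byte, then the final terminating NUL.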
def _ExtractCLPath(output_of_where):
"""Gets the path to cl.exe based on the output of calling the environment
setup batch file, followed by the equivalent of `where`."""
# Take the first line, as that's the first found in the PATH.
for line in output_of_where.strip().splitlines():
if line.startswith('LOC:'):
return line[len('LOC:'):].strip()
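# For example, if output_of_where contains a line 'LOC:C:\vs\cl.exe'
# (hypothetical path), _ExtractCLPath returns 'C:\vs\cl.exe'.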
def GenerateEnvironmentFiles(toplevel_build_dir, generator_flags,
system_includes, open_out):
"""It's not sufficient to have the absolute path to the compiler, linker,
etc. on Windows, as those tools rely on .dlls being in the PATH. We also
need to support both x86 and x64 compilers within the same build (to support
msvs_target_platform hackery). Different architectures require a different
compiler binary, and different supporting environment variables (INCLUDE,
LIB, LIBPATH). So, we extract the environment here, wrap all invocations
of compiler tools (cl, link, lib, rc, midl, etc.) via win_tool.py which
sets up the environment, and then we do not prefix the compiler with
an absolute path, instead preferring something like "cl.exe" in the rule
which will then run whichever the environment setup has put in the path.
  When the following procedure for generating environment files does not
  meet your requirements (e.g. for custom toolchains), you can pass
  "-G ninja_use_custom_environment_files" to gyp to suppress file
  generation and supply custom environment files of your own."""
archs = ('x86', 'x64')
if generator_flags.get('ninja_use_custom_environment_files', 0):
cl_paths = {}
for arch in archs:
cl_paths[arch] = 'cl.exe'
return cl_paths
vs = GetVSVersion(generator_flags)
cl_paths = {}
for arch in archs:
# Extract environment variables for subprocesses.
args = vs.SetupScript(arch)
args.extend(('&&', 'set'))
popen = subprocess.Popen(
args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
variables, _ = popen.communicate()
env = _ExtractImportantEnvironment(variables)
# Inject system includes from gyp files into INCLUDE.
if system_includes:
system_includes = system_includes | OrderedSet(
env.get('INCLUDE', '').split(';'))
env['INCLUDE'] = ';'.join(system_includes)
env_block = _FormatAsEnvironmentBlock(env)
f = open_out(os.path.join(toplevel_build_dir, 'environment.' + arch), 'wb')
f.write(env_block)
f.close()
# Find cl.exe location for this architecture.
args = vs.SetupScript(arch)
args.extend(('&&',
'for', '%i', 'in', '(cl.exe)', 'do', '@echo', 'LOC:%~$PATH:i'))
popen = subprocess.Popen(args, shell=True, stdout=subprocess.PIPE)
output, _ = popen.communicate()
cl_paths[arch] = _ExtractCLPath(output)
return cl_paths
def VerifyMissingSources(sources, build_dir, generator_flags, gyp_to_ninja):
"""Emulate behavior of msvs_error_on_missing_sources present in the msvs
generator: Check that all regular source files, i.e. not created at run time,
exist on disk. Missing files cause needless recompilation when building via
VS, and we want this check to match for people/bots that build using ninja,
so they're not surprised when the VS build fails."""
if int(generator_flags.get('msvs_error_on_missing_sources', 0)):
no_specials = filter(lambda x: '$' not in x, sources)
relative = [os.path.join(build_dir, gyp_to_ninja(s)) for s in no_specials]
missing = filter(lambda x: not os.path.exists(x), relative)
if missing:
# They'll look like out\Release\..\..\stuff\things.cc, so normalize the
# path for a slightly less crazy looking output.
cleaned_up = [os.path.normpath(x) for x in missing]
raise Exception('Missing input files:\n%s' % '\n'.join(cleaned_up))
# Sets some values in default_variables, which are required for many
# generators, run on Windows.
def CalculateCommonVariables(default_variables, params):
generator_flags = params.get('generator_flags', {})
# Set a variable so conditions can be based on msvs_version.
msvs_version = gyp.msvs_emulation.GetVSVersion(generator_flags)
default_variables['MSVS_VERSION'] = msvs_version.ShortName()
# To determine processor word size on Windows, in addition to checking
# PROCESSOR_ARCHITECTURE (which reflects the word size of the current
# process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
  # contains the actual word size of the system when running through WOW64).
if ('64' in os.environ.get('PROCESSOR_ARCHITECTURE', '') or
'64' in os.environ.get('PROCESSOR_ARCHITEW6432', '')):
default_variables['MSVS_OS_BITS'] = 64
else:
default_variables['MSVS_OS_BITS'] = 32<|fim▁end|>
|
else:
return None
def GetNoImportLibrary(self, config):
|
<|file_name|>vlantransparent.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib._i18n import _
from neutron_lib import exceptions
class VlanTransparencyDriverError(exceptions.NeutronException):<|fim▁hole|><|fim▁end|>
|
"""Vlan Transparency not supported by all mechanism drivers."""
message = _("Backend does not support VLAN Transparency.")
|
<|file_name|>jquery.atwho.js<|end_file_name|><|fim▁begin|>/*
Implement Github like autocomplete mentions
http://ichord.github.com/At.js
Copyright (c) 2013 [email protected]
Licensed under the MIT license.
*/
/*
This plugin manipulates the caret inside a textarea or input.
It only implements getting the caret's position within the text
box and setting the caret's position.
*/
/**
* --------------------
* Vanilla Forums NOTE:
* --------------------
*
 * This file has been heavily modified to work with iframes, as well
 * as to support Vanilla's custom username handling (quotation marks
 * and spaces in names).
* Do not just replace with a more current version. At the time of
* development there was no support for iframes, or spaces in names.
* This may have changed, so if you do decide to upgrade this library,
* you're going to have to update the code that uses this library as well.
* It's all wrapped up in a function called `atCompleteInit`.
*/
(function() {
(function(factory) {
if (typeof define === 'function' && define.amd) {
return define(['jquery'], factory);
} else {
return factory(window.jQuery);
}
})(function($) {
"use strict";
////var EditableCaret, InputCaret, Mirror, Utils, methods, pluginName;
var EditableCaret, InputCaret, Mirror, Utils, methods, pluginName, cWin;
pluginName = 'caret';
EditableCaret = (function() {
function EditableCaret($inputor) {
this.$inputor = $inputor;
this.domInputor = this.$inputor[0];
}
EditableCaret.prototype.setPos = function(pos) {
return this.domInputor;
};
EditableCaret.prototype.getIEPosition = function() {
return $.noop();
};
EditableCaret.prototype.getPosition = function() {
return $.noop();
};
EditableCaret.prototype.getOldIEPos = function() {
var preCaretTextRange, textRange;
textRange = document.selection.createRange();
preCaretTextRange = document.body.createTextRange();
preCaretTextRange.moveToElementText(this.domInputor);
preCaretTextRange.setEndPoint("EndToEnd", textRange);
return preCaretTextRange.text.length;
};
EditableCaret.prototype.getPos = function() {
var clonedRange, pos, range;
if (range = this.range()) {
clonedRange = range.cloneRange();
clonedRange.selectNodeContents(this.domInputor);
clonedRange.setEnd(range.endContainer, range.endOffset);
pos = clonedRange.toString().length;
clonedRange.detach();
return pos;
} else if (document.selection) {
return this.getOldIEPos();
}
};
EditableCaret.prototype.getOldIEOffset = function() {
var range, rect;
range = document.selection.createRange().duplicate();
range.moveStart("character", -1);
rect = range.getBoundingClientRect();
return {
height: rect.bottom - rect.top,
left: rect.left,
top: rect.top
};
};
EditableCaret.prototype.getOffset = function(pos) {
var clonedRange, offset, range, rect;
offset = null;
////if (window.getSelection && (range = this.range())) {
if (cWin.getSelection && (range = this.range())) {
if (range.endOffset - 1 < 0) {
return null;
}
clonedRange = range.cloneRange();
clonedRange.setStart(range.endContainer, range.endOffset - 1);
clonedRange.setEnd(range.endContainer, range.endOffset);
rect = clonedRange.getBoundingClientRect();
offset = {
height: rect.height,
left: rect.left + rect.width,
top: rect.top
};
clonedRange.detach();
offset;
} else if (document.selection) {
this.getOldIEOffset();
}
return Utils.adjustOffset(offset, this.$inputor);
};
EditableCaret.prototype.range = function() {
var sel;
////if (!window.getSelection) {
if (!cWin.getSelection) {
return;
}
////sel = window.getSelection();
sel = cWin.getSelection();
if (sel.rangeCount > 0) {
return sel.getRangeAt(0);
} else {
return null;
}
};
return EditableCaret;
})();
InputCaret = (function() {
function InputCaret($inputor) {
this.$inputor = $inputor;
this.domInputor = this.$inputor[0];
}
InputCaret.prototype.getIEPos = function() {
var endRange, inputor, len, normalizedValue, pos, range, textInputRange;
inputor = this.domInputor;
range = document.selection.createRange();
pos = 0;
if (range && range.parentElement() === inputor) {
normalizedValue = inputor.value.replace(/\r\n/g, "\n");
len = normalizedValue.length;
textInputRange = inputor.createTextRange();
textInputRange.moveToBookmark(range.getBookmark());
endRange = inputor.createTextRange();
endRange.collapse(false);
if (textInputRange.compareEndPoints("StartToEnd", endRange) > -1) {
pos = len;
} else {
pos = -textInputRange.moveStart("character", -len);
}
}
return pos;
};
InputCaret.prototype.getPos = function() {
if (document.selection) {
return this.getIEPos();
} else {
return this.domInputor.selectionStart;
}
};
InputCaret.prototype.setPos = function(pos) {
var inputor, range;
inputor = this.domInputor;
if (document.selection) {
range = inputor.createTextRange();
range.move("character", pos);
range.select();
} else if (inputor.setSelectionRange) {
inputor.setSelectionRange(pos, pos);
}
return inputor;
};
InputCaret.prototype.getIEOffset = function(pos) {
var h, range, textRange, x, y;
textRange = this.domInputor.createTextRange();
if (pos) {
textRange.move('character', pos);
} else {
range = document.selection.createRange();
textRange.moveToBookmark(range.getBookmark());
}
x = textRange.boundingLeft;
y = textRange.boundingTop;
h = textRange.boundingHeight;
return {
left: x,
top: y,
height: h
};
};
InputCaret.prototype.getOffset = function(pos) {
var $inputor, offset, position;
$inputor = this.$inputor;
if (document.selection) {
return Utils.adjustOffset(this.getIEOffset(pos), $inputor);
} else {
offset = $inputor.offset();
position = this.getPosition(pos);
return offset = {
left: offset.left + position.left,
top: offset.top + position.top,
height: position.height
};
}
};
InputCaret.prototype.getPosition = function(pos) {
var $inputor, at_rect, format, html, mirror, start_range;
$inputor = this.$inputor;
format = function(value) {
        return value.replace(/</g, '&lt;').replace(/>/g, '&gt;').replace(/`/g, '&#96;').replace(/"/g, '&quot;').replace(/\r\n|\r|\n/g, "<br />");
};
if (pos === void 0) {
pos = this.getPos();
}
start_range = $inputor.val().slice(0, pos);
html = "<span>" + format(start_range) + "</span>";
html += "<span id='caret'>|</span>";
mirror = new Mirror($inputor);
return at_rect = mirror.create(html).rect();
};
InputCaret.prototype.getIEPosition = function(pos) {
var h, inputorOffset, offset, x, y;
offset = this.getIEOffset(pos);
inputorOffset = this.$inputor.offset();
x = offset.left - inputorOffset.left;
y = offset.top - inputorOffset.top;
h = offset.height;
return {
left: x,
top: y,
height: h
};
};
return InputCaret;
})();
Mirror = (function() {
Mirror.prototype.css_attr = ["overflowY", "height", "width", "paddingTop", "paddingLeft", "paddingRight", "paddingBottom", "marginTop", "marginLeft", "marginRight", "marginBottom", "fontFamily", "borderStyle", "borderWidth", "wordWrap", "fontSize", "lineHeight", "overflowX", "text-align"];
function Mirror($inputor) {
this.$inputor = $inputor;
}
Mirror.prototype.mirrorCss = function() {
var css,
_this = this;
css = {
position: 'absolute',
left: -9999,
top: 0,
zIndex: -20000,
'white-space': 'pre-wrap'
};
$.each(this.css_attr, function(i, p) {
return css[p] = _this.$inputor.css(p);
});
return css;
};
Mirror.prototype.create = function(html) {
this.$mirror = $('<div></div>');
this.$mirror.css(this.mirrorCss());
this.$mirror.html(html);
this.$inputor.after(this.$mirror);
return this;
};
Mirror.prototype.rect = function() {
var $flag, pos, rect;
$flag = this.$mirror.find("#caret");
pos = $flag.position();
rect = {
left: pos.left,
top: pos.top,
height: $flag.height()
};
this.$mirror.remove();
return rect;
};
return Mirror;
})();
Utils = {
adjustOffset: function(offset, $inputor) {
if (!offset) {
return;
}
offset.top += $(window).scrollTop() + $inputor.scrollTop();
      offset.left += $(window).scrollLeft() + $inputor.scrollLeft();
return offset;
},
contentEditable: function($inputor) {
return !!($inputor[0].contentEditable && $inputor[0].contentEditable === 'true');
}
};
methods = {
pos: function(pos) {
if (pos) {
return this.setPos(pos);
} else {
return this.getPos();
}
},
position: function(pos) {
if (document.selection) {
return this.getIEPosition(pos);
} else {
return this.getPosition(pos);
}
},
offset: function(pos) {
return this.getOffset(pos);
}
};
////$.fn.caret = function(method) {
$.fn.caret = function(method, aWin) {
var caret;
////
cWin = aWin;
caret = Utils.contentEditable(this) ? new EditableCaret(this) : new InputCaret(this);
if (methods[method]) {
////return methods[method].apply(caret, Array.prototype.slice.call(arguments, 1));
///////return methods[method].apply(caret, Array.prototype.slice.call(arguments, method == 'pos' ? 2 : 1));
return methods[method].apply(caret, Array.prototype.slice.call(arguments, 2));
} else {
return $.error("Method " + method + " does not exist on jQuery.caret");
}
};
$.fn.caret.EditableCaret = EditableCaret;
$.fn.caret.InputCaret = InputCaret;
$.fn.caret.Utils = Utils;
return $.fn.caret.apis = methods;
});
}).call(this);
/*
Implement Github like autocomplete mentions
http://ichord.github.com/At.js
Copyright (c) 2013 [email protected]
Licensed under the MIT license.
*/
(function() {
var __slice = [].slice;
(function(factory) {
if (typeof define === 'function' && define.amd) {
return define(['jquery'], factory);
} else {
return factory(window.jQuery);
}
})(function($) {
var $CONTAINER, Api, App, Atwho, Controller, DEFAULT_CALLBACKS, KEY_CODE, Model, View;
App = (function() {
function App(inputor) {
this.current_flag = null;
this.controllers = {};
this.$inputor = $(inputor);
this.listen();
}
App.prototype.controller = function(at) {
return this.controllers[at || this.current_flag];
};
App.prototype.set_context_for = function(at) {
this.current_flag = at;
return this;
};
App.prototype.reg = function(flag, setting) {
var controller, _base;
controller = (_base = this.controllers)[flag] || (_base[flag] = new Controller(this, flag));
if (setting.alias) {
this.controllers[setting.alias] = controller;
}
controller.init(setting);
return this;
};
App.prototype.listen = function() {
var _this = this;
return this.$inputor.on('keyup.atwho', function(e) {
return _this.on_keyup(e);
}).on('keydown.atwho', function(e) {
return _this.on_keydown(e);
}).on('scroll.atwho', function(e) {
var _ref;
return (_ref = _this.controller()) != null ? _ref.view.hide() : void 0;
}).on('blur.atwho', function(e) {
var c;
if (c = _this.controller()) {
return c.view.hide(c.get_opt("display_timeout"));
}
});
};
App.prototype.dispatch = function() {
var _this = this;
return $.map(this.controllers, function(c) {
if (c.look_up()) {
return _this.set_context_for(c.at);
}
});
};
App.prototype.on_keyup = function(e) {
var _ref;
switch (e.keyCode) {
case KEY_CODE.ESC:
case KEY_CODE.TAB:
case KEY_CODE.ENTER:
e.preventDefault();
if ((_ref = this.controller()) != null) {
_ref.view.hide();
}
break;
case KEY_CODE.DOWN:
case KEY_CODE.UP:
$.noop();
break;
default:
this.dispatch();
}
};
App.prototype.on_keydown = function(e) {
var view, _ref;
view = (_ref = this.controller()) != null ? _ref.view : void 0;
if (!(view && view.visible())) {
return;
}
switch (e.keyCode) {
case KEY_CODE.ESC:
e.preventDefault();
view.hide();
break;
case KEY_CODE.UP:
e.preventDefault();<|fim▁hole|> break;
case KEY_CODE.DOWN:
e.preventDefault();
view.next();
break;
case KEY_CODE.TAB:
case KEY_CODE.ENTER:
if (!view.visible()) {
return;
}
view.choose(e);
break;
default:
$.noop();
}
};
return App;
})();
Controller = (function() {
var uuid, _uuid;
_uuid = 0;
uuid = function() {
return _uuid += 1;
};
function Controller(app, at) {
this.app = app;
this.at = at;
this.$inputor = this.app.$inputor;
this.id = this.$inputor[0].id || uuid();
this.setting = null;
this.query = null;
this.pos = 0;
this.cur_rect = null;
this.range = null;
$CONTAINER.append(this.$el = $("<div id='atwho-ground-" + this.id + "'></div>"));
this.model = new Model(this);
this.view = new View(this);
}
Controller.prototype.init = function(setting) {
this.setting = $.extend({}, this.setting || $.fn.atwho["default"], setting);
this.view.init();
return this.model.reload(this.setting.data);
};
Controller.prototype.call_default = function() {
var args, func_name;
func_name = arguments[0], args = 2 <= arguments.length ? __slice.call(arguments, 1) : [];
try {
return DEFAULT_CALLBACKS[func_name].apply(this, args);
} catch (error) {
return $.error("" + error + " Or maybe At.js doesn't have function " + func_name);
}
};
Controller.prototype.trigger = function(name, data) {
var alias, event_name;
data.push(this);
alias = this.get_opt('alias');
event_name = alias ? "" + name + "-" + alias + ".atwho" : "" + name + ".atwho";
return this.$inputor.trigger(event_name, data);
};
Controller.prototype.callbacks = function(func_name) {
return this.get_opt("callbacks")[func_name] || DEFAULT_CALLBACKS[func_name];
};
Controller.prototype.get_opt = function(at, default_value) {
try {
return this.setting[at];
} catch (e) {
return null;
}
};
Controller.prototype.content = function() {
var result = {
content: null,
offset: 0
};
if (this.$inputor.is('textarea, input')) {
result.content = this.$inputor.val();
} else {
var textNode = $(document.createElement('div'));
var html = this.$inputor.html();
var breaks = /<br(\s+)?(\/)?>/g;
result.offset = html.match(breaks) ? html.match(breaks).length : 0;
textNode.html(html.replace(breaks, "\n"));
result.content = textNode.text();
}
return result;
};
Controller.prototype.catch_query = function() {
var caret_pos, contents, end, query, start, subtext;
contents = this.content();
////caret_pos = this.$inputor.caret('pos');
caret_pos = this.$inputor.caret('pos', this.setting.cWindow) + contents.offset;
subtext = contents.content.slice(0, caret_pos);
query = this.callbacks("matcher").call(this, this.at, subtext, this.get_opt('start_with_space'));
if (typeof query === "string" && query.length <= this.get_opt('max_len', 20)) {
start = caret_pos - query.length;
end = start + query.length;
this.pos = start;
query = {
'text': query.toLowerCase(),
'head_pos': start,
'end_pos': end
};
this.trigger("matched", [this.at, query.text]);
} else {
this.view.hide();
}
return this.query = query;
};
Controller.prototype.rect = function() {
var c, scale_bottom;
if (!(c = this.$inputor.caret('offset', this.setting.cWindow, this.pos - 1))) {
return;
}
if (this.$inputor.attr('contentEditable') === 'true') {
c = (this.cur_rect || (this.cur_rect = c)) || c;
}
scale_bottom = document.selection ? 0 : 2;
return {
left: c.left,
top: c.top,
bottom: c.top + c.height + scale_bottom
};
};
Controller.prototype.reset_rect = function() {
if (this.$inputor.attr('contentEditable') === 'true') {
return this.cur_rect = null;
}
};
Controller.prototype.mark_range = function() {
return this.range = this.get_range() || this.get_ie_range();
};
Controller.prototype.clear_range = function() {
return this.range = null;
};
Controller.prototype.get_range = function() {
var thisWin = this.setting.cWindow;
return thisWin.getSelection ? thisWin.getSelection().getRangeAt(0) : (this.range || void 0);
};
Controller.prototype.get_ie_range = function() {
return this.range || (document.selection ? document.selection.createRange() : void 0);
};
Controller.prototype.insert_content_for = function($li) {
var data, data_value, tpl;
data_value = $li.data('value');
tpl = this.get_opt('insert_tpl');
if (this.$inputor.is('textarea, input') || !tpl) {
return data_value;
}
data = $.extend({}, $li.data('item-data'), {
'atwho-data-value': data_value,
'atwho-at': this.at
});
return this.callbacks("tpl_eval").call(this, tpl, data);
};
Controller.prototype.insert = function(content, $li) {
var $inputor, $insert_node, class_name, content_node, insert_node, pos, range, sel, source, start_str, text, thisWin;
$inputor = this.$inputor;
if ($inputor.attr('contentEditable') === 'true') {
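// contenteditable branch: build the mention span that is inserted at the caret below.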
class_name = "vanilla-mention-" + (this.get_opt('alias') || this.at);
content_node = "" + content + " ";
insert_node = '<span class="' + class_name + '">' + content_node + '</span>';
$insert_node = $(insert_node).data('atwho-data-item', $li.data('item-data'));
if (document.selection) {
$insert_node = $("<span contenteditable='true'></span>").html($insert_node);
}
}
if ($inputor.is('textarea, input')) {
content = '' + content;
source = $inputor.val();
start_str = source.slice(0, Math.max(this.query.head_pos - this.at.length, 0));
text = "" + start_str + content + " " + (source.slice(this.query['end_pos'] || 0));
$inputor.val(text);
$inputor.caret('pos', this.setting.cWindow, start_str.length + content.length + 1);
} else if (range = this.get_range()) {
thisWin = this.setting.cWindow;
pos = range.startOffset - (this.query.end_pos - this.query.head_pos) - this.at.length;
range.setStart(range.endContainer, Math.max(pos, 0));
range.setEnd(range.endContainer, range.endOffset);
range.deleteContents();
range.insertNode(document.createTextNode(content + " "));
range.collapse(false);
sel = thisWin.getSelection();
sel.removeAllRanges();
sel.addRange(range);
} else if (range = this.get_ie_range()) {
range.moveStart('character', this.query.end_pos - this.query.head_pos - this.at.length);
// IE TextRange.pasteHTML expects an HTML string, not a DOM node.
range.pasteHTML($insert_node[0].outerHTML);
range.collapse(false);
range.select();
}
$inputor.focus();
return $inputor.change();
};
Controller.prototype.render_view = function(data) {
var search_key;
search_key = this.get_opt("search_key");
data = this.callbacks("sorter").call(this, this.query.text, data.slice(0, 1001), search_key);
return this.view.render(data.slice(0, this.get_opt('limit')));
};
Controller.prototype.look_up = function() {
var query, _callback;
if (!(query = this.catch_query())) {
return;
}
_callback = function(data) {
if (data && data.length > 0) {
return this.render_view(data);
} else {
return this.view.hide();
}
};
this.model.query(query.text, $.proxy(_callback, this));
return query;
};
return Controller;
})();
Model = (function() {
var _storage;
_storage = {};
function Model(context) {
this.context = context;
this.at = this.context.at;
}
Model.prototype.saved = function() {
return this.fetch().length > 0;
};
Model.prototype.query = function(query, callback) {
var data, search_key, _ref;
data = this.fetch();
search_key = this.context.get_opt("search_key");
callback(data = this.context.callbacks('filter').call(this.context, query, data, search_key));
if (!(data && data.length > 0)) {
return (_ref = this.context.callbacks('remote_filter')) != null ? _ref.call(this.context, query, callback) : void 0;
}
};
Model.prototype.fetch = function() {
return _storage[this.at] || [];
};
Model.prototype.save = function(data) {
return _storage[this.at] = this.context.callbacks("before_save").call(this.context, data || []);
};
Model.prototype.load = function(data) {
if (!(this.saved() || !data)) {
return this._load(data);
}
};
Model.prototype.reload = function(data) {
return this._load(data);
};
Model.prototype._load = function(data) {
var _this = this;
if (typeof data === "string") {
return $.ajax(data, {
dataType: "json"
}).done(function(data) {
return _this.save(data);
});
} else {
return this.save(data);
}
};
return Model;
})();
View = (function() {
function View(context) {
this.context = context;
this.$el = $("<div class='atwho-view'><ul class='atwho-view-ul'></ul></div>");
this.timeout_id = null;
this.context.$el.append(this.$el);
this.bind_event();
}
View.prototype.init = function() {
var id;
id = this.context.get_opt("alias") || this.context.at.charCodeAt(0);
return this.$el.attr({
'id': "at-view-" + id
});
};
View.prototype.bind_event = function() {
var $menu,
_this = this;
$menu = this.$el.find('ul');
$menu.on('mouseenter.atwho-view', 'li', function(e) {
$menu.find('.cur').removeClass('cur');
return $(e.currentTarget).addClass('cur');
}).on('click', function(e) {
_this.choose(e);
return e.preventDefault();
});
return this.$el.on('mouseenter.atwho-view', 'ul', function(e) {
return _this.context.mark_range();
}).on('mouseleave.atwho-view', 'ul', function(e) {
return _this.context.clear_range();
});
};
View.prototype.visible = function() {
return this.$el.is(":visible");
};
View.prototype.choose = function(event) {
var $li, content;
if (($li = this.$el.find(".cur")).length) {
event.preventDefault();
content = this.context.insert_content_for($li);
this.context.insert(this.context.callbacks("before_insert").call(this.context, content, $li), $li);
this.context.trigger("inserted", [$li]);
return this.hide();
}
};
View.prototype.reposition = function(rect) {
var offset;
// Measure against the editor's own window; fall back to the top window when cWindow is unset.
var thisWin = this.context.setting.cWindow || window;
if (rect.bottom + this.$el.height() - $(thisWin).scrollTop() > $(thisWin).height()) {
rect.bottom = rect.top - this.$el.height();
}
offset = {
left: rect.left,
top: rect.bottom
};
this.$el.offset(offset);
return this.context.trigger("reposition", [offset]);
};
View.prototype.next = function() {
var cur, next;
cur = this.$el.find('.cur').removeClass('cur');
next = cur.next();
if (!next.length) {
next = this.$el.find('li:first');
}
return next.addClass('cur');
};
View.prototype.prev = function() {
var cur, prev;
cur = this.$el.find('.cur').removeClass('cur');
prev = cur.prev();
if (!prev.length) {
prev = this.$el.find('li:last');
}
return prev.addClass('cur');
};
View.prototype.show = function() {
var rect;
if (!this.visible()) {
this.$el.show();
}
if (rect = this.context.rect()) {
return this.reposition(rect);
}
};
View.prototype.hide = function(time) {
var callback,
_this = this;
if (isNaN(time) && this.visible()) {
this.context.reset_rect();
return this.$el.hide();
} else {
callback = function() {
return _this.hide();
};
clearTimeout(this.timeout_id);
return this.timeout_id = setTimeout(callback, time);
}
};
View.prototype.render = function(list) {
var $li, $ul, item, li, tpl, _i, _len;
if (!$.isArray(list) || list.length <= 0) {
this.hide();
return;
}
$ul = this.$el.find('ul').empty();
tpl = this.context.get_opt('tpl');
for (_i = 0, _len = list.length; _i < _len; _i++) {
item = list[_i];
item = $.extend({}, item, {
'atwho-at': this.context.at
});
li = this.context.callbacks("tpl_eval").call(this.context, tpl, item);
$li = $(this.context.callbacks("highlighter").call(this.context, li, this.context.query.text));
$li.data("item-data", item);
$ul.append($li);
}
this.show();
return $ul.find("li:first").addClass("cur");
};
return View;
})();
KEY_CODE = {
DOWN: 40,
UP: 38,
ESC: 27,
TAB: 9,
ENTER: 13
};
DEFAULT_CALLBACKS = {
before_save: function(data) {
var item, _i, _len, _results;
if (!$.isArray(data)) {
return data;
}
_results = [];
for (_i = 0, _len = data.length; _i < _len; _i++) {
item = data[_i];
if ($.isPlainObject(item)) {
_results.push(item);
} else {
_results.push({
name: item
});
}
}
return _results;
},
matcher: function(flag, subtext, should_start_with_space) {
var match, regexp;
flag = flag.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, "\\$&");
if (should_start_with_space) {
flag = '(?:^|\\s)' + flag;
}
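// Two alternatives: ASCII word characters after the flag, or non-ASCII (e.g. CJK) runs.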
regexp = new RegExp(flag + '([A-Za-z0-9_\+\-]*)$|' + flag + '([^\\x00-\\xff]*)$', 'gi');
match = regexp.exec(subtext);
if (match) {
return match[2] || match[1];
} else {
return null;
}
},
filter: function(query, data, search_key) {
var item, _i, _len, _results;
_results = [];
for (_i = 0, _len = data.length; _i < _len; _i++) {
item = data[_i];
if (~item[search_key].toLowerCase().indexOf(query)) {
_results.push(item);
}
}
return _results;
},
remote_filter: null,
sorter: function(query, items, search_key) {
var item, _i, _len, _results;
if (!query) {
return items;
}
_results = [];
for (_i = 0, _len = items.length; _i < _len; _i++) {
item = items[_i];
item.atwho_order = item[search_key].toLowerCase().indexOf(query);
if (item.atwho_order > -1) {
_results.push(item);
}
}
return _results.sort(function(a, b) {
return a.atwho_order - b.atwho_order;
});
},
tpl_eval: function(tpl, map) {
try {
return tpl.replace(/\$\{([^\}]*)\}/g, function(tag, key, pos) {
return map[key];
});
} catch (error) {
return "";
}
},
highlighter: function(li, query) {
var regexp;
if (!query) {
return li;
}
regexp = new RegExp(">\\s*(\\w*)(" + query.replace("+", "\\+") + ")(\\w*)\\s*<", 'ig');
return li.replace(regexp, function(str, $1, $2, $3) {
return '> ' + $1 + '<strong>' + $2 + '</strong>' + $3 + ' <';
});
},
before_insert: function(value, $li) {
return value;
}
};
Api = {
load: function(at, data) {
var c;
if (c = this.controller(at)) {
return c.model.load(data);
}
},
getInsertedItemsWithIDs: function(at) {
var c, ids, items;
if (!(c = this.controller(at))) {
return [null, null];
}
if (at) {
at = "-" + (c.get_opt('alias') || c.at);
}
ids = [];
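// NOTE: Controller.insert tags mentions with the "vanilla-mention-" class; this selector still targets the upstream "atwho-view-flag" class.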
items = $.map(this.$inputor.find("span.atwho-view-flag" + (at || "")), function(item) {
var data;
data = $(item).data('atwho-data-item');
if (ids.indexOf(data.id) > -1) {
return;
}
if (data.id) {
ids.push(data.id);
}
return data;
});
return [ids, items];
},
getInsertedItems: function(at) {
return Api.getInsertedItemsWithIDs.apply(this, [at])[1];
},
getInsertedIDs: function(at) {
return Api.getInsertedItemsWithIDs.apply(this, [at])[0];
},
run: function() {
return this.dispatch();
}
};
Atwho = {
init: function(options) {
var $this, app;
app = ($this = $(this)).data("atwho");
if (!app) {
$this.data('atwho', (app = new App(this)));
}
app.reg(options.at, options);
return this;
}
};
$CONTAINER = $("<div id='atwho-container'></div>");
$.fn.atwho = function(method) {
var result, _args;
_args = arguments;
$('body').append($CONTAINER);
result = null;
this.filter('textarea, input, [contenteditable=true]').each(function() {
var app;
if (typeof method === 'object' || !method) {
return Atwho.init.apply(this, _args);
} else if (Api[method]) {
if (app = $(this).data('atwho')) {
return result = Api[method].apply(app, Array.prototype.slice.call(_args, 1));
}
} else {
return $.error("Method " + method + " does not exist on jQuery.atwho");
}
});
return result || this;
};
return $.fn.atwho["default"] = {
at: void 0,
alias: void 0,
data: null,
tpl: "<li data-value='${atwho-at}${name}'>${name}</li>",
insert_tpl: "<span>${atwho-data-value}</span>",
callbacks: DEFAULT_CALLBACKS,
search_key: "name",
start_with_space: true,
limit: 5,
max_len: 20,
display_timeout: 300,
////
cWindow: window
};
});
}).call(this);<|fim▁end|>
|
view.prev();
|
<|file_name|>tower_of_hanoi.rs<|end_file_name|><|fim▁begin|>fn main() {
tower_of_hanoi(5, "A", "C", "B");
}
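// Move n disks from `from` to `to`, using `aux` as the spare peg.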
fn tower_of_hanoi(n: u32, from: &str, to: &str, aux: &str) {
if n >= 1 {<|fim▁hole|>
println!("Move disk {} from {} to {}", n, from, to);
tower_of_hanoi(n - 1, aux, to, from);
}
}<|fim▁end|>
|
tower_of_hanoi(n-1, from, aux, to);
|
<|file_name|>backend.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from invenio.legacy.dbquery import run_sql, IntegrityError
# Number of retries to insert a value in the DB storage
MAX_DB_RETRY = 10
class SequenceGenerator(object):
seq_name = None
def __init__(self):
assert self.seq_name
def _value_exists(self, value):
"""
Checks if the value exists in the storage
@param value: value to be checked in storage
@type value: string
@return: result of select SQL query
@rtype: tuple
"""
return run_sql("""SELECT seq_value FROM seqSTORE
WHERE seq_value=%s AND seq_name=%s""",
(value, self.seq_name))
def _insert_value(self, value):
"""
Inserts value into storage
@param value: value to be stored
@type value: string
@return: result of insert SQL query
@rtype: tuple
"""
run_sql("""INSERT INTO seqSTORE (seq_name, seq_value)
VALUES (%s, %s)""",
(self.seq_name, value))
def _next_value(self, *args, **kwargs):
"""
Internal implementation to calculate next value in sequence
"""
raise NotImplementedError
def next_value(self, *args, **kwargs):
"""
Get the next value in the sequence
@return: next value in sequence
@rtype: string
"""
db_retries = 0
value = None
while db_retries < MAX_DB_RETRY:
value = self._next_value(*args, **kwargs)
try:
self._insert_value(value)
break
except IntegrityError:
# The value is already in the storage, get next one
db_retries += 1<|fim▁hole|><|fim▁end|>
|
return value
|
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>#
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2014 Star2Billing S.L.
#
# The primary maintainer of this project is
# Arezqui Belaid <[email protected]>
#
from django.forms import ModelForm
from django.contrib.auth.forms import UserChangeForm
from django.contrib.auth.forms import UserCreationForm, AdminPasswordChangeForm
from agent.models import AgentProfile, Agent
from agent.function_def import manager_list
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Fieldset, Div
class AgentPasswordChangeForm(AdminPasswordChangeForm):
def __init__(self, *args, **kwargs):
self.helper = FormHelper()
self.helper.form_tag = False
self.helper.form_class = 'well'
self.helper.layout = Layout(
Fieldset('', 'password1', 'password2', css_class='col-md-4')
)
super(AgentPasswordChangeForm, self).__init__(*args, **kwargs)
class AgentCreationForm(UserCreationForm):
def __init__(self, *args, **kwargs):<|fim▁hole|> self.helper.disable_csrf = False
self.helper.form_class = 'well'
self.helper.layout = Layout(
Fieldset('', 'username', 'password1', 'password2', css_class='col-md-6 col-xs-8')
)
class AgentNameChangeForm(UserChangeForm):
"""AgentNameChangeForm is used to change agent username"""
class Meta:
model = Agent
fields = ["username"]
def __init__(self, *args, **kwargs):
super(AgentNameChangeForm, self).__init__(*args, **kwargs)
self.fields['username'].widget.attrs['class'] = "form-control"
class AgentProfileForm(ModelForm):
"""AgentProfileForm is used to change agent profile"""
class Meta:
model = AgentProfile
exclude = ('is_agent', )
def __init__(self, *args, **kwargs):
super(AgentProfileForm, self).__init__(*args, **kwargs)
self.fields['manager'].choices = manager_list()
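# keyOrder is the field-ordering attribute of pre-1.7 Django forms.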
for i in self.fields.keyOrder:
self.fields[i].widget.attrs['class'] = "form-control"
class AgentChangeDetailExtendForm(ModelForm):
"""A form used to change the detail of a agent in the manager UI."""
class Meta:
model = AgentProfile
fields = ["type", "call_timeout", "contact", "status",
"no_answer_delay_time", "max_no_answer", "wrap_up_time",
"reject_delay_time", "busy_delay_time"]
def __init__(self, user, *args, **kwargs):
self.user = user
super(AgentChangeDetailExtendForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_tag = False
self.helper.disable_csrf = False
css_class = 'col-md-6'
self.helper.layout = Layout(
Div(
Div('type', css_class=css_class),
Div('call_timeout', css_class=css_class),
Div('contact', css_class=css_class),
Div('status', css_class=css_class),
Div('no_answer_delay_time', css_class=css_class),
Div('max_no_answer', css_class=css_class),
Div('wrap_up_time', css_class=css_class),
Div('reject_delay_time', css_class=css_class),
Div('busy_delay_time', css_class=css_class),
css_class='row'
),
)
class AgentDetailExtendForm(ModelForm):
"""A form used to change the detail of a agent in the Agent UI."""
class Meta:
model = AgentProfile
# fields = ["address", "city", "state", "country", "zip_code",
# "phone_no", "fax", "company_name", "company_website",
# "language", "note"]
fields = ["address"]
def __init__(self, user, *args, **kwargs):
self.user = user
super(AgentDetailExtendForm, self).__init__(*args, **kwargs)
self.fields['address'].widget.attrs['ng-model'] = "user.address"
"""
self.fields['city'].widget.attrs['ng-model'] = "user.city"
self.fields['state'].widget.attrs['ng-model'] = "user.state"
self.fields['country'].widget.attrs['ng-model'] = "user.country"
self.fields['zip_code'].widget.attrs['ng-model'] = "user.zip_code"
self.fields['phone_no'].widget.attrs['ng-model'] = "user.phone_no"
self.fields['fax'].widget.attrs['ng-model'] = "user.fax"
self.fields['company_name'].widget.attrs['ng-model'] = "user.company_name"
self.fields['company_website'].widget.attrs['ng-model'] = "user.company_website"
self.fields['language'].widget.attrs['ng-model'] = "user.language"
self.fields['note'].widget.attrs['ng-model'] = "user.note"
"""<|fim▁end|>
|
super(AgentCreationForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_tag = False
|
<|file_name|>cdn.js<|end_file_name|><|fim▁begin|>(() => {
// packages/alpinejs/src/scheduler.js
var flushPending = false;
var flushing = false;
var queue = [];
function scheduler(callback) {
queueJob(callback);
}
function queueJob(job) {
if (!queue.includes(job))
queue.push(job);
queueFlush();
}
function queueFlush() {
if (!flushing && !flushPending) {
flushPending = true;
queueMicrotask(flushJobs);
}
}
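// Jobs queued while a flush is running are appended and drained in the same pass.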
function flushJobs() {
flushPending = false;
flushing = true;
for (let i = 0; i < queue.length; i++) {
queue[i]();
}
queue.length = 0;
flushing = false;
}
// packages/alpinejs/src/reactivity.js
var reactive;
var effect;
var release;
var raw;
var shouldSchedule = true;
function disableEffectScheduling(callback) {
shouldSchedule = false;
callback();
shouldSchedule = true;
}
function setReactivityEngine(engine) {
reactive = engine.reactive;
release = engine.release;
effect = (callback) => engine.effect(callback, {scheduler: (task) => {
if (shouldSchedule) {
scheduler(task);
} else {
task();
}
}});
raw = engine.raw;
}
function overrideEffect(override) {
effect = override;
}
function elementBoundEffect(el) {
let cleanup2 = () => {
};
let wrappedEffect = (callback) => {
let effectReference = effect(callback);
if (!el._x_effects) {
el._x_effects = new Set();
el._x_runEffects = () => {
el._x_effects.forEach((i) => i());
};
}
el._x_effects.add(effectReference);
cleanup2 = () => {
if (effectReference === void 0)
return;
el._x_effects.delete(effectReference);
release(effectReference);
};
};
return [wrappedEffect, () => {
cleanup2();
}];
}
// packages/alpinejs/src/mutation.js
var onAttributeRemoveds = new WeakMap();
var onAttributeAddeds = [];
var onElRemovedByEl = new WeakMap();
var onElRemoveds = [];
var onElAddeds = [];
function onElAdded(callback) {
onElAddeds.push(callback);
}
function onElRemoved(el, callback) {
if (typeof el === "function" && callback === void 0) {
onElRemoveds.push(el);
} else {
if (!onElRemovedByEl.has(el))
onElRemovedByEl.set(el, []);
onElRemovedByEl.get(el).push(callback);
}
}
function onAttributesAdded(callback) {
onAttributeAddeds.push(callback);
}
function onAttributeRemoved(el, name, callback) {
if (!onAttributeRemoveds.has(el))
onAttributeRemoveds.set(el, {});
if (!onAttributeRemoveds.get(el)[name])
onAttributeRemoveds.get(el)[name] = [];
onAttributeRemoveds.get(el)[name].push(callback);
}
var observer = new MutationObserver(onMutate);
var currentlyObserving = false;
function startObservingMutations() {
observer.observe(document, {subtree: true, childList: true, attributes: true, attributeOldValue: true});
currentlyObserving = true;
}
function stopObservingMutations() {
observer.disconnect();
currentlyObserving = false;
}
var recordQueue = [];
var willProcessRecordQueue = false;
function flushObserver() {
recordQueue = recordQueue.concat(observer.takeRecords());
if (recordQueue.length && !willProcessRecordQueue) {
willProcessRecordQueue = true;
queueMicrotask(() => {
processRecordQueue();
willProcessRecordQueue = false;
});
}
}
function processRecordQueue() {
onMutate(recordQueue);
recordQueue.length = 0;
}
function mutateDom(callback) {
if (!currentlyObserving)
return callback();
flushObserver();
stopObservingMutations();
let result = callback();
startObservingMutations();
return result;
}
function onMutate(mutations) {
let addedNodes = [];
let removedNodes = [];
let addedAttributes = new Map();
let removedAttributes = new Map();
for (let i = 0; i < mutations.length; i++) {
if (mutations[i].target._x_ignoreMutationObserver)
continue;
if (mutations[i].type === "childList") {
mutations[i].addedNodes.forEach((node) => node.nodeType === 1 && addedNodes.push(node));
mutations[i].removedNodes.forEach((node) => node.nodeType === 1 && removedNodes.push(node));
}
if (mutations[i].type === "attributes") {
let el = mutations[i].target;
let name = mutations[i].attributeName;
let oldValue = mutations[i].oldValue;
let add2 = () => {
if (!addedAttributes.has(el))
addedAttributes.set(el, []);
addedAttributes.get(el).push({name, value: el.getAttribute(name)});
};
let remove = () => {
if (!removedAttributes.has(el))
removedAttributes.set(el, []);
removedAttributes.get(el).push(name);
};
if (el.hasAttribute(name) && oldValue === null) {
add2();
} else if (el.hasAttribute(name)) {
remove();
add2();
} else {
remove();
}
}
}
removedAttributes.forEach((attrs, el) => {
if (onAttributeRemoveds.get(el)) {
attrs.forEach((name) => {
if (onAttributeRemoveds.get(el)[name]) {
onAttributeRemoveds.get(el)[name].forEach((i) => i());
}
});
}
});
addedAttributes.forEach((attrs, el) => {
onAttributeAddeds.forEach((i) => i(el, attrs));
});
for (let node of addedNodes) {
if (removedNodes.includes(node))
continue;
onElAddeds.forEach((i) => i(node));
}
for (let node of removedNodes) {
if (addedNodes.includes(node))
continue;
if (onAttributeRemoveds.has(node)) {
Object.entries(onAttributeRemoveds.get(node)).forEach(([key, value]) => {
value.forEach((i) => i());
});
onAttributeRemoveds.delete(node);
}
if (onElRemovedByEl.has(node)) {
onElRemovedByEl.get(node).forEach((i) => i());
onElRemovedByEl.delete(node);
}
onElRemoveds.forEach((i) => i(node));
}
addedNodes = null;
removedNodes = null;
addedAttributes = null;
removedAttributes = null;
}
// packages/alpinejs/src/scope.js
function addScopeToNode(node, data2, referenceNode) {
node._x_dataStack = [data2, ...closestDataStack(referenceNode || node)];
return () => {
node._x_dataStack = node._x_dataStack.filter((i) => i !== data2);
};
}
function refreshScope(element, scope) {
let existingScope = element._x_dataStack[0];
Object.entries(scope).forEach(([key, value]) => {
existingScope[key] = value;
});
}
function closestDataStack(node) {
if (node._x_dataStack)
return node._x_dataStack;
if (node instanceof ShadowRoot) {
return closestDataStack(node.host);
}
if (!node.parentNode) {
return [];
}
return closestDataStack(node.parentNode);
}
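// Present a stack of scope objects as one object; writes go to the closest object that owns the key.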
function mergeProxies(objects) {
return new Proxy({}, {
ownKeys: () => {
return Array.from(new Set(objects.flatMap((i) => Object.keys(i))));
},
has: (target, name) => {
return objects.some((obj) => obj.hasOwnProperty(name));
},
get: (target, name) => {
return (objects.find((obj) => obj.hasOwnProperty(name)) || {})[name];
},
set: (target, name, value) => {
let closestObjectWithKey = objects.find((obj) => obj.hasOwnProperty(name));
if (closestObjectWithKey) {
closestObjectWithKey[name] = value;
} else {
objects[objects.length - 1][name] = value;
}
return true;
}
});
}
// packages/alpinejs/src/interceptor.js
function initInterceptors(data2) {
let isObject2 = (val) => typeof val === "object" && !Array.isArray(val) && val !== null;
let recurse = (obj, basePath = "") => {
Object.entries(obj).forEach(([key, value]) => {
let path = basePath === "" ? key : `${basePath}.${key}`;
if (typeof value === "object" && value !== null && value._x_interceptor) {
obj[key] = value.initialize(data2, path, key);
} else {
if (isObject2(value) && value !== obj && !(value instanceof Element)) {
recurse(value, path);
}
}
});
};
return recurse(data2);
}
function interceptor(callback, mutateObj = () => {
}) {
let obj = {
initialValue: void 0,
_x_interceptor: true,
initialize(data2, path, key) {
return callback(this.initialValue, () => get(data2, path), (value) => set(data2, path, value), path, key);
}
};
mutateObj(obj);
return (initialValue) => {
if (typeof initialValue === "object" && initialValue !== null && initialValue._x_interceptor) {
let initialize = obj.initialize.bind(obj);
obj.initialize = (data2, path, key) => {
let innerValue = initialValue.initialize(data2, path, key);
obj.initialValue = innerValue;
return initialize(data2, path, key);
};
} else {
obj.initialValue = initialValue;
}
return obj;
};
}
function get(obj, path) {
return path.split(".").reduce((carry, segment) => carry[segment], obj);
}
function set(obj, path, value) {
if (typeof path === "string")
path = path.split(".");
if (path.length === 1)
obj[path[0]] = value;
else if (path.length === 0)
throw error;
else {
if (obj[path[0]])
return set(obj[path[0]], path.slice(1), value);
else {
obj[path[0]] = {};
return set(obj[path[0]], path.slice(1), value);
}
}
}
// packages/alpinejs/src/magics.js
var magics = {};
function magic(name, callback) {
magics[name] = callback;
}
function injectMagics(obj, el) {
Object.entries(magics).forEach(([name, callback]) => {
Object.defineProperty(obj, `$${name}`, {
get() {
return callback(el, {Alpine: alpine_default, interceptor});
},
enumerable: false
});
});
return obj;
}
// packages/alpinejs/src/evaluator.js
function evaluate(el, expression, extras = {}) {
let result;
evaluateLater(el, expression)((value) => result = value, extras);
return result;
}
function evaluateLater(...args) {
return theEvaluatorFunction(...args);
}
var theEvaluatorFunction = normalEvaluator;
function setEvaluator(newEvaluator) {
theEvaluatorFunction = newEvaluator;
}
function normalEvaluator(el, expression) {
let overriddenMagics = {};
injectMagics(overriddenMagics, el);
let dataStack = [overriddenMagics, ...closestDataStack(el)];
if (typeof expression === "function") {
return generateEvaluatorFromFunction(dataStack, expression);
}
let evaluator = generateEvaluatorFromString(dataStack, expression);
return tryCatch.bind(null, el, expression, evaluator);
}
function generateEvaluatorFromFunction(dataStack, func) {
return (receiver = () => {
}, {scope = {}, params = []} = {}) => {
let result = func.apply(mergeProxies([scope, ...dataStack]), params);
runIfTypeOfFunction(receiver, result);
};
}
var evaluatorMemo = {};
function generateFunctionFromString(expression) {
if (evaluatorMemo[expression]) {
return evaluatorMemo[expression];
}
let AsyncFunction = Object.getPrototypeOf(async function() {
}).constructor;
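// Statements (if/let/const) are wrapped in an IIFE so the expression form stays valid.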
let rightSideSafeExpression = /^[\n\s]*if.*\(.*\)/.test(expression) || /^(let|const)/.test(expression) ? `(() => { ${expression} })()` : expression;
let func = new AsyncFunction(["__self", "scope"], `with (scope) { __self.result = ${rightSideSafeExpression} }; __self.finished = true; return __self.result;`);
evaluatorMemo[expression] = func;
return func;
}
function generateEvaluatorFromString(dataStack, expression) {
let func = generateFunctionFromString(expression);
return (receiver = () => {
}, {scope = {}, params = []} = {}) => {
func.result = void 0;
func.finished = false;
let completeScope = mergeProxies([scope, ...dataStack]);
let promise = func(func, completeScope);
if (func.finished) {
runIfTypeOfFunction(receiver, func.result, completeScope, params);
} else {
promise.then((result) => {
runIfTypeOfFunction(receiver, result, completeScope, params);
});
}
};
}
function runIfTypeOfFunction(receiver, value, scope, params) {
if (typeof value === "function") {
let result = value.apply(scope, params);
if (result instanceof Promise) {
result.then((i) => runIfTypeOfFunction(receiver, i, scope, params));
} else {
receiver(result);
}
} else {
receiver(value);
}
}
function tryCatch(el, expression, callback, ...args) {
try {
return callback(...args);
} catch (e) {
console.warn(`Alpine Expression Error: ${e.message}
Expression: "${expression}"
`, el);
throw e;
}
}
// packages/alpinejs/src/directives.js
var prefixAsString = "x-";
function prefix(subject = "") {
return prefixAsString + subject;
}
function setPrefix(newPrefix) {
prefixAsString = newPrefix;
}
var directiveHandlers = {};
function directive(name, callback) {
directiveHandlers[name] = callback;
}
function directives(el, attributes, originalAttributeOverride) {
let transformedAttributeMap = {};
let directives2 = Array.from(attributes).map(toTransformedAttributes((newName, oldName) => transformedAttributeMap[newName] = oldName)).filter(outNonAlpineAttributes).map(toParsedDirectives(transformedAttributeMap, originalAttributeOverride)).sort(byPriority);
return directives2.map((directive2) => {
return getDirectiveHandler(el, directive2);
});
}
var isDeferringHandlers = false;
var directiveHandlerStack = [];
function deferHandlingDirectives(callback) {
isDeferringHandlers = true;
let flushHandlers = () => {
while (directiveHandlerStack.length)
directiveHandlerStack.shift()();
};
let stopDeferring = () => {
isDeferringHandlers = false;
flushHandlers();
};
callback(flushHandlers);
stopDeferring();
}
function getDirectiveHandler(el, directive2) {
let noop = () => {
};
let handler3 = directiveHandlers[directive2.type] || noop;
let cleanups = [];
let cleanup2 = (callback) => cleanups.push(callback);
let [effect3, cleanupEffect] = elementBoundEffect(el);
cleanups.push(cleanupEffect);
let utilities = {
Alpine: alpine_default,
effect: effect3,
cleanup: cleanup2,
evaluateLater: evaluateLater.bind(evaluateLater, el),
evaluate: evaluate.bind(evaluate, el)
};
let doCleanup = () => cleanups.forEach((i) => i());
onAttributeRemoved(el, directive2.original, doCleanup);
let fullHandler = () => {
if (el._x_ignore || el._x_ignoreSelf)
return;
handler3.inline && handler3.inline(el, directive2, utilities);
handler3 = handler3.bind(handler3, el, directive2, utilities);
isDeferringHandlers ? directiveHandlerStack.push(handler3) : handler3();
};
fullHandler.runCleanups = doCleanup;
return fullHandler;
}
var startingWith = (subject, replacement) => ({name, value}) => {
if (name.startsWith(subject))
name = name.replace(subject, replacement);
return {name, value};
};
var into = (i) => i;
function toTransformedAttributes(callback) {
return ({name, value}) => {
let {name: newName, value: newValue} = attributeTransformers.reduce((carry, transform) => {
return transform(carry);
}, {name, value});
if (newName !== name)
callback(newName, name);
return {name: newName, value: newValue};
};
}
var attributeTransformers = [];
function mapAttributes(callback) {
attributeTransformers.push(callback);
}
function outNonAlpineAttributes({name}) {
return alpineAttributeRegex().test(name);
}
var alpineAttributeRegex = () => new RegExp(`^${prefixAsString}([^:^.]+)\\b`);
function toParsedDirectives(transformedAttributeMap, originalAttributeOverride) {
return ({name, value}) => {
let typeMatch = name.match(alpineAttributeRegex());
let valueMatch = name.match(/:([a-zA-Z0-9\-:]+)/);
let modifiers = name.match(/\.[^.\]]+(?=[^\]]*$)/g) || [];
let original = originalAttributeOverride || transformedAttributeMap[name] || name;
return {
type: typeMatch ? typeMatch[1] : null,
value: valueMatch ? valueMatch[1] : null,
modifiers: modifiers.map((i) => i.replace(".", "")),
expression: value,
original
};
};
}
var DEFAULT = "DEFAULT";
var directiveOrder = [
"ignore",
"ref",
"data",
"bind",
"init",
"for",
"model",
"transition",
"show",
"if",
DEFAULT,
"element"
];
function byPriority(a, b) {
let typeA = directiveOrder.indexOf(a.type) === -1 ? DEFAULT : a.type;
let typeB = directiveOrder.indexOf(b.type) === -1 ? DEFAULT : b.type;
return directiveOrder.indexOf(typeA) - directiveOrder.indexOf(typeB);
}
// packages/alpinejs/src/utils/dispatch.js
function dispatch(el, name, detail = {}) {
el.dispatchEvent(new CustomEvent(name, {
detail,
bubbles: true,
composed: true,
cancelable: true
}));
}
// packages/alpinejs/src/nextTick.js
var tickStack = [];
var isHolding = false;
function nextTick(callback) {
tickStack.push(callback);
queueMicrotask(() => {
isHolding || setTimeout(() => {
releaseNextTicks();
});
});
}
function releaseNextTicks() {
isHolding = false;
while (tickStack.length)
tickStack.shift()();
}
function holdNextTicks() {
isHolding = true;
}
// packages/alpinejs/src/utils/walk.js
function walk(el, callback) {
if (el instanceof ShadowRoot) {
Array.from(el.children).forEach((el2) => walk(el2, callback));
return;
}
let skip = false;
callback(el, () => skip = true);
if (skip)
return;
let node = el.firstElementChild;
while (node) {
walk(node, callback, false);
node = node.nextElementSibling;
}
}
// packages/alpinejs/src/utils/warn.js
function warn(message, ...args) {
console.warn(`Alpine Warning: ${message}`, ...args);
}
// packages/alpinejs/src/lifecycle.js
function start() {
if (!document.body)
warn("Unable to initialize. Trying to load Alpine before `<body>` is available. Did you forget to add `defer` in Alpine's `<script>` tag?");
dispatch(document, "alpine:init");
dispatch(document, "alpine:initializing");
startObservingMutations();
onElAdded((el) => initTree(el, walk));
onElRemoved((el) => nextTick(() => destroyTree(el)));
onAttributesAdded((el, attrs) => {
directives(el, attrs).forEach((handle) => handle());
});
let outNestedComponents = (el) => !closestRoot(el.parentNode || closestRoot(el));
Array.from(document.querySelectorAll(rootSelectors())).filter(outNestedComponents).forEach((el) => {
initTree(el);
});
dispatch(document, "alpine:initialized");
}
var rootSelectorCallbacks = [];
function rootSelectors() {
return rootSelectorCallbacks.map((fn) => fn());
}
function addRootSelector(selectorCallback) {
rootSelectorCallbacks.push(selectorCallback);
}
function closestRoot(el) {
if (rootSelectors().some((selector) => el.matches(selector)))
return el;
if (!el.parentElement)
return;
return closestRoot(el.parentElement);
}
function isRoot(el) {
return rootSelectors().some((selector) => el.matches(selector));
}
function initTree(el, walker = walk) {
deferHandlingDirectives(() => {
walker(el, (el2, skip) => {
directives(el2, el2.attributes).forEach((handle) => handle());
el2._x_ignore && skip();
});
});
}
var onDestroys = new WeakMap();
function destroyTree(root) {
walk(root, (el) => {
let callbacks = onDestroys.get(el);
callbacks && callbacks.forEach((callback) => callback());
onDestroys.delete(el);
});
}
// packages/alpinejs/src/plugin.js
function plugin(callback) {
callback(alpine_default);
}
// packages/alpinejs/src/store.js
var stores = {};
var isReactive = false;
function store(name, value) {
if (!isReactive) {
stores = reactive(stores);
isReactive = true;
}
if (value === void 0) {
return stores[name];
}
stores[name] = value;
if (typeof value === "object" && value !== null && value.hasOwnProperty("init") && typeof value.init === "function") {
stores[name].init();
}
}
function getStores() {
return stores;
}
// packages/alpinejs/src/clone.js
var isCloning = false;
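// While a clone is in progress, wrapped callbacks are skipped so side effects don't re-run.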
function skipDuringClone(callback) {
return (...args) => isCloning || callback(...args);
}
function clone(oldEl, newEl) {
newEl._x_dataStack = oldEl._x_dataStack;
isCloning = true;
dontRegisterReactiveSideEffects(() => {
cloneTree(newEl);
});
isCloning = false;
}
function cloneTree(el) {
let hasRunThroughFirstEl = false;
let shallowWalker = (el2, callback) => {
walk(el2, (el3, skip) => {
if (hasRunThroughFirstEl && isRoot(el3))
return skip();
hasRunThroughFirstEl = true;
callback(el3, skip);
});
};
initTree(el, shallowWalker);
}
function dontRegisterReactiveSideEffects(callback) {
let cache = effect;
overrideEffect((callback2, el) => {
let storedEffect = cache(callback2);
release(storedEffect);
return () => {
};
});
callback();
overrideEffect(cache);
}
// packages/alpinejs/src/datas.js
var datas = {};
function data(name, callback) {
datas[name] = callback;
}
function getNamedDataProvider(name) {
return datas[name];
}
// packages/alpinejs/src/alpine.js
var Alpine = {
get reactive() {
return reactive;
},
get release() {
return release;
},
get effect() {
return effect;
},
get raw() {
return raw;
},
version: "3.0.7",
disableEffectScheduling,
setReactivityEngine,
addRootSelector,
mapAttributes,
evaluateLater,
setEvaluator,
closestRoot,
interceptor,
mutateDom,
directive,
evaluate,
nextTick,
prefix: setPrefix,
plugin,
magic,
store,
start,
clone,
data
};
var alpine_default = Alpine;
// node_modules/@vue/shared/dist/shared.esm-bundler.js
function makeMap(str, expectsLowerCase) {
const map = Object.create(null);
const list = str.split(",");
for (let i = 0; i < list.length; i++) {
map[list[i]] = true;
}
return expectsLowerCase ? (val) => !!map[val.toLowerCase()] : (val) => !!map[val];
}
var PatchFlagNames = {
[1]: `TEXT`,
[2]: `CLASS`,
[4]: `STYLE`,
[8]: `PROPS`,
[16]: `FULL_PROPS`,
[32]: `HYDRATE_EVENTS`,
[64]: `STABLE_FRAGMENT`,
[128]: `KEYED_FRAGMENT`,
[256]: `UNKEYED_FRAGMENT`,
[512]: `NEED_PATCH`,
[1024]: `DYNAMIC_SLOTS`,
[2048]: `DEV_ROOT_FRAGMENT`,
[-1]: `HOISTED`,
[-2]: `BAIL`
};
var slotFlagsText = {
[1]: "STABLE",
[2]: "DYNAMIC",
[3]: "FORWARDED"
};
var specialBooleanAttrs = `itemscope,allowfullscreen,formnovalidate,ismap,nomodule,novalidate,readonly`;
var isBooleanAttr = /* @__PURE__ */ makeMap(specialBooleanAttrs + `,async,autofocus,autoplay,controls,default,defer,disabled,hidden,loop,open,required,reversed,scoped,seamless,checked,muted,multiple,selected`);
var EMPTY_OBJ = false ? Object.freeze({}) : {};
var EMPTY_ARR = false ? Object.freeze([]) : [];
var extend = Object.assign;
var hasOwnProperty = Object.prototype.hasOwnProperty;
var hasOwn = (val, key) => hasOwnProperty.call(val, key);
var isArray = Array.isArray;
var isMap = (val) => toTypeString(val) === "[object Map]";
var isString = (val) => typeof val === "string";
var isSymbol = (val) => typeof val === "symbol";
var isObject = (val) => val !== null && typeof val === "object";
var objectToString = Object.prototype.toString;
var toTypeString = (value) => objectToString.call(value);
var toRawType = (value) => {
return toTypeString(value).slice(8, -1);
};
var isIntegerKey = (key) => isString(key) && key !== "NaN" && key[0] !== "-" && "" + parseInt(key, 10) === key;
var cacheStringFunction = (fn) => {
const cache = Object.create(null);
return (str) => {
const hit = cache[str];
return hit || (cache[str] = fn(str));
};
};
var camelizeRE = /-(\w)/g;
var camelize = cacheStringFunction((str) => {
return str.replace(camelizeRE, (_, c) => c ? c.toUpperCase() : "");
});
var hyphenateRE = /\B([A-Z])/g;
var hyphenate = cacheStringFunction((str) => str.replace(hyphenateRE, "-$1").toLowerCase());
var capitalize = cacheStringFunction((str) => str.charAt(0).toUpperCase() + str.slice(1));
var toHandlerKey = cacheStringFunction((str) => str ? `on${capitalize(str)}` : ``);
var hasChanged = (value, oldValue) => value !== oldValue && (value === value || oldValue === oldValue);
// node_modules/@vue/reactivity/dist/reactivity.esm-bundler.js
var targetMap = new WeakMap();
var effectStack = [];
var activeEffect;
var ITERATE_KEY = Symbol(false ? "iterate" : "");
var MAP_KEY_ITERATE_KEY = Symbol(false ? "Map key iterate" : "");
function isEffect(fn) {
return fn && fn._isEffect === true;
}
function effect2(fn, options = EMPTY_OBJ) {
if (isEffect(fn)) {
fn = fn.raw;
}
const effect3 = createReactiveEffect(fn, options);
if (!options.lazy) {
effect3();
}
return effect3;
}
function stop(effect3) {
if (effect3.active) {
cleanup(effect3);
if (effect3.options.onStop) {
effect3.options.onStop();
}
effect3.active = false;
}
}
var uid = 0;
function createReactiveEffect(fn, options) {
const effect3 = function reactiveEffect() {
if (!effect3.active) {
return fn();
}
if (!effectStack.includes(effect3)) {
cleanup(effect3);
try {
enableTracking();
effectStack.push(effect3);
activeEffect = effect3;
return fn();
} finally {
effectStack.pop();
resetTracking();
activeEffect = effectStack[effectStack.length - 1];
}
}
};
effect3.id = uid++;
effect3.allowRecurse = !!options.allowRecurse;
effect3._isEffect = true;
effect3.active = true;
effect3.raw = fn;
effect3.deps = [];
effect3.options = options;
return effect3;
}
function cleanup(effect3) {
const {deps} = effect3;
if (deps.length) {
for (let i = 0; i < deps.length; i++) {
deps[i].delete(effect3);
}
deps.length = 0;
}
}
var shouldTrack = true;
var trackStack = [];
function pauseTracking() {
trackStack.push(shouldTrack);
shouldTrack = false;
}
function enableTracking() {
trackStack.push(shouldTrack);
shouldTrack = true;
}
function resetTracking() {
const last = trackStack.pop();
shouldTrack = last === void 0 ? true : last;
}
function track(target, type, key) {
if (!shouldTrack || activeEffect === void 0) {
return;
}
let depsMap = targetMap.get(target);
if (!depsMap) {
targetMap.set(target, depsMap = new Map());
}
let dep = depsMap.get(key);
if (!dep) {
depsMap.set(key, dep = new Set());
}
if (!dep.has(activeEffect)) {
dep.add(activeEffect);
activeEffect.deps.push(dep);
if (false) {
activeEffect.options.onTrack({
effect: activeEffect,
target,
type,
key
});
}
}
}
function trigger(target, type, key, newValue, oldValue, oldTarget) {
const depsMap = targetMap.get(target);
if (!depsMap) {
return;
}
const effects = new Set();
const add2 = (effectsToAdd) => {
if (effectsToAdd) {
effectsToAdd.forEach((effect3) => {
if (effect3 !== activeEffect || effect3.allowRecurse) {
effects.add(effect3);
}
});
}
};
if (type === "clear") {
depsMap.forEach(add2);
} else if (key === "length" && isArray(target)) {
depsMap.forEach((dep, key2) => {
if (key2 === "length" || key2 >= newValue) {
add2(dep);
}
});
} else {
if (key !== void 0) {
add2(depsMap.get(key));
}
switch (type) {
case "add":
if (!isArray(target)) {
add2(depsMap.get(ITERATE_KEY));
if (isMap(target)) {
add2(depsMap.get(MAP_KEY_ITERATE_KEY));
}
} else if (isIntegerKey(key)) {
add2(depsMap.get("length"));
}
break;
case "delete":
if (!isArray(target)) {
add2(depsMap.get(ITERATE_KEY));
if (isMap(target)) {
add2(depsMap.get(MAP_KEY_ITERATE_KEY));
}
}
break;
case "set":
if (isMap(target)) {
add2(depsMap.get(ITERATE_KEY));
}
break;
}
}
const run = (effect3) => {
if (false) {
effect3.options.onTrigger({
effect: effect3,
target,
key,
type,
newValue,
oldValue,
oldTarget
});
}
if (effect3.options.scheduler) {
effect3.options.scheduler(effect3);
} else {
effect3();
}
};
effects.forEach(run);
}
var isNonTrackableKeys = /* @__PURE__ */ makeMap(`__proto__,__v_isRef,__isVue`);
var builtInSymbols = new Set(Object.getOwnPropertyNames(Symbol).map((key) => Symbol[key]).filter(isSymbol));
var get2 = /* @__PURE__ */ createGetter();
var shallowGet = /* @__PURE__ */ createGetter(false, true);
var readonlyGet = /* @__PURE__ */ createGetter(true);
var shallowReadonlyGet = /* @__PURE__ */ createGetter(true, true);
var arrayInstrumentations = {};
["includes", "indexOf", "lastIndexOf"].forEach((key) => {
const method = Array.prototype[key];
arrayInstrumentations[key] = function(...args) {
const arr = toRaw(this);
for (let i = 0, l = this.length; i < l; i++) {
track(arr, "get", i + "");
}
const res = method.apply(arr, args);
if (res === -1 || res === false) {
return method.apply(arr, args.map(toRaw));
} else {
return res;
}
};
});
["push", "pop", "shift", "unshift", "splice"].forEach((key) => {
const method = Array.prototype[key];
arrayInstrumentations[key] = function(...args) {
pauseTracking();
const res = method.apply(this, args);
resetTracking();
return res;
};
});
function createGetter(isReadonly = false, shallow = false) {
return function get3(target, key, receiver) {
if (key === "__v_isReactive") {
return !isReadonly;
} else if (key === "__v_isReadonly") {
return isReadonly;
} else if (key === "__v_raw" && receiver === (isReadonly ? shallow ? shallowReadonlyMap : readonlyMap : shallow ? shallowReactiveMap : reactiveMap).get(target)) {
return target;
}
const targetIsArray = isArray(target);
if (!isReadonly && targetIsArray && hasOwn(arrayInstrumentations, key)) {
return Reflect.get(arrayInstrumentations, key, receiver);
}
const res = Reflect.get(target, key, receiver);
if (isSymbol(key) ? builtInSymbols.has(key) : isNonTrackableKeys(key)) {
return res;
}
if (!isReadonly) {
track(target, "get", key);
}
if (shallow) {
return res;
}
if (isRef(res)) {
const shouldUnwrap = !targetIsArray || !isIntegerKey(key);
return shouldUnwrap ? res.value : res;
}
if (isObject(res)) {
return isReadonly ? readonly(res) : reactive2(res);
}
return res;
};
}
var set2 = /* @__PURE__ */ createSetter();
var shallowSet = /* @__PURE__ */ createSetter(true);
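// The setter trap triggers "add" for brand-new keys and "set" for changed values.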
function createSetter(shallow = false) {
return function set3(target, key, value, receiver) {
let oldValue = target[key];
if (!shallow) {
value = toRaw(value);
oldValue = toRaw(oldValue);
if (!isArray(target) && isRef(oldValue) && !isRef(value)) {
oldValue.value = value;
return true;
}
}
const hadKey = isArray(target) && isIntegerKey(key) ? Number(key) < target.length : hasOwn(target, key);
const result = Reflect.set(target, key, value, receiver);
if (target === toRaw(receiver)) {
if (!hadKey) {
trigger(target, "add", key, value);
} else if (hasChanged(value, oldValue)) {
trigger(target, "set", key, value, oldValue);
}
}
return result;
};
}
function deleteProperty(target, key) {
const hadKey = hasOwn(target, key);
const oldValue = target[key];
const result = Reflect.deleteProperty(target, key);
if (result && hadKey) {
trigger(target, "delete", key, void 0, oldValue);
}
return result;
}
function has(target, key) {
const result = Reflect.has(target, key);
if (!isSymbol(key) || !builtInSymbols.has(key)) {
track(target, "has", key);
}
return result;
}
function ownKeys(target) {
track(target, "iterate", isArray(target) ? "length" : ITERATE_KEY);
return Reflect.ownKeys(target);
}
var mutableHandlers = {
get: get2,
set: set2,
deleteProperty,
has,
ownKeys
};
var readonlyHandlers = {
get: readonlyGet,
set(target, key) {
if (false) {
console.warn(`Set operation on key "${String(key)}" failed: target is readonly.`, target);
}
return true;
},
deleteProperty(target, key) {
if (false) {
console.warn(`Delete operation on key "${String(key)}" failed: target is readonly.`, target);
}
return true;
}
};
var shallowReactiveHandlers = extend({}, mutableHandlers, {
get: shallowGet,
set: shallowSet
});
var shallowReadonlyHandlers = extend({}, readonlyHandlers, {
get: shallowReadonlyGet
});
var toReactive = (value) => isObject(value) ? reactive2(value) : value;
var toReadonly = (value) => isObject(value) ? readonly(value) : value;
var toShallow = (value) => value;
var getProto = (v) => Reflect.getPrototypeOf(v);
function get$1(target, key, isReadonly = false, isShallow = false) {
target = target["__v_raw"];
const rawTarget = toRaw(target);
const rawKey = toRaw(key);
if (key !== rawKey) {
!isReadonly && track(rawTarget, "get", key);
}
!isReadonly && track(rawTarget, "get", rawKey);
const {has: has2} = getProto(rawTarget);
const wrap = isShallow ? toShallow : isReadonly ? toReadonly : toReactive;
if (has2.call(rawTarget, key)) {
return wrap(target.get(key));
} else if (has2.call(rawTarget, rawKey)) {
return wrap(target.get(rawKey));
} else if (target !== rawTarget) {
target.get(key);
}
}
function has$1(key, isReadonly = false) {
const target = this["__v_raw"];
const rawTarget = toRaw(target);
const rawKey = toRaw(key);
if (key !== rawKey) {
!isReadonly && track(rawTarget, "has", key);
}
!isReadonly && track(rawTarget, "has", rawKey);
return key === rawKey ? target.has(key) : target.has(key) || target.has(rawKey);
}
function size(target, isReadonly = false) {
target = target["__v_raw"];
!isReadonly && track(toRaw(target), "iterate", ITERATE_KEY);
return Reflect.get(target, "size", target);
}
function add(value) {
value = toRaw(value);
const target = toRaw(this);
const proto = getProto(target);
const hadKey = proto.has.call(target, value);
if (!hadKey) {
target.add(value);
trigger(target, "add", value, value);
}
return this;
}
function set$1(key, value) {
value = toRaw(value);
const target = toRaw(this);
const {has: has2, get: get3} = getProto(target);
let hadKey = has2.call(target, key);
if (!hadKey) {
key = toRaw(key);
hadKey = has2.call(target, key);
} else if (false) {
checkIdentityKeys(target, has2, key);
}
const oldValue = get3.call(target, key);
target.set(key, value);
if (!hadKey) {
trigger(target, "add", key, value);
} else if (hasChanged(value, oldValue)) {
trigger(target, "set", key, value, oldValue);
}
return this;
}
function deleteEntry(key) {
const target = toRaw(this);
const {has: has2, get: get3} = getProto(target);
let hadKey = has2.call(target, key);
if (!hadKey) {
key = toRaw(key);
hadKey = has2.call(target, key);
} else if (false) {
checkIdentityKeys(target, has2, key);
}
const oldValue = get3 ? get3.call(target, key) : void 0;
const result = target.delete(key);
if (hadKey) {
trigger(target, "delete", key, void 0, oldValue);
}
return result;
}
function clear() {
const target = toRaw(this);
const hadItems = target.size !== 0;
const oldTarget = false ? isMap(target) ? new Map(target) : new Set(target) : void 0;
const result = target.clear();
if (hadItems) {
trigger(target, "clear", void 0, void 0, oldTarget);
}
return result;
}
function createForEach(isReadonly, isShallow) {
return function forEach(callback, thisArg) {
const observed = this;
const target = observed["__v_raw"];
const rawTarget = toRaw(target);
const wrap = isShallow ? toShallow : isReadonly ? toReadonly : toReactive;
!isReadonly && track(rawTarget, "iterate", ITERATE_KEY);
return target.forEach((value, key) => {
return callback.call(thisArg, wrap(value), wrap(key), observed);
});
};
}
function createIterableMethod(method, isReadonly, isShallow) {
return function(...args) {
const target = this["__v_raw"];
const rawTarget = toRaw(target);
const targetIsMap = isMap(rawTarget);
const isPair = method === "entries" || method === Symbol.iterator && targetIsMap;
const isKeyOnly = method === "keys" && targetIsMap;
const innerIterator = target[method](...args);
const wrap = isShallow ? toShallow : isReadonly ? toReadonly : toReactive;
!isReadonly && track(rawTarget, "iterate", isKeyOnly ? MAP_KEY_ITERATE_KEY : ITERATE_KEY);
return {
next() {
const {value, done} = innerIterator.next();
return done ? {value, done} : {
value: isPair ? [wrap(value[0]), wrap(value[1])] : wrap(value),
done
};
},
[Symbol.iterator]() {
return this;
}
};
};
}
function createReadonlyMethod(type) {
return function(...args) {
if (false) {
const key = args[0] ? `on key "${args[0]}" ` : ``;
console.warn(`${capitalize(type)} operation ${key}failed: target is readonly.`, toRaw(this));
}
return type === "delete" ? false : this;
};
}
var mutableInstrumentations = {
get(key) {
return get$1(this, key);
},
get size() {
return size(this);
},
has: has$1,
add,
set: set$1,
delete: deleteEntry,
clear,
forEach: createForEach(false, false)
};
var shallowInstrumentations = {
get(key) {
return get$1(this, key, false, true);
},
get size() {
return size(this);
},
has: has$1,
add,
set: set$1,
delete: deleteEntry,
clear,
forEach: createForEach(false, true)
};
var readonlyInstrumentations = {
get(key) {
return get$1(this, key, true);
},
get size() {
return size(this, true);
},
has(key) {
return has$1.call(this, key, true);
},
add: createReadonlyMethod("add"),
set: createReadonlyMethod("set"),
delete: createReadonlyMethod("delete"),
clear: createReadonlyMethod("clear"),
forEach: createForEach(true, false)
};
var shallowReadonlyInstrumentations = {
get(key) {
return get$1(this, key, true, true);
},
get size() {
return size(this, true);
},
has(key) {
return has$1.call(this, key, true);
},
add: createReadonlyMethod("add"),
set: createReadonlyMethod("set"),
delete: createReadonlyMethod("delete"),
clear: createReadonlyMethod("clear"),
forEach: createForEach(true, true)
};
var iteratorMethods = ["keys", "values", "entries", Symbol.iterator];
iteratorMethods.forEach((method) => {
mutableInstrumentations[method] = createIterableMethod(method, false, false);
readonlyInstrumentations[method] = createIterableMethod(method, true, false);
shallowInstrumentations[method] = createIterableMethod(method, false, true);
shallowReadonlyInstrumentations[method] = createIterableMethod(method, true, true);
});
function createInstrumentationGetter(isReadonly, shallow) {
const instrumentations = shallow ? isReadonly ? shallowReadonlyInstrumentations : shallowInstrumentations : isReadonly ? readonlyInstrumentations : mutableInstrumentations;
return (target, key, receiver) => {
if (key === "__v_isReactive") {
return !isReadonly;
} else if (key === "__v_isReadonly") {
return isReadonly;
} else if (key === "__v_raw") {
return target;
}
return Reflect.get(hasOwn(instrumentations, key) && key in target ? instrumentations : target, key, receiver);
};
}
var mutableCollectionHandlers = {
get: createInstrumentationGetter(false, false)
};
var shallowCollectionHandlers = {
get: createInstrumentationGetter(false, true)
};
var readonlyCollectionHandlers = {
get: createInstrumentationGetter(true, false)
};
var shallowReadonlyCollectionHandlers = {
get: createInstrumentationGetter(true, true)
};
var reactiveMap = new WeakMap();
var shallowReactiveMap = new WeakMap();
var readonlyMap = new WeakMap();
var shallowReadonlyMap = new WeakMap();
function targetTypeMap(rawType) {
switch (rawType) {
case "Object":
case "Array":
return 1;
case "Map":
case "Set":
case "WeakMap":
case "WeakSet":
return 2;
default:
return 0;
}
}
function getTargetType(value) {
return value["__v_skip"] || !Object.isExtensible(value) ? 0 : targetTypeMap(toRawType(value));
}
function reactive2(target) {
if (target && target["__v_isReadonly"]) {
return target;
}
return createReactiveObject(target, false, mutableHandlers, mutableCollectionHandlers, reactiveMap);
}
function readonly(target) {
return createReactiveObject(target, true, readonlyHandlers, readonlyCollectionHandlers, readonlyMap);
}
function createReactiveObject(target, isReadonly, baseHandlers, collectionHandlers, proxyMap) {
if (!isObject(target)) {
if (false) {
console.warn(`value cannot be made reactive: ${String(target)}`);
}
return target;
}
if (target["__v_raw"] && !(isReadonly && target["__v_isReactive"])) {
return target;
}
const existingProxy = proxyMap.get(target);
if (existingProxy) {
return existingProxy;
}
const targetType = getTargetType(target);
if (targetType === 0) {
return target;
}
const proxy = new Proxy(target, targetType === 2 ? collectionHandlers : baseHandlers);
proxyMap.set(target, proxy);
return proxy;
}
function toRaw(observed) {
return observed && toRaw(observed["__v_raw"]) || observed;
}
function isRef(r) {
return Boolean(r && r.__v_isRef === true);
}
// packages/alpinejs/src/magics/$nextTick.js
magic("nextTick", () => nextTick);
// packages/alpinejs/src/magics/$dispatch.js
magic("dispatch", (el) => dispatch.bind(dispatch, el));
// packages/alpinejs/src/magics/$watch.js
magic("watch", (el) => (key, callback) => {
let evaluate2 = evaluateLater(el, key);
let firstTime = true;
let oldValue;
effect(() => evaluate2((value) => {
let div = document.createElement("div");
div.dataset.throwAway = value;
if (!firstTime)
callback(value, oldValue);
oldValue = value;
firstTime = false;
}));
});
// packages/alpinejs/src/magics/$store.js
magic("store", getStores);
// packages/alpinejs/src/magics/$refs.js
magic("refs", (el) => closestRoot(el)._x_refs || {});
// packages/alpinejs/src/magics/$el.js
magic("el", (el) => el);
// packages/alpinejs/src/utils/classes.js
function setClasses(el, value) {
if (Array.isArray(value)) {
return setClassesFromString(el, value.join(" "));
} else if (typeof value === "object" && value !== null) {
return setClassesFromObject(el, value);
}
return setClassesFromString(el, value);
}
function setClassesFromString(el, classString) {
let split = (classString2) => classString2.split(" ").filter(Boolean);
let missingClasses = (classString2) => classString2.split(" ").filter((i) => !el.classList.contains(i)).filter(Boolean);
let addClassesAndReturnUndo = (classes) => {
el.classList.add(...classes);
return () => {
el.classList.remove(...classes);
};
};
   classString = classString === true ? "" : (classString || "");
return addClassesAndReturnUndo(missingClasses(classString));
}
function setClassesFromObject(el, classObject) {
let split = (classString) => classString.split(" ").filter(Boolean);
let forAdd = Object.entries(classObject).flatMap(([classString, bool]) => bool ? split(classString) : false).filter(Boolean);
let forRemove = Object.entries(classObject).flatMap(([classString, bool]) => !bool ? split(classString) : false).filter(Boolean);
let added = [];
let removed = [];
forRemove.forEach((i) => {
if (el.classList.contains(i)) {
el.classList.remove(i);
removed.push(i);
}
});
forAdd.forEach((i) => {
if (!el.classList.contains(i)) {
el.classList.add(i);
added.push(i);
}
});
return () => {
removed.forEach((i) => el.classList.add(i));
added.forEach((i) => el.classList.remove(i));
};
}
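// Hedged usage sketch (class names invented for the example):
//
//   const undo = setClassesFromObject(el, { "font-bold underline": true, hidden: false });
//   // adds "font-bold" and "underline" if absent, removes "hidden" if present;
//   // calling undo() later restores only the classes this call actually changed.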
// packages/alpinejs/src/utils/styles.js
function setStyles(el, value) {
if (typeof value === "object" && value !== null) {
return setStylesFromObject(el, value);
}
return setStylesFromString(el, value);
}
function setStylesFromObject(el, value) {
let previousStyles = {};
Object.entries(value).forEach(([key, value2]) => {
previousStyles[key] = el.style[key];
el.style[key] = value2;
});
setTimeout(() => {
if (el.style.length === 0) {
el.removeAttribute("style");
}
});
return () => {
setStyles(el, previousStyles);
};
}
function setStylesFromString(el, value) {
   let cache = el.getAttribute("style");
el.setAttribute("style", value);
return () => {
el.setAttribute("style", cache);
};
}
// packages/alpinejs/src/utils/once.js
function once(callback, fallback = () => {
}) {
let called = false;
return function() {
if (!called) {
called = true;
callback.apply(this, arguments);
} else {
fallback.apply(this, arguments);
}
};
}
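// Minimal sketch of the once() contract (callbacks invented for the example):
//
//   const finish = once(() => console.log("ran"), () => console.log("already ran"));
//   finish(); // "ran"
//   finish(); // "already ran" (the fallback fires on every later call)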
// packages/alpinejs/src/directives/x-transition.js
directive("transition", (el, {value, modifiers, expression}) => {
if (!expression) {
registerTransitionsFromHelper(el, modifiers, value);
} else {
registerTransitionsFromClassString(el, expression, value);
}
});
function registerTransitionsFromClassString(el, classString, stage) {
registerTransitionObject(el, setClasses, "");
let directiveStorageMap = {
enter: (classes) => {
el._x_transition.enter.during = classes;
},
"enter-start": (classes) => {
el._x_transition.enter.start = classes;
},
"enter-end": (classes) => {
el._x_transition.enter.end = classes;
},
leave: (classes) => {
el._x_transition.leave.during = classes;
},
"leave-start": (classes) => {
el._x_transition.leave.start = classes;
},
"leave-end": (classes) => {
el._x_transition.leave.end = classes;
}
};
directiveStorageMap[stage](classString);
}
function registerTransitionsFromHelper(el, modifiers, stage) {
registerTransitionObject(el, setStyles);
let doesntSpecify = !modifiers.includes("in") && !modifiers.includes("out") && !stage;
let transitioningIn = doesntSpecify || modifiers.includes("in") || ["enter"].includes(stage);
let transitioningOut = doesntSpecify || modifiers.includes("out") || ["leave"].includes(stage);
if (modifiers.includes("in") && !doesntSpecify) {
modifiers = modifiers.filter((i, index) => index < modifiers.indexOf("out"));
}
if (modifiers.includes("out") && !doesntSpecify) {
modifiers = modifiers.filter((i, index) => index > modifiers.indexOf("out"));
}
let wantsAll = !modifiers.includes("opacity") && !modifiers.includes("scale");
let wantsOpacity = wantsAll || modifiers.includes("opacity");
let wantsScale = wantsAll || modifiers.includes("scale");
let opacityValue = wantsOpacity ? 0 : 1;
let scaleValue = wantsScale ? modifierValue(modifiers, "scale", 95) / 100 : 1;
let delay = modifierValue(modifiers, "delay", 0);
let origin = modifierValue(modifiers, "origin", "center");
let property = "opacity, transform";
let durationIn = modifierValue(modifiers, "duration", 150) / 1e3;
let durationOut = modifierValue(modifiers, "duration", 75) / 1e3;
let easing = `cubic-bezier(0.4, 0.0, 0.2, 1)`;
if (transitioningIn) {
el._x_transition.enter.during = {
transformOrigin: origin,
transitionDelay: delay,
transitionProperty: property,
transitionDuration: `${durationIn}s`,
transitionTimingFunction: easing
};
el._x_transition.enter.start = {
opacity: opacityValue,
transform: `scale(${scaleValue})`
};
el._x_transition.enter.end = {
opacity: 1,
transform: `scale(1)`
};
}
if (transitioningOut) {
el._x_transition.leave.during = {
transformOrigin: origin,
transitionDelay: delay,
transitionProperty: property,
transitionDuration: `${durationOut}s`,
transitionTimingFunction: easing
};
el._x_transition.leave.start = {
opacity: 1,
transform: `scale(1)`
};
el._x_transition.leave.end = {
opacity: opacityValue,
transform: `scale(${scaleValue})`
};
}
}
function registerTransitionObject(el, setFunction, defaultValue = {}) {
if (!el._x_transition)
el._x_transition = {
enter: {during: defaultValue, start: defaultValue, end: defaultValue},
leave: {during: defaultValue, start: defaultValue, end: defaultValue},
in(before = () => {
}, after = () => {
}) {<|fim▁hole|> start: this.enter.start,
end: this.enter.end,
entering: true
}, before, after);
},
out(before = () => {
}, after = () => {
}) {
transition(el, setFunction, {
during: this.leave.during,
start: this.leave.start,
end: this.leave.end,
entering: false
}, before, after);
}
};
}
window.Element.prototype._x_toggleAndCascadeWithTransitions = function(el, value, show, hide) {
let clickAwayCompatibleShow = () => requestAnimationFrame(show);
if (value) {
el._x_transition ? el._x_transition.in(show) : clickAwayCompatibleShow();
return;
}
el._x_hidePromise = el._x_transition ? new Promise((resolve, reject) => {
el._x_transition.out(() => {
}, () => resolve(hide));
el._x_transitioning.beforeCancel(() => reject({isFromCancelledTransition: true}));
}) : Promise.resolve(hide);
queueMicrotask(() => {
let closest = closestHide(el);
if (closest) {
if (!closest._x_hideChildren)
closest._x_hideChildren = [];
closest._x_hideChildren.push(el);
} else {
queueMicrotask(() => {
let hideAfterChildren = (el2) => {
let carry = Promise.all([
el2._x_hidePromise,
...(el2._x_hideChildren || []).map(hideAfterChildren)
]).then(([i]) => i());
delete el2._x_hidePromise;
delete el2._x_hideChildren;
return carry;
};
hideAfterChildren(el).catch((e) => {
if (!e.isFromCancelledTransition)
throw e;
});
});
}
});
};
function closestHide(el) {
let parent = el.parentNode;
if (!parent)
return;
return parent._x_hidePromise ? parent : closestHide(parent);
}
function transition(el, setFunction, {during, start: start2, end, entering} = {}, before = () => {
}, after = () => {
}) {
if (el._x_transitioning)
el._x_transitioning.cancel();
if (Object.keys(during).length === 0 && Object.keys(start2).length === 0 && Object.keys(end).length === 0) {
before();
after();
return;
}
let undoStart, undoDuring, undoEnd;
performTransition(el, {
start() {
undoStart = setFunction(el, start2);
},
during() {
undoDuring = setFunction(el, during);
},
before,
end() {
undoStart();
undoEnd = setFunction(el, end);
},
after,
cleanup() {
undoDuring();
undoEnd();
}
}, entering);
}
function performTransition(el, stages, entering) {
let interrupted, reachedBefore, reachedEnd;
let finish = once(() => {
mutateDom(() => {
interrupted = true;
if (!reachedBefore)
stages.before();
if (!reachedEnd) {
stages.end();
releaseNextTicks();
}
stages.after();
if (el.isConnected)
stages.cleanup();
delete el._x_transitioning;
});
});
el._x_transitioning = {
beforeCancels: [],
beforeCancel(callback) {
this.beforeCancels.push(callback);
},
cancel: once(function() {
while (this.beforeCancels.length) {
this.beforeCancels.shift()();
}
finish();
}),
finish,
entering
};
mutateDom(() => {
stages.start();
stages.during();
});
holdNextTicks();
requestAnimationFrame(() => {
if (interrupted)
return;
let duration = Number(getComputedStyle(el).transitionDuration.replace(/,.*/, "").replace("s", "")) * 1e3;
let delay = Number(getComputedStyle(el).transitionDelay.replace(/,.*/, "").replace("s", "")) * 1e3;
if (duration === 0)
duration = Number(getComputedStyle(el).animationDuration.replace("s", "")) * 1e3;
mutateDom(() => {
stages.before();
});
reachedBefore = true;
requestAnimationFrame(() => {
if (interrupted)
return;
mutateDom(() => {
stages.end();
});
releaseNextTicks();
setTimeout(el._x_transitioning.finish, duration + delay);
reachedEnd = true;
});
});
}
function modifierValue(modifiers, key, fallback) {
if (modifiers.indexOf(key) === -1)
return fallback;
const rawValue = modifiers[modifiers.indexOf(key) + 1];
if (!rawValue)
return fallback;
if (key === "scale") {
if (isNaN(rawValue))
return fallback;
}
if (key === "duration") {
let match = rawValue.match(/([0-9]+)ms/);
if (match)
return match[1];
}
if (key === "origin") {
if (["top", "right", "left", "center", "bottom"].includes(modifiers[modifiers.indexOf(key) + 2])) {
return [rawValue, modifiers[modifiers.indexOf(key) + 2]].join(" ");
}
}
return rawValue;
}
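// Example readings (hypothetical modifier lists, as produced by markup such as
// x-transition.duration.500ms or x-transition.origin.top.right):
//
//   modifierValue(["duration", "500ms"], "duration", 150)          // -> "500"
//   modifierValue(["scale", "80"], "scale", 95)                    // -> "80"
//   modifierValue(["origin", "top", "right"], "origin", "center")  // -> "top right"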
// packages/alpinejs/src/directives/x-ignore.js
var handler = () => {
};
handler.inline = (el, {modifiers}, {cleanup: cleanup2}) => {
modifiers.includes("self") ? el._x_ignoreSelf = true : el._x_ignore = true;
cleanup2(() => {
modifiers.includes("self") ? delete el._x_ignoreSelf : delete el._x_ignore;
});
};
directive("ignore", handler);
// packages/alpinejs/src/directives/x-effect.js
directive("effect", (el, {expression}, {effect: effect3}) => effect3(evaluateLater(el, expression)));
// packages/alpinejs/src/utils/bind.js
function bind(el, name, value, modifiers = []) {
if (!el._x_bindings)
el._x_bindings = reactive({});
el._x_bindings[name] = value;
name = modifiers.includes("camel") ? camelCase(name) : name;
switch (name) {
case "value":
bindInputValue(el, value);
break;
case "style":
bindStyles(el, value);
break;
case "class":
bindClasses(el, value);
break;
default:
bindAttribute(el, name, value);
break;
}
}
function bindInputValue(el, value) {
if (el.type === "radio") {
if (el.attributes.value === void 0) {
el.value = value;
}
if (window.fromModel) {
el.checked = checkedAttrLooseCompare(el.value, value);
}
} else if (el.type === "checkbox") {
if (Number.isInteger(value)) {
el.value = value;
} else if (!Number.isInteger(value) && !Array.isArray(value) && typeof value !== "boolean" && ![null, void 0].includes(value)) {
el.value = String(value);
} else {
if (Array.isArray(value)) {
el.checked = value.some((val) => checkedAttrLooseCompare(val, el.value));
} else {
el.checked = !!value;
}
}
} else if (el.tagName === "SELECT") {
updateSelect(el, value);
} else {
if (el.value === value)
return;
el.value = value;
}
}
function bindClasses(el, value) {
if (el._x_undoAddedClasses)
el._x_undoAddedClasses();
el._x_undoAddedClasses = setClasses(el, value);
}
function bindStyles(el, value) {
if (el._x_undoAddedStyles)
el._x_undoAddedStyles();
el._x_undoAddedStyles = setStyles(el, value);
}
function bindAttribute(el, name, value) {
if ([null, void 0, false].includes(value) && attributeShouldntBePreservedIfFalsy(name)) {
el.removeAttribute(name);
} else {
if (isBooleanAttr2(name))
value = name;
setIfChanged(el, name, value);
}
}
function setIfChanged(el, attrName, value) {
if (el.getAttribute(attrName) != value) {
el.setAttribute(attrName, value);
}
}
function updateSelect(el, value) {
const arrayWrappedValue = [].concat(value).map((value2) => {
return value2 + "";
});
Array.from(el.options).forEach((option) => {
option.selected = arrayWrappedValue.includes(option.value);
});
}
function camelCase(subject) {
return subject.toLowerCase().replace(/-(\w)/g, (match, char) => char.toUpperCase());
}
function checkedAttrLooseCompare(valueA, valueB) {
return valueA == valueB;
}
function isBooleanAttr2(attrName) {
const booleanAttributes = [
"disabled",
"checked",
"required",
"readonly",
"hidden",
"open",
"selected",
"autofocus",
"itemscope",
"multiple",
"novalidate",
"allowfullscreen",
"allowpaymentrequest",
"formnovalidate",
"autoplay",
"controls",
"loop",
"muted",
"playsinline",
"default",
"ismap",
"reversed",
"async",
"defer",
"nomodule"
];
return booleanAttributes.includes(attrName);
}
function attributeShouldntBePreservedIfFalsy(name) {
return !["aria-pressed", "aria-checked"].includes(name);
}
// packages/alpinejs/src/utils/on.js
function on(el, event, modifiers, callback) {
let listenerTarget = el;
let handler3 = (e) => callback(e);
let options = {};
let wrapHandler = (callback2, wrapper) => (e) => wrapper(callback2, e);
if (modifiers.includes("camel"))
event = camelCase2(event);
if (modifiers.includes("passive"))
options.passive = true;
if (modifiers.includes("window"))
listenerTarget = window;
if (modifiers.includes("document"))
listenerTarget = document;
if (modifiers.includes("prevent"))
handler3 = wrapHandler(handler3, (next, e) => {
e.preventDefault();
next(e);
});
if (modifiers.includes("stop"))
handler3 = wrapHandler(handler3, (next, e) => {
e.stopPropagation();
next(e);
});
if (modifiers.includes("self"))
handler3 = wrapHandler(handler3, (next, e) => {
e.target === el && next(e);
});
if (modifiers.includes("away") || modifiers.includes("outside")) {
listenerTarget = document;
handler3 = wrapHandler(handler3, (next, e) => {
if (el.contains(e.target))
return;
if (el.offsetWidth < 1 && el.offsetHeight < 1)
return;
next(e);
});
}
handler3 = wrapHandler(handler3, (next, e) => {
if (isKeyEvent(event)) {
if (isListeningForASpecificKeyThatHasntBeenPressed(e, modifiers)) {
return;
}
}
next(e);
});
if (modifiers.includes("debounce")) {
let nextModifier = modifiers[modifiers.indexOf("debounce") + 1] || "invalid-wait";
let wait = isNumeric(nextModifier.split("ms")[0]) ? Number(nextModifier.split("ms")[0]) : 250;
handler3 = debounce(handler3, wait, this);
}
if (modifiers.includes("throttle")) {
let nextModifier = modifiers[modifiers.indexOf("throttle") + 1] || "invalid-wait";
let wait = isNumeric(nextModifier.split("ms")[0]) ? Number(nextModifier.split("ms")[0]) : 250;
handler3 = throttle(handler3, wait, this);
}
if (modifiers.includes("once")) {
handler3 = wrapHandler(handler3, (next, e) => {
next(e);
listenerTarget.removeEventListener(event, handler3, options);
});
}
listenerTarget.addEventListener(event, handler3, options);
return () => {
listenerTarget.removeEventListener(event, handler3, options);
};
}
function camelCase2(subject) {
return subject.toLowerCase().replace(/-(\w)/g, (match, char) => char.toUpperCase());
}
function debounce(func, wait) {
var timeout;
return function() {
var context = this, args = arguments;
var later = function() {
timeout = null;
func.apply(context, args);
};
clearTimeout(timeout);
timeout = setTimeout(later, wait);
};
}
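// Sketch of the debounce contract (values invented for the example):
//
//   const save = debounce(() => console.log("saved"), 250);
//   save(); save(); save(); // logs "saved" once, ~250ms after the last call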
function throttle(func, limit) {
let inThrottle;
return function() {
let context = this, args = arguments;
if (!inThrottle) {
func.apply(context, args);
inThrottle = true;
setTimeout(() => inThrottle = false, limit);
}
};
}
function isNumeric(subject) {
return !Array.isArray(subject) && !isNaN(subject);
}
function kebabCase(subject) {
   return subject.replace(/([a-z])([A-Z])/g, "$1-$2").replace(/[_\s]/g, "-").toLowerCase();
}
function isKeyEvent(event) {
return ["keydown", "keyup"].includes(event);
}
function isListeningForASpecificKeyThatHasntBeenPressed(e, modifiers) {
let keyModifiers = modifiers.filter((i) => {
return !["window", "document", "prevent", "stop", "once"].includes(i);
});
if (keyModifiers.includes("debounce")) {
let debounceIndex = keyModifiers.indexOf("debounce");
keyModifiers.splice(debounceIndex, isNumeric((keyModifiers[debounceIndex + 1] || "invalid-wait").split("ms")[0]) ? 2 : 1);
}
if (keyModifiers.length === 0)
return false;
if (keyModifiers.length === 1 && keyModifiers[0] === keyToModifier(e.key))
return false;
const systemKeyModifiers = ["ctrl", "shift", "alt", "meta", "cmd", "super"];
const selectedSystemKeyModifiers = systemKeyModifiers.filter((modifier) => keyModifiers.includes(modifier));
keyModifiers = keyModifiers.filter((i) => !selectedSystemKeyModifiers.includes(i));
if (selectedSystemKeyModifiers.length > 0) {
const activelyPressedKeyModifiers = selectedSystemKeyModifiers.filter((modifier) => {
if (modifier === "cmd" || modifier === "super")
modifier = "meta";
return e[`${modifier}Key`];
});
if (activelyPressedKeyModifiers.length === selectedSystemKeyModifiers.length) {
if (keyModifiers[0] === keyToModifier(e.key))
return false;
}
}
return true;
}
function keyToModifier(key) {
switch (key) {
case "/":
return "slash";
case " ":
case "Spacebar":
return "space";
default:
return key && kebabCase(key);
}
}
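// Example mappings (assumed KeyboardEvent.key values):
//
//   keyToModifier("/")        // -> "slash"
//   keyToModifier(" ")        // -> "space"
//   keyToModifier("PageDown") // -> "page-down" (via kebabCase), so @keydown.page-down matches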
// packages/alpinejs/src/directives/x-model.js
directive("model", (el, {modifiers, expression}, {effect: effect3, cleanup: cleanup2}) => {
let evaluate2 = evaluateLater(el, expression);
let assignmentExpression = `${expression} = rightSideOfExpression($event, ${expression})`;
let evaluateAssignment = evaluateLater(el, assignmentExpression);
var event = el.tagName.toLowerCase() === "select" || ["checkbox", "radio"].includes(el.type) || modifiers.includes("lazy") ? "change" : "input";
   let assignmentFunction = generateAssignmentFunction(el, modifiers, expression);
let removeListener = on(el, event, modifiers, (e) => {
evaluateAssignment(() => {
}, {scope: {
$event: e,
         rightSideOfExpression: assignmentFunction
}});
});
cleanup2(() => removeListener());
el._x_forceModelUpdate = () => {
evaluate2((value) => {
if (value === void 0 && expression.match(/\./))
value = "";
window.fromModel = true;
mutateDom(() => bind(el, "value", value));
delete window.fromModel;
});
};
effect3(() => {
if (modifiers.includes("unintrusive") && document.activeElement.isSameNode(el))
return;
el._x_forceModelUpdate();
});
});
function generateAssignmentFunction(el, modifiers, expression) {
if (el.type === "radio") {
mutateDom(() => {
if (!el.hasAttribute("name"))
el.setAttribute("name", expression);
});
}
return (event, currentValue) => {
return mutateDom(() => {
if (event instanceof CustomEvent && event.detail !== void 0) {
return event.detail;
} else if (el.type === "checkbox") {
if (Array.isArray(currentValue)) {
let newValue = modifiers.includes("number") ? safeParseNumber(event.target.value) : event.target.value;
return event.target.checked ? currentValue.concat([newValue]) : currentValue.filter((el2) => !checkedAttrLooseCompare2(el2, newValue));
} else {
return event.target.checked;
}
} else if (el.tagName.toLowerCase() === "select" && el.multiple) {
return modifiers.includes("number") ? Array.from(event.target.selectedOptions).map((option) => {
let rawValue = option.value || option.text;
return safeParseNumber(rawValue);
}) : Array.from(event.target.selectedOptions).map((option) => {
return option.value || option.text;
});
} else {
let rawValue = event.target.value;
return modifiers.includes("number") ? safeParseNumber(rawValue) : modifiers.includes("trim") ? rawValue.trim() : rawValue;
}
});
};
}
function safeParseNumber(rawValue) {
let number = rawValue ? parseFloat(rawValue) : null;
return isNumeric2(number) ? number : rawValue;
}
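// Coercion examples (inputs invented for the example):
//
//   safeParseNumber("42")   // -> 42 (number)
//   safeParseNumber("3.5x") // -> 3.5 (parseFloat stops at the first non-numeric char)
//   safeParseNumber("abc")  // -> "abc" (parses to NaN, so the raw string is returned)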
function checkedAttrLooseCompare2(valueA, valueB) {
return valueA == valueB;
}
function isNumeric2(subject) {
return !Array.isArray(subject) && !isNaN(subject);
}
// packages/alpinejs/src/directives/x-cloak.js
directive("cloak", (el) => nextTick(() => mutateDom(() => el.removeAttribute(prefix("cloak")))));
// packages/alpinejs/src/directives/x-init.js
addRootSelector(() => `[${prefix("init")}]`);
directive("init", skipDuringClone((el, {expression}) => evaluate(el, expression, {}, false)));
// packages/alpinejs/src/directives/x-text.js
directive("text", (el, {expression}, {effect: effect3}) => {
let evaluate2 = evaluateLater(el, expression);
effect3(() => {
evaluate2((value) => {
mutateDom(() => {
el.textContent = value;
});
});
});
});
// packages/alpinejs/src/directives/x-bind.js
mapAttributes(startingWith(":", into(prefix("bind:"))));
directive("bind", (el, {value, modifiers, expression, original}, {effect: effect3}) => {
if (!value)
return applyBindingsObject(el, expression, original, effect3);
if (value === "key")
return storeKeyForXFor(el, expression);
let evaluate2 = evaluateLater(el, expression);
effect3(() => evaluate2((result) => {
if (result === void 0 && expression.match(/\./))
result = "";
mutateDom(() => bind(el, value, result, modifiers));
}));
});
function applyBindingsObject(el, expression, original, effect3) {
let getBindings = evaluateLater(el, expression);
let cleanupRunners = [];
effect3(() => {
while (cleanupRunners.length)
cleanupRunners.pop()();
getBindings((bindings) => {
let attributes = Object.entries(bindings).map(([name, value]) => ({name, value}));
directives(el, attributes, original).map((handle) => {
cleanupRunners.push(handle.runCleanups);
handle();
});
});
});
}
function storeKeyForXFor(el, expression) {
el._x_keyExpression = expression;
}
// packages/alpinejs/src/directives/x-data.js
addRootSelector(() => `[${prefix("data")}]`);
directive("data", skipDuringClone((el, {expression}, {cleanup: cleanup2}) => {
expression = expression === "" ? "{}" : expression;
let dataProvider = getNamedDataProvider(expression);
let data2 = {};
if (dataProvider) {
let magics2 = injectMagics({}, el);
data2 = dataProvider.bind(magics2)();
} else {
data2 = evaluate(el, expression);
}
injectMagics(data2, el);
let reactiveData = reactive(data2);
initInterceptors(reactiveData);
let undo = addScopeToNode(el, reactiveData);
if (reactiveData["init"])
reactiveData["init"]();
cleanup2(() => {
undo();
reactiveData["destroy"] && reactiveData["destroy"]();
});
}));
// packages/alpinejs/src/directives/x-show.js
directive("show", (el, {modifiers, expression}, {effect: effect3}) => {
let evaluate2 = evaluateLater(el, expression);
let hide = () => mutateDom(() => {
el.style.display = "none";
el._x_isShown = false;
});
let show = () => mutateDom(() => {
if (el.style.length === 1 && el.style.display === "none") {
el.removeAttribute("style");
} else {
el.style.removeProperty("display");
}
el._x_isShown = true;
});
let clickAwayCompatibleShow = () => setTimeout(show);
let toggle = once((value) => value ? show() : hide(), (value) => {
if (typeof el._x_toggleAndCascadeWithTransitions === "function") {
el._x_toggleAndCascadeWithTransitions(el, value, show, hide);
} else {
value ? clickAwayCompatibleShow() : hide();
}
});
let oldValue;
let firstTime = true;
effect3(() => evaluate2((value) => {
if (!firstTime && value === oldValue)
return;
if (modifiers.includes("immediate"))
value ? clickAwayCompatibleShow() : hide();
toggle(value);
oldValue = value;
firstTime = false;
}));
});
// packages/alpinejs/src/directives/x-for.js
directive("for", (el, {expression}, {effect: effect3, cleanup: cleanup2}) => {
let iteratorNames = parseForExpression(expression);
let evaluateItems = evaluateLater(el, iteratorNames.items);
let evaluateKey = evaluateLater(el, el._x_keyExpression || "index");
el._x_prevKeys = [];
el._x_lookup = {};
effect3(() => loop(el, iteratorNames, evaluateItems, evaluateKey));
cleanup2(() => {
Object.values(el._x_lookup).forEach((el2) => el2.remove());
delete el._x_prevKeys;
delete el._x_lookup;
});
});
function loop(el, iteratorNames, evaluateItems, evaluateKey) {
let isObject2 = (i) => typeof i === "object" && !Array.isArray(i);
let templateEl = el;
evaluateItems((items) => {
if (isNumeric3(items) && items >= 0) {
items = Array.from(Array(items).keys(), (i) => i + 1);
}
let lookup = el._x_lookup;
let prevKeys = el._x_prevKeys;
let scopes = [];
let keys = [];
if (isObject2(items)) {
items = Object.entries(items).map(([key, value]) => {
let scope = getIterationScopeVariables(iteratorNames, value, key, items);
evaluateKey((value2) => keys.push(value2), {scope: {index: key, ...scope}});
scopes.push(scope);
});
} else {
for (let i = 0; i < items.length; i++) {
let scope = getIterationScopeVariables(iteratorNames, items[i], i, items);
evaluateKey((value) => keys.push(value), {scope: {index: i, ...scope}});
scopes.push(scope);
}
}
let adds = [];
let moves = [];
let removes = [];
let sames = [];
for (let i = 0; i < prevKeys.length; i++) {
let key = prevKeys[i];
if (keys.indexOf(key) === -1)
removes.push(key);
}
prevKeys = prevKeys.filter((key) => !removes.includes(key));
let lastKey = "template";
for (let i = 0; i < keys.length; i++) {
let key = keys[i];
let prevIndex = prevKeys.indexOf(key);
if (prevIndex === -1) {
prevKeys.splice(i, 0, key);
adds.push([lastKey, i]);
} else if (prevIndex !== i) {
let keyInSpot = prevKeys.splice(i, 1)[0];
let keyForSpot = prevKeys.splice(prevIndex - 1, 1)[0];
prevKeys.splice(i, 0, keyForSpot);
prevKeys.splice(prevIndex, 0, keyInSpot);
moves.push([keyInSpot, keyForSpot]);
} else {
sames.push(key);
}
lastKey = key;
}
for (let i = 0; i < removes.length; i++) {
let key = removes[i];
lookup[key].remove();
lookup[key] = null;
delete lookup[key];
}
for (let i = 0; i < moves.length; i++) {
let [keyInSpot, keyForSpot] = moves[i];
let elInSpot = lookup[keyInSpot];
let elForSpot = lookup[keyForSpot];
let marker = document.createElement("div");
mutateDom(() => {
elForSpot.after(marker);
elInSpot.after(elForSpot);
marker.before(elInSpot);
marker.remove();
});
refreshScope(elForSpot, scopes[keys.indexOf(keyForSpot)]);
}
for (let i = 0; i < adds.length; i++) {
let [lastKey2, index] = adds[i];
let lastEl = lastKey2 === "template" ? templateEl : lookup[lastKey2];
let scope = scopes[index];
let key = keys[index];
let clone2 = document.importNode(templateEl.content, true).firstElementChild;
addScopeToNode(clone2, reactive(scope), templateEl);
initTree(clone2);
mutateDom(() => {
lastEl.after(clone2);
});
lookup[key] = clone2;
}
for (let i = 0; i < sames.length; i++) {
refreshScope(lookup[sames[i]], scopes[keys.indexOf(sames[i])]);
}
templateEl._x_prevKeys = keys;
});
}
function parseForExpression(expression) {
let forIteratorRE = /,([^,\}\]]*)(?:,([^,\}\]]*))?$/;
let stripParensRE = /^\s*\(|\)\s*$/g;
let forAliasRE = /([\s\S]*?)\s+(?:in|of)\s+([\s\S]*)/;
let inMatch = expression.match(forAliasRE);
if (!inMatch)
return;
let res = {};
res.items = inMatch[2].trim();
let item = inMatch[1].replace(stripParensRE, "").trim();
let iteratorMatch = item.match(forIteratorRE);
if (iteratorMatch) {
res.item = item.replace(forIteratorRE, "").trim();
res.index = iteratorMatch[1].trim();
if (iteratorMatch[2]) {
res.collection = iteratorMatch[2].trim();
}
} else {
res.item = item;
}
return res;
}
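// Worked examples (hypothetical x-for expressions):
//
//   parseForExpression("item in items")
//   // -> { items: "items", item: "item" }
//   parseForExpression("(item, index) in items")
//   // -> { items: "items", item: "item", index: "index" }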
function getIterationScopeVariables(iteratorNames, item, index, items) {
let scopeVariables = {};
if (/^\[.*\]$/.test(iteratorNames.item) && Array.isArray(item)) {
let names = iteratorNames.item.replace("[", "").replace("]", "").split(",").map((i) => i.trim());
names.forEach((name, i) => {
scopeVariables[name] = item[i];
});
} else {
scopeVariables[iteratorNames.item] = item;
}
if (iteratorNames.index)
scopeVariables[iteratorNames.index] = index;
if (iteratorNames.collection)
scopeVariables[iteratorNames.collection] = items;
return scopeVariables;
}
function isNumeric3(subject) {
return !Array.isArray(subject) && !isNaN(subject);
}
// packages/alpinejs/src/directives/x-ref.js
function handler2() {
}
handler2.inline = (el, {expression}, {cleanup: cleanup2}) => {
let root = closestRoot(el);
if (!root._x_refs)
root._x_refs = {};
root._x_refs[expression] = el;
cleanup2(() => delete root._x_refs[expression]);
};
directive("ref", handler2);
// packages/alpinejs/src/directives/x-if.js
directive("if", (el, {expression}, {effect: effect3, cleanup: cleanup2}) => {
let evaluate2 = evaluateLater(el, expression);
let show = () => {
if (el._x_currentIfEl)
return el._x_currentIfEl;
let clone2 = el.content.cloneNode(true).firstElementChild;
addScopeToNode(clone2, {}, el);
initTree(clone2);
mutateDom(() => el.after(clone2));
el._x_currentIfEl = clone2;
el._x_undoIf = () => {
clone2.remove();
delete el._x_currentIfEl;
};
return clone2;
};
let hide = () => el._x_undoIf?.() || delete el._x_undoIf;
effect3(() => evaluate2((value) => {
value ? show() : hide();
}));
cleanup2(() => el._x_undoIf && el._x_undoIf());
});
// packages/alpinejs/src/directives/x-on.js
mapAttributes(startingWith("@", into(prefix("on:"))));
directive("on", skipDuringClone((el, {value, modifiers, expression}, {cleanup: cleanup2}) => {
let evaluate2 = expression ? evaluateLater(el, expression) : () => {
};
let removeListener = on(el, value, modifiers, (e) => {
evaluate2(() => {
}, {scope: {$event: e}, params: [e]});
});
cleanup2(() => removeListener());
}));
// packages/alpinejs/src/index.js
alpine_default.setEvaluator(normalEvaluator);
alpine_default.setReactivityEngine({reactive: reactive2, effect: effect2, release: stop, raw: toRaw});
var src_default = alpine_default;
// packages/alpinejs/builds/cdn.js
window.Alpine = src_default;
queueMicrotask(() => {
src_default.start();
});
})();<|fim▁end|>
|
transition(el, setFunction, {
during: this.enter.during,
|
<|file_name|>GetRest.java<|end_file_name|><|fim▁begin|>package org.elasticsearch.action.get;
import com.bazaarvoice.elasticsearch.client.core.spi.RestExecutor;
import com.bazaarvoice.elasticsearch.client.core.spi.RestResponse;
import com.bazaarvoice.elasticsearch.client.core.util.UrlBuilder;
import org.elasticsearch.action.AbstractRestClientAction;
import org.elasticsearch.common.base.Function;
import org.elasticsearch.common.util.concurrent.Futures;
import org.elasticsearch.common.util.concurrent.ListenableFuture;
import static com.bazaarvoice.elasticsearch.client.core.util.StringFunctions.booleanToString;
import static com.bazaarvoice.elasticsearch.client.core.util.StringFunctions.commaDelimitedToString;
import static com.bazaarvoice.elasticsearch.client.core.util.UrlBuilder.urlEncode;
import static com.bazaarvoice.elasticsearch.client.core.util.Validation.notNull;
import static org.elasticsearch.common.base.Optional.fromNullable;<|fim▁hole|> *
 * @param <ResponseType> the type produced by the response transform
*/
public class GetRest<ResponseType> extends AbstractRestClientAction<GetRequest, ResponseType> {
public GetRest(final String protocol, final String host, final int port, final RestExecutor executor, final Function<RestResponse, ResponseType> responseTransform) {
super(protocol, host, port, executor, responseTransform);
}
@Override public ListenableFuture<ResponseType> act(GetRequest request) {
UrlBuilder url = UrlBuilder.create()
.protocol(protocol).host(host).port(port)
.path(urlEncode(notNull(request.index())))
.seg(urlEncode(notNull(request.type())))
.seg(urlEncode(notNull(request.id())))
.paramIfPresent("refresh", fromNullable(request.refresh()).transform(booleanToString))
.paramIfPresent("routing", fromNullable(request.routing()))
// note parent(string) seems just to set the routing, so we don't need to provide it here
.paramIfPresent("preference", fromNullable(request.preference()))
.paramIfPresent("realtime", fromNullable(request.realtime()).transform(booleanToString))
.paramIfPresent("fields", fromNullable(request.fields()).transform(commaDelimitedToString));
return Futures.transform(executor.get(url.url()), responseTransform);
}
}<|fim▁end|>
|
/**
* The inverse of {@link org.elasticsearch.rest.action.get.RestGetAction}
|
<|file_name|>lint-output-format.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags:-F unstable<|fim▁hole|>
fn main() {
let _x = foo(); //~ WARNING #[warn(deprecated)] on by default
let _y = bar(); //~ ERROR [-F unstable]
}<|fim▁end|>
|
// aux-build:lint_output_format.rs
extern crate lint_output_format; //~ ERROR: use of unmarked item
use lint_output_format::{foo, bar};
|
<|file_name|>opening-tag.hpp<|end_file_name|><|fim▁begin|>// opening-tag.hpp
// Started 14 Aug 2018<|fim▁hole|>namespace client {
// namespace fusion = boost::fusion;
// namespace phoenix = boost::phoenix;
namespace qi = boost::spirit::qi;
namespace ascii = boost::spirit::ascii;
template<typename Iterator>
struct opening_tag : qi::grammar<Iterator, mini_xml_tag(), ascii::space_type>
{
qi::rule<Iterator, mini_xml_tag(), ascii::space_type> start;
qi::rule<Iterator, std::string(), ascii::space_type> head;
qi::rule<Iterator, std::string(), ascii::space_type> tail;
opening_tag()
: base_type{ start }
{
head %= qi::lexeme[+ascii::alnum];
tail %= qi::no_skip[*(qi::char_ - '>')];
start %= qi::lit('<') >> head >> tail >> qi::lit('>');
}
};
}<|fim▁end|>
|
#pragma once
#include <string>
#include <boost/spirit/include/qi.hpp>
|
<|file_name|>updater.py<|end_file_name|><|fim▁begin|>import urllib.request
import pickle
import sys
import ast
try:
import variables as v
except:
class var():
def __init__(self):
self.screen = None
v = var()
import pygame as py
class textLabel(py.sprite.Sprite):
def __init__(self, text, pos, colour, font, size, variable = False, centred = False):
super().__init__()
self.text = text
self.pos = pos
self.colour = colour
self.font = font
self.size = size
self.variable = variable
self.centred = centred
def update(self):
pos = self.pos
font = py.font.Font(self.font, self.size)
if not self.variable:
label = font.render(self.text, 1, self.colour)
if self.variable:
label = font.render(str(getattr(v, self.text)), 1, self.colour)
if self.centred:
pos = list(self.pos)
pos[0] -= font.size(self.text)[0] / 2
pos[1] -= font.size(self.text)[1] / 2
pos = tuple(pos)
v.screen.blit(label, pos)
class Button(py.sprite.Sprite):
def __init__(self, text, pos, size, hovercolour, normalcolour, font, ID, centred = False, bsize=(0,0)):
super().__init__()
self.ID = ID
self.hovered = False
self.text = text
self.pos = pos
self.hcolour = hovercolour
self.ncolour = normalcolour
self.font = font
self.font = py.font.Font(font, int(size))
self.centred = centred
self.size = bsize
self.set_rect()
def update(self):
self.set_rend()
py.draw.rect(v.screen, self.get_color(), self.rect)
v.screen.blit(self.rend, self.rect)
if self.rect.collidepoint(py.mouse.get_pos()):
self.hovered = True
else:
self.hovered = False
def set_rend(self):
self.rend = self.font.render(self.text, True, (0,0,0))
def get_color(self):
if self.hovered:
return self.hcolour
else:
return self.ncolour
def set_rect(self):
self.set_rend()
self.rect = self.rend.get_rect()
if not self.centred:
self.rect.topleft = self.pos
if self.centred:
self.rect.center = self.pos
if not self.size[0] == 0:
self.rect.width = self.size[0]
if not self.size[1] == 0:
self.rect.height = self.size[1]
def pressed(self):
mouse = py.mouse.get_pos()
if mouse[0] > self.rect.topleft[0]:
if mouse[1] > self.rect.topleft[1]:
if mouse[0] < self.rect.bottomright[0]:
if mouse[1] < self.rect.bottomright[1]:
return True
else: return False
else: return False
else: return False
else: return False
import os, shutil
theFont = None
py.init()
v.screen = py.display.set_mode((640, 480))
v.screen.fill((20, 20, 20))
textLabel("Checking For Updates...", (320, 240), (255, 255, 255), theFont, 50, False, True).update()
py.display.flip()
tries = 0
def reporthook(count, blockSize, totalSize):
if totalSize == -1:
print("FAILED TOTALSIZE")
raise Exception()
#Shows percentage of download
py.event.pump()
for event in py.event.get():
if event.type == py.QUIT:
sys.exit()
percent = int(count*blockSize*100/totalSize)
rect = py.Rect(100, 240, percent*4.4, 30)
v.screen.fill((20, 20, 20))
py.draw.rect(v.screen, (255, 0, 0), rect)
py.draw.rect(v.screen, (0, 0, 0), rect, 2)
py.draw.rect(v.screen, (0, 0, 0), (100, 240, 440, 30), 2)
textLabel("Downloading...", (320, 150), (255, 255, 255), theFont, 50, False, True).update()
textLabel(str(percent) + "%", (320, 255), (255, 255, 255), theFont, 20, False, True).update()
py.display.flip()
def recursive_overwrite(src, dest, ignore=None):
if os.path.isdir(src):
if not os.path.isdir(dest):
os.makedirs(dest)
files = os.listdir(src)
if ignore is not None:
ignored = ignore(src, files)
else:
ignored = set()
for f in files:
if f not in ignored:
recursive_overwrite(os.path.join(src, f),
os.path.join(dest, f),
ignore)
else:
shutil.copyfile(src, dest)
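# Illustrative call (hypothetical paths), mirroring how the updater later applies
# a downloaded release over the current install without deleting local-only files:
#
#   recursive_overwrite("Update/Aiopa-Battles-master", os.getcwd())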
def updateCheck():
global latest
page = urllib.request.urlopen('https://api.github.com/repos/lightopa/aiopa-battles/git/refs/heads/master')
#print(page.read().decode("utf-8"))
#data = json.loads(page.read().decode("utf-8"))
data = ast.literal_eval(page.read().decode("utf-8"))
latest = data["object"]["sha"]
#ind = page.find('class="sha btn btn-outline"')
#latest = page[ind + 38:ind + 45]
#print(latest)
#CHECK IF LATEST IS PROPER
try:
f = open("Update/current.version", "rb")
current = pickle.load(f)
f.close()
except:
print("create new file")
try:
os.mkdir("Update")
except:
pass
f = open("Update/current.version", "wb")
current = 0000
pickle.dump(current, f)
f.close()
print(current, "vs", latest)
if current != latest:
from os import remove
try:
remove("Update/download.zip")
except:
pass
print("downloading latest")
buttons = py.sprite.Group()
buttons.add(Button("Update", (220, 240), 60, (100, 100, 100), (255, 255, 255), theFont, "Y", centred=True))
buttons.add(Button("Ignore", (420, 240), 60, (100, 100, 100), (255, 255, 255), theFont, "N", centred=True))
buttons.add(Button("Skip Update", (320, 300), 40, (100, 100, 100), (255, 255, 255), theFont, "S", centred=True))
labels = py.sprite.Group()
labels.add(textLabel("An Update Is Available:", (320, 150), (255, 255, 255), theFont, 50, False, True))
labels.add(textLabel(str(str(current) + " ==> " + str(latest)), (320, 180), (255, 255, 255), theFont, 20, False, True))
while True:
py.event.pump()
v.screen.fill((20, 20, 20))
buttons.update()
labels.update()
for event in py.event.get():
if event.type == py.QUIT:
sys.exit()
elif event.type == py.MOUSEBUTTONDOWN:
for button in buttons:
if button.pressed():
id = button.ID
if id == "Y":
global tries
tries = 0
download()
return
if id == "N":
return
if id == "S":
f = open("Saves/current.version", "wb")
current = latest
pickle.dump(current, f)
f.close()
return
py.display.flip()
else:
v.screen.fill((20, 20, 20))
t = textLabel("No Update!", (320, 250), (255, 0, 0), theFont, 70, False, True)
v.current = current
t.update()
py.display.update()
if __name__ == "__main__":
py.time.wait(2000)
def download():
global tries
try:
try:
os.mkdir("Update")
except:
pass
urllib.request.urlretrieve("https://github.com/lightopa/Aiopa-Battles/archive/master.zip", "Update/download.zip", reporthook)
f = open("Update/current.version", "wb")
current = latest
pickle.dump(current, f)
f.close()
unzip()
except Exception as e:
tries += 1
print("Error: " + str(e))
v.screen.fill((20, 20, 20))
textLabel("Download Error. Retry " + str(tries) + "/8", (320, 240), (255, 255, 255), theFont, 50, False, True).update()
textLabel("Error: " + str(e), (320, 240), (255, 255, 255), theFont, 50, False, True).update()
py.display.flip()
if tries > 8:
return
download()
def unzip():
v.screen.fill((20, 20, 20))
textLabel("Extracting Data...", (320, 240), (255, 255, 255), theFont, 50, False, True).update()
py.display.flip()
import zipfile
with zipfile.ZipFile('Update/download.zip', "r") as z:
<|fim▁hole|> v.screen.fill((20, 20, 20))
textLabel("Updating Files...", (320, 240), (255, 255, 255), theFont, 50, False, True).update()
py.display.flip()
from os import getcwd
recursive_overwrite("Update/Aiopa-Battles-master", getcwd())
if __name__ == "__main__":
updateCheck()<|fim▁end|>
|
z.extractall("Update/")
|
<|file_name|>fudge.py<|end_file_name|><|fim▁begin|># -*- mode: python; indent-tabs-mode: nil; tab-width: 3 -*-
# vim: set tabstop=3 shiftwidth=3 expandtab:
#
# Copyright (C) 2001-2005 Ichiro Fujinaga, Michael Droettboom,
# and Karl MacMillan
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# TODO: These are fixed values. We need an intelligent way to vary them.
# This whole approach to fuzziness is syntactically convenient, but maybe
# not very efficient.
FUDGE_AMOUNT = 3
FUDGE_AMOUNT_2 = 6
from gamera.core import Rect, Point, Dim
# This is a factory function that looks like a constructor
def Fudge(o, amount=FUDGE_AMOUNT):
# For rectangles, just return a new rectangle that is slightly larger
if isinstance(o, Rect):
return Rect(Point(int(o.ul_x - amount), int(o.ul_y - amount)), Dim(int(o.ncols + amount * 2), int(o.nrows + amount * 2)))
# For integers, return one of our "fudge number proxies"
elif isinstance(o, int):
return FudgeInt(o, amount)
elif isinstance(o, float):
return FudgeFloat(o, amount)
F = Fudge
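# Rough usage sketch (values invented for the example; semantics follow the
# comparison methods on FudgeNumber below):
#
#   F(10) == 12   # True: 12 lies within [10 - FUDGE_AMOUNT, 10 + FUDGE_AMOUNT]
#   F(10) == 14   # False: outside the +/-3 window
#   F(some_rect)  # for a Rect: a copy grown by FUDGE_AMOUNT on every side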
class FudgeNumber(object):
def __lt__(self, other):
return self.below < other
def __le__(self, other):
return self.below <= other
def __eq__(self, other):
return self.below <= other and self.above >= other
def __ne__(self, other):
      return other < self.below or other > self.above
def __gt__(self, other):
return self.above > other
def __ge__(self, other):
return self.above >= other
class FudgeInt(FudgeNumber, int):
   def __new__(cls, value, amount=FUDGE_AMOUNT):
      # int is immutable, so the numeric value must be set in __new__
      return int.__new__(cls, value)
   def __init__(self, value, amount=FUDGE_AMOUNT):
      self.below = int(value - amount)
      self.above = int(value + amount)
class FudgeFloat(FudgeNumber, float):
   def __new__(cls, value, amount=FUDGE_AMOUNT):
      # float is immutable, so the numeric value must be set in __new__
      return float.__new__(cls, value)
   def __init__(self, value, amount=FUDGE_AMOUNT):<|fim▁hole|>       self.below = float(value - amount)
self.above = float(value + amount)<|fim▁end|>
|
      # the stored value is set in __new__; only the fuzz bounds are added here
|
<|file_name|>filepath-utils.ts<|end_file_name|><|fim▁begin|>import { lstatSync } from 'fs';
import { sep } from 'path';
const reNormalize = sep !== '/' ? new RegExp(sep.replace(/\\/g, '\\\\'), 'g') : null;
import logUtils from '../utils/log-utils';
const log = logUtils.log;
const normalizePath = function(path: string): string {
if (reNormalize) {
path = path.replace(reNormalize, '/');
}
return path;
}
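// Example behaviour (assuming a Windows separator, i.e. sep === '\\'):
//
//   normalizePath('src\\utils\\log-utils.ts') // -> 'src/utils/log-utils.ts'
//
// On POSIX systems reNormalize stays null and the input is returned unchanged.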
const createFileTestFunc = function(absolutePaths: string[], debugStr?: string): (path: string) => boolean {
debugStr = debugStr || '';
const reTest = absolutePaths.map(function(absolutePath) {
return new RegExp('^' + absolutePath.replace(/\./g, '\\.') + '$');
});
return function(path: string): boolean {
path = normalizePath(path);
for (var i = 0, size = reTest.length; i < size; ++i) {
var re = reTest[i];
if (re.test(path)) {
log('\ttest'+debugStr+': ', path); //DEBUG
return true;
<|fim▁hole|> };
}
const isDirectory = function(path: string): boolean {
return lstatSync(path).isDirectory();
}
export = {
normalizePath: normalizePath,
createFileTestFunc: createFileTestFunc,
isDirectory: isDirectory
};<|fim▁end|>
|
};
}
return false;
|
<|file_name|>slugify.go<|end_file_name|><|fim▁begin|>package slugify
import (
"bytes"
"strings"
"unicode/utf8"
)
var (
defaultSlugger = New(Configuration{})
)
// Slugify creates the slug for a given value
func Slugify(value string) string {
return defaultSlugger.Slugify(value)
}
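// Illustrative calls (inputs invented for the example):
//
//	Slugify("Hello, World!") // -> "hello-world"
//	Slugify("Grüße & Co")    // -> "gruesse-and-co" (via the replacement map)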
func validCharacter(c rune) bool {
if c >= 'a' && c <= 'z' {
return true
}
if c >= '0' && c <= '9' {
return true
}
return false
}
// Slugifier based on settings
type Slugifier struct {
isValidCharacter func(c rune) bool
replaceCharacter rune
replacementMap map[rune]string
}
// Slugify creates a slug for a string
func (s Slugifier) Slugify(value string) string {
value = strings.ToLower(value)
var buffer bytes.Buffer
lastCharacterWasInvalid := false
for len(value) > 0 {
c, size := utf8.DecodeRuneInString(value)
value = value[size:]
if newCharacter, ok := s.replacementMap[c]; ok {
buffer.WriteString(newCharacter)
lastCharacterWasInvalid = false
continue
}
if s.isValidCharacter(c) {
buffer.WriteRune(c)
lastCharacterWasInvalid = false
} else if lastCharacterWasInvalid == false {
buffer.WriteRune(s.replaceCharacter)
lastCharacterWasInvalid = true
}
}
return strings.Trim(buffer.String(), string(s.replaceCharacter))
}
// Configuration is the basic configuration for Slugifier
type Configuration struct {
IsValidCharacterChecker func(rune) bool
ReplaceCharacter rune
ReplacementMap map[rune]string
}
// New initialize a new slugifier
func New(config Configuration) *Slugifier {
if config.IsValidCharacterChecker == nil {
config.IsValidCharacterChecker = validCharacter
}
if config.ReplaceCharacter == 0 {
config.ReplaceCharacter = '-'
}
if config.ReplacementMap == nil {
config.ReplacementMap = map[rune]string{
'&': "and",
'@': "at",
'©': "c",
'®': "r",
'Æ': "ae",
'ß': "ss",
'à': "a",
'á': "a",
'â': "a",
'ä': "ae",
'å': "a",
'æ': "ae",
'ç': "c",
'è': "e",
'é': "e",
'ê': "e",
'ë': "e",
'ì': "i",
'í': "i",
'î': "i",
'ï': "i",
'ò': "o",
'ó': "o",
'ô': "o",
'õ': "o",
'ö': "oe",
'ø': "o",
'ù': "u",
'ú': "u",
'û': "u",
'ü': "ue",
'ý': "y",
'þ': "p",
'ÿ': "y",
'ā': "a",
'ă': "a",
'Ą': "a",
'ą': "a",
'ć': "c",
'ĉ': "c",
'ċ': "c",
'č': "c",
'ď': "d",
'đ': "d",
'ē': "e",
'ĕ': "e",
'ė': "e",
'ę': "e",
'ě': "e",
'ĝ': "g",
'ğ': "g",
'ġ': "g",
'ģ': "g",
'ĥ': "h",
'ħ': "h",
'ĩ': "i",
'ī': "i",
'ĭ': "i",
'į': "i",
'ı': "i",
'ij': "ij",
'ĵ': "j",
'ķ': "k",<|fim▁hole|> 'ĺ': "l",
'ļ': "l",
'ľ': "l",
'ŀ': "l",
'ł': "l",
'ń': "n",
'ņ': "n",
'ň': "n",
'ʼn': "n",
'ŋ': "n",
'ō': "o",
'ŏ': "o",
'ő': "o",
'Œ': "oe",
'œ': "oe",
'ŕ': "r",
'ŗ': "r",
'ř': "r",
'ś': "s",
'ŝ': "s",
'ş': "s",
'š': "s",
'ţ': "t",
'ť': "t",
'ŧ': "t",
'ũ': "u",
'ū': "u",
'ŭ': "u",
'ů': "u",
'ű': "u",
'ų': "u",
'ŵ': "w",
'ŷ': "y",
'ź': "z",
'ż': "z",
'ž': "z",
'ſ': "z",
'Ə': "e",
'ƒ': "f",
'Ơ': "o",
'ơ': "o",
'Ư': "u",
'ư': "u",
'ǎ': "a",
'ǐ': "i",
'ǒ': "o",
'ǔ': "u",
'ǖ': "u",
'ǘ': "u",
'ǚ': "u",
'ǜ': "u",
'ǻ': "a",
'Ǽ': "ae",
'ǽ': "ae",
'Ǿ': "o",
'ǿ': "o",
'ə': "e",
'Є': "e",
'Б': "b",
'Г': "g",
'Д': "d",
'Ж': "zh",
'З': "z",
'У': "u",
'Ф': "f",
'Х': "h",
'Ц': "c",
'Ч': "ch",
'Ш': "sh",
'Щ': "sch",
'Ъ': "-",
'Ы': "y",
'Ь': "-",
'Э': "je",
'Ю': "ju",
'Я': "ja",
'а': "a",
'б': "b",
'в': "v",
'г': "g",
'д': "d",
'е': "e",
'ж': "zh",
'з': "z",
'и': "i",
'й': "j",
'к': "k",
'л': "l",
'м': "m",
'н': "n",
'о': "o",
'п': "p",
'р': "r",
'с': "s",
'т': "t",
'у': "u",
'ф': "f",
'х': "h",
'ц': "c",
'ч': "ch",
'ш': "sh",
'щ': "sch",
'ъ': "-",
'ы': "y",
'ь': "-",
'э': "je",
'ю': "ju",
'я': "ja",
'ё': "jo",
'є': "e",
'і': "i",
'ї': "i",
'Ґ': "g",
'ґ': "g",
'א': "a",
'ב': "b",
'ג': "g",
'ד': "d",
'ה': "h",
'ו': "v",
'ז': "z",
'ח': "h",
'ט': "t",
'י': "i",
'ך': "k",
'כ': "k",
'ל': "l",
'ם': "m",
'מ': "m",
'ן': "n",
'נ': "n",
'ס': "s",
'ע': "e",
'ף': "p",
'פ': "p",
'ץ': "C",
'צ': "c",
'ק': "q",
'ר': "r",
'ש': "w",
'ת': "t",
'™': "tm",
'ả': "a",
'ã': "a",
'ạ': "a",
'ắ': "a",
'ằ': "a",
'ẳ': "a",
'ẵ': "a",
'ặ': "a",
'ấ': "a",
'ầ': "a",
'ẩ': "a",
'ẫ': "a",
'ậ': "a",
'ẻ': "e",
'ẽ': "e",
'ẹ': "e",
'ế': "e",
'ề': "e",
'ể': "e",
'ễ': "e",
'ệ': "e",
'ỉ': "i",
'ị': "i",
'ỏ': "o",
'ọ': "o",
'ố': "o",
'ồ': "o",
'ổ': "o",
'ỗ': "o",
'ộ': "o",
'ớ': "o",
'ờ': "o",
'ở': "o",
'ỡ': "o",
'ợ': "o",
'ủ': "u",
'ụ': "u",
'ứ': "u",
'ừ': "u",
'ử': "u",
'ữ': "u",
'ự': "u",
'ỳ': "y",
'ỷ': "y",
'ỹ': "y",
'ỵ': "y",
}
}
return &Slugifier{
isValidCharacter: config.IsValidCharacterChecker,
replaceCharacter: config.ReplaceCharacter,
replacementMap: config.ReplacementMap,
}
}<|fim▁end|>
|
'ĸ': "k",
'Ĺ': "l",
|
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# swift documentation build configuration file, created by
# sphinx-quickstart on Mon Oct 3 17:01:55 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#<|fim▁hole|># sys.path.insert(0, os.path.abspath('.'))
import datetime
from swift import __version__
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'oslosphinx',
'reno.sphinxext',
]
# Add any paths that contain templates here, relative to this directory.
# templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Swift Release Notes'
copyright = u'%d, OpenStack Foundation' % datetime.datetime.now().year
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = __version__.rsplit('.', 1)[0]
# The full version, including alpha/beta/rc tags.
release = __version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
# todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'swift v2.10.0'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' users can customize the `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'SwiftReleaseNotesdoc'
# -- Options for LaTeX output ---------------------------------------------
# latex_elements = {
# # The paper size ('letterpaper' or 'a4paper').
# #
# # 'papersize': 'letterpaper',
# # The font size ('10pt', '11pt' or '12pt').
# #
# # 'pointsize': '10pt',
# # Additional stuff for the LaTeX preamble.
# #
# # 'preamble': '',
# # Latex figure (float) alignment
# #
# # 'figure_align': 'htbp',
# }
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
# latex_documents = [
# (master_doc, 'swift.tex', u'swift Documentation',
# u'swift', 'manual'),
# ]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, will not define \strong, \code, \titleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... This helps avoid clashes with user-added
# packages.
#
# latex_keep_old_macro_names = True
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
# man_pages = [
# (master_doc, 'swift', u'swift Documentation',
# [author], 1)
# ]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
# texinfo_documents = [
# (master_doc, 'swift', u'swift Documentation',
# author, 'swift', 'One line description of project.',
# 'Miscellaneous'),
# ]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
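# Directories (relative to this file) searched for translated message
# catalogs; used together with the "language" option when building
# localized documentation.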
locale_dirs = ['locale/']<|fim▁end|>
|
# import os
# import sys
|
<|file_name|>MultiChangeHistoryValues.java<|end_file_name|><|fim▁begin|>package org.scada_lts.dao.model.multichangehistory;
import java.util.Objects;
/**
* @author [email protected] on 16.10.2019
*/
public class MultiChangeHistoryValues {
private int id;
private int userId;
private String userName;
private String viewAndCmpIdentyfication;
private String interpretedState;
private long timeStamp;
private int valueId;
private String value;
private int dataPointId;
private String xidPoint;
public MultiChangeHistoryValues() {}
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public int getUserId() {
return userId;
}
public void setUserId(int userId) {
this.userId = userId;
}
public String getUserName() {
return userName;
}
public void setUserName(String userName) {
this.userName = userName;
}
public String getViewAndCmpIdentyfication() {
return viewAndCmpIdentyfication;
}
public void setViewAndCmpIdentyfication(String viewAndCmpIdentyfication) {
this.viewAndCmpIdentyfication = viewAndCmpIdentyfication;
}
public String getInterpretedState() {
return interpretedState;
}
public void setInterpretedState(String interpretedState) {
this.interpretedState = interpretedState;
}
public long getTimeStamp() {
return timeStamp;
}
public void setTimeStamp(long timeStamp) {
this.timeStamp = timeStamp;
}
public int getValueId() {
return valueId;
}
public void setValueId(int valueId) {
this.valueId = valueId;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
public int getDataPointId() {
return dataPointId;
}
public void setDataPointId(int dataPointId) {
this.dataPointId = dataPointId;
}
public String getXidPoint() {
return xidPoint;
}
public void setXidPoint(String xidPoint) {
this.xidPoint = xidPoint;
}<|fim▁hole|> if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
MultiChangeHistoryValues that = (MultiChangeHistoryValues) o;
return id == that.id &&
userId == that.userId &&
valueId == that.valueId &&
dataPointId == that.dataPointId &&
Objects.equals(userName, that.userName) &&
Objects.equals(viewAndCmpIdentyfication, that.viewAndCmpIdentyfication) &&
Objects.equals(interpretedState, that.interpretedState) &&
timeStamp == that.timeStamp &&
Objects.equals(value, that.value) &&
Objects.equals(xidPoint, that.xidPoint);
}
@Override
public int hashCode() {
return Objects.hash(id, userId, userName, viewAndCmpIdentyfication, interpretedState, timeStamp, valueId, value, dataPointId, xidPoint);
}
@Override
public String toString() {
return "MultiChangeHistoryValues{" +
"id=" + id +
", userId=" + userId +
", userName='" + userName + '\'' +
", viewAndCmpIdentyfication='" + viewAndCmpIdentyfication + '\'' +
", interpretedState='" + interpretedState + '\'' +
", ts=" + timeStamp +
", valueId=" + valueId +
", value='" + value + '\'' +
", dataPointId=" + dataPointId +
", xidPoint='" + xidPoint + '\'' +
'}';
}
}<|fim▁end|>
|
@Override
public boolean equals(Object o) {
|
<|file_name|>akamai_test.go<|end_file_name|><|fim▁begin|>// Package akamai provides a gramework.Behind implementation
// developed for Gramework.
// This is not an official Akamai-supported implementation.
// If you have any issues with this package, please
// consider contacting Gramework support first.
// Akamai doesn't provide any official support or guarantees
// for this package.
//
// Akamai is a trademark of Akamai Technologies, Inc.
//
// Copyright 2017-present Kirill Danshin and Gramework contributors
// Copyright 2019-present Highload LTD (UK CN: 11893420)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
package akamai
import (
"net"
"testing"
)
<|fim▁hole|>// Please download a fresh list of the CIDRs you need to whitelist
// in your Luna Control Panel.
const csvData = `Service Name,CIDR Block,Port,Activation Date,CIDR Status
"Log Delivery","120.33.22.0/24","21","Tue Dec 18 2021 02:00:00 GMT+0200 (Москва, стандартное время)","current"
"Log Delivery","120.33.21.0/24","80,443","Tue Dec 18 2107 02:00:00 GMT+0200 (Москва, стандартное время)","current"
"Log Delivery","120.33.23.0/24","80-8080","Tue Dec 18 1507 02:00:00 GMT+0200 (Москва, стандартное время)","current"
"Log Delivery","120.33.24.0/24","980-3300","Tue Dec 18 5507 02:00:00 GMT+0200 (Москва, стандартное время)","current"
"Log Delivery","120.17.33.0/24","21","Tue Dec 18 6507 02:00:00 GMT+0200 (Москва, стандартное время)","current"
`
func TestParseCIDRBlocksCSV(t *testing.T) {
cidrs, err := ParseCIDRBlocksCSV([]byte(csvData), true, true)
if err != nil {
t.Error(err)
}
type tcase struct {
cidr *net.IPNet
expected bool
}
cases := []tcase{
{
expected: true,
cidr: parseCIDR("120.33.21.0/24"),
},
{
expected: true,
cidr: parseCIDR("120.33.23.0/24"),
},
{
expected: false,
cidr: parseCIDR("120.33.22.0/24"),
},
{
expected: false,
cidr: parseCIDR("120.33.24.0/24"),
},
{
expected: false,
cidr: parseCIDR("120.17.33.0/24"),
},
}
for _, testcase := range cases {
found := false
for _, cidr := range cidrs {
if cidr.String() == testcase.cidr.String() {
found = true
break
}
}
if found != testcase.expected {
t.Errorf("unexpected result: CIDR %q expected=%v", testcase.cidr.String(), testcase.expected)
return
}
}
}
func parseCIDR(raw string) *net.IPNet {
_, cidr, _ := net.ParseCIDR(raw)
return cidr
}<|fim▁end|>
|
// Note: those IP CIDRs are fake.
|
<|file_name|>clientdetails.component.ts<|end_file_name|><|fim▁begin|>import 'rxjs/add/operator/switchMap';
import {Component, ElementRef, OnInit, AfterViewInit} from '@angular/core';
import { Location } from '@angular/common';
import { LocalDataSource } from 'ng2-smart-table';
import { ActivatedRoute, Params } from '@angular/router';
import { ClientService } from '../../../shared/services/client.service';
import { PartService } from '../../../shared/services/part.service';
import { OrderService } from '../../../shared/services/order.service';
import { MaterialService } from '../../../shared/services/material.service';
import { ChartistJsService } from './chartistJs.service';
import { ClientDetailRender } from '../../../shared/render/client-detail-render.component';
import 'style-loader!./chartistJs.scss';
import 'style-loader!../smartTables.scss';
import * as GoogleMapsLoader from 'google-maps';
import * as Chartist from 'chartist';
@Component({
selector: 'client-details',
styleUrls: ['./googleMaps.scss'],
templateUrl: './clientdetails.html',
})
export class ClientDetails implements OnInit, AfterViewInit {
inputClientCode:string;
inputClientName:string;
inputCountryCode:string;
inputEmail:string;
inputWebsite:string;
inputPostalAddress1:string;
inputDeliveryAddress1:string;
inputPostalAddress2:string;
inputDeliveryAddress2:string;
inputPostalCity:string;
inputDeliveryCity:string;
inputPostalState:string;
inputDeliveryState:string;
inputPostalPostcode:string;
inputDeliveryPostcode:string;
settings1 = {
add: {
addButtonContent: '<i class="ion-ios-plus-outline"></i>',
createButtonContent: '<i class="ion-checkmark"></i>',
cancelButtonContent: '<i class="ion-close"></i>',
},
edit: {
editButtonContent: '<i class="ion-edit"></i>',
saveButtonContent: '<i class="ion-checkmark"></i>',
cancelButtonContent: '<i class="ion-close"></i>',
},
delete: {
deleteButtonContent: '<i class="hidden"></i>',
confirmDelete: true,
},
columns: {
code: {
title: 'Code',
type: 'text',
},
cust_part_no: {
title: 'Part Number',
type: 'text',
},
drawing_no: {
title: 'Drawing Number',
type: 'text',
},
part_description: {
title: 'Description',
type: 'text',
},
unit: {
title: 'Unit',
type: 'text',
}
}
};
settings2 = {
add: {
addButtonContent: '<i class="ion-ios-plus-outline"></i>',
createButtonContent: '<i class="ion-checkmark"></i>',
cancelButtonContent: '<i class="ion-close"></i>',
},
edit: {
editButtonContent: '<i class="ion-edit"></i>',
saveButtonContent: '<i class="ion-checkmark"></i>',
cancelButtonContent: '<i class="ion-close"></i>',
},
delete: {
deleteButtonContent: '<i class="hidden"></i>',
confirmDelete: true
},
columns: {
order_code: {
title: 'Order Code',
type: 'text',
},
customer_name: {
title: 'Customer Name',
type: 'text'
},
customer: {
title: 'Customer Code',
type: 'text',
},
contact_name: {
title: 'Contact Name',
type: 'text',
},
sum_one: {
title: 'Price',
type: 'text',
renderComponents: ClientDetailRender,
},
}
};
settings3 = {
add: {
addButtonContent: '<i class="ion-ios-plus-outline"></i>',
createButtonContent: '<i class="ion-checkmark"></i>',
cancelButtonContent: '<i class="ion-close"></i>',
},
edit: {
editButtonContent: '<i class="ion-edit"></i>',
saveButtonContent: '<i class="ion-checkmark"></i>',
cancelButtonContent: '<i class="ion-close"></i>',
},
delete: {
deleteButtonContent: '<i class="hidden"></i>',
confirmDelete: true
},
columns: {
client_code: {
title: 'Client Code',
type: 'text',
},
client_name: {
title: 'Client Name',
type: 'text',
},
e_mail: {
title: 'Email',
type: 'text',
},
state: {
title: 'State',
type: 'text',
}
}
};
metricsTableData = [
{
image: 'app/browsers/chrome.svg',
browser: 'Google Chrome',
visits: '10,392',
isVisitsUp: true,
purchases: '4,214',
isPurchasesUp: true,
percent: '45%',
isPercentUp: true
},
{
image: 'app/browsers/firefox.svg',
browser: 'Mozilla Firefox',
visits: '7,873',
isVisitsUp: true,
purchases: '3,031',
isPurchasesUp: false,
percent: '28%',
isPercentUp: true
},
{
image: 'app/browsers/ie.svg',
browser: 'Internet Explorer',
visits: '5,890',
isVisitsUp: false,
purchases: '2,102',
isPurchasesUp: false,
percent: '17%',
isPercentUp: false
},
{
image: 'app/browsers/safari.svg',
browser: 'Safari',
visits: '4,001',
isVisitsUp: false,
purchases: '1,001',
isPurchasesUp: false,
percent: '14%',
isPercentUp: true
},
{
image: 'app/browsers/opera.svg',
browser: 'Opera',
visits: '1,833',
isVisitsUp: true,
purchases: '83',
isPurchasesUp: true,
percent: '5%',
isPercentUp: false
}
];
source1: LocalDataSource = new LocalDataSource();
source2: LocalDataSource = new LocalDataSource();
source3: LocalDataSource = new LocalDataSource();
constructor(
private route: ActivatedRoute,
private location: Location,
private order_service: OrderService,
private material_service: MaterialService,
private part_service: PartService,
private client_service: ClientService,
private _elementRef:ElementRef,
private _chartistJsService:ChartistJsService) {
}
ngOnInit() {
var id:number;
this.route.params
.switchMap((params: Params) => this.client_service.getClientInvoices(params['id']))
.subscribe(res => {
var items = res.json()["items"];
var year1 = [];
var year2 = [];
var year3 = [];
var year4 = [];
var labels = ["Jul", "Aug", "Sep", "Oct", "Nov", "Dec", "Jan", "Feb", "Mar", "Apr", "May", "Jun"];
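// Bucket invoice totals into four July-June fiscal years; item["y"] (1-4)
// selects which year series the total is plotted in below.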
for(var _i = 0; _i < items.length; _i++) {
let item = items[_i];
if (item["y"] == 1) {
year1.push(item["total"])
} else if (item["y"] == 2) {
year2.push(item["total"])
} else if (item["y"] == 3) {
year3.push(item["total"])
} else {
year4.push(item["total"])
}
}
var allYear = [year1, year2, year3, year4];
new Chartist.Line('.ct-chart', {
labels: labels,
series: allYear
});
});
this.route.params.subscribe(params => {
id = params['id'];
});
this.part_service.getPartByClientId(id).subscribe(res=> {
this.source1.load(res.json()["items"]);
})
this.order_service.getOrderByClientId(id).subscribe(res=> {
this.source2.load(res.json()["items"]);
})
this.material_service.getMaterialByClientId(id).subscribe(res=> {
this.source3.load(res.json()["items"]);
})
//this.data = this._chartistJsService.getAll();
}
ngAfterViewInit() {
//part list render
document.getElementsByClassName('code')['0'].style.width = '100px';
document.getElementsByClassName('cust_part_no')['0'].style.width = '100px';
document.getElementsByClassName('drawing_no')['0'].style.width = '100px';
document.getElementsByClassName('unit')['0'].style.width = '100px';
//order list render
document.getElementsByClassName('order_code')['0'].style.width = '100px';
document.getElementsByClassName('customer')['0'].style.width = '100px';
document.getElementsByClassName('contact_name')['0'].style.width = '150px';
document.getElementsByClassName('sum_one')['0'].style.width = '100px';
// //material list render
// document.getElementsByClassName('client_code')['0'].style.width = '100px';
// document.getElementsByClassName('e_mail')['0'].style.width = '180px';
// document.getElementsByClassName('state')['0'].style.width = '50px';
let el = this._elementRef.nativeElement.querySelector('.google-maps');
this.route.params
.switchMap((params: Params) => this.client_service.getClientDetails(params['id']))
.subscribe(res => {
this.inputClientCode = res.json()["items"][0]["client_code"];
this.inputClientName = res.json()["items"][0]["client_name"];
this.inputCountryCode = res.json()["items"][0]["country_code"];
this.inputEmail = res.json()["items"][0]["e_mail"];
this.inputWebsite = res.json()["items"][0]["web_site"];
this.inputPostalAddress1 = res.json()["items"][0]["address1"];
this.inputDeliveryAddress1 = res.json()["items"][0]["postal_address1"];
this.inputPostalAddress2 = res.json()["items"][0]["address2"];
this.inputDeliveryAddress2 = res.json()["items"][0]["postal_address2"];
this.inputPostalCity = res.json()["items"][0]["city"];
this.inputDeliveryCity = res.json()["items"][0]["postal_city"];
this.inputPostalState = res.json()["items"][0]["state"];
this.inputDeliveryState = res.json()["items"][0]["state"];
this.inputPostalPostcode = res.json()["items"][0]["postcode"];<|fim▁hole|>
// Load Google Map
this.loadGoogleMap(el, address);
});
}
// Declared as class methods; `function` declarations are not valid inside
// a TypeScript class body.
loadGoogleMap(el, address) {
GoogleMapsLoader.load((google) => {
var map = new google.maps.Map(el, {
center: new google.maps.LatLng(-26.9968449, 153.3178702),
zoom: 16,
mapTypeId: google.maps.MapTypeId.ROADMAP
});
var geocoder = new google.maps.Geocoder();
this.geocodeAddress(geocoder, map, address);
});
}
geocodeAddress(geocoder, resultsMap, address) {
geocoder.geocode({'address': address}, function(results, status) {
if (status === 'OK') {
resultsMap.setCenter(results[0].geometry.location);
var marker = new google.maps.Marker({
map: resultsMap,
position: results[0].geometry.location
});
} else {
alert('Geocode was not successful for the following reason: ' + status);
}
});
}
}<|fim▁end|>
|
this.inputDeliveryPostcode = res.json()["items"][0]["postal_postcode"];
// Address
var address = res.json()["items"][0]["address1"] + "," + res.json()["items"][0]["address2"] + "," + res.json()["items"][0]["city"] + "," + res.json()["items"][0]["state"];
|
<|file_name|>IfcSubContractResourceTypeEnum.java<|end_file_name|><|fim▁begin|><|fim▁hole|> * Copyright (C) 2009-2014 BIMserver.org
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package cn.dlb.bim.models.ifc4;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.eclipse.emf.common.util.Enumerator;
/**
* <!-- begin-user-doc -->
* A representation of the literals of the enumeration '<em><b>Ifc Sub Contract Resource Type Enum</b></em>',
* and utility methods for working with them.
* <!-- end-user-doc -->
* @see cn.dlb.bim.models.ifc4.Ifc4Package#getIfcSubContractResourceTypeEnum()
* @model
* @generated
*/
public enum IfcSubContractResourceTypeEnum implements Enumerator {
/**
* The '<em><b>NULL</b></em>' literal object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #NULL_VALUE
* @generated
* @ordered
*/
NULL(0, "NULL", "NULL"),
/**
* The '<em><b>NOTDEFINED</b></em>' literal object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #NOTDEFINED_VALUE
* @generated
* @ordered
*/
NOTDEFINED(1, "NOTDEFINED", "NOTDEFINED"),
/**
* The '<em><b>WORK</b></em>' literal object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #WORK_VALUE
* @generated
* @ordered
*/
WORK(2, "WORK", "WORK"),
/**
* The '<em><b>USERDEFINED</b></em>' literal object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #USERDEFINED_VALUE
* @generated
* @ordered
*/
USERDEFINED(3, "USERDEFINED", "USERDEFINED"),
/**
* The '<em><b>PURCHASE</b></em>' literal object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #PURCHASE_VALUE
* @generated
* @ordered
*/
PURCHASE(4, "PURCHASE", "PURCHASE");
/**
* The '<em><b>NULL</b></em>' literal value.
* <!-- begin-user-doc -->
* <p>
* If the meaning of '<em><b>NULL</b></em>' literal object isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @see #NULL
* @model
* @generated
* @ordered
*/
public static final int NULL_VALUE = 0;
/**
* The '<em><b>NOTDEFINED</b></em>' literal value.
* <!-- begin-user-doc -->
* <p>
* If the meaning of '<em><b>NOTDEFINED</b></em>' literal object isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @see #NOTDEFINED
* @model
* @generated
* @ordered
*/
public static final int NOTDEFINED_VALUE = 1;
/**
* The '<em><b>WORK</b></em>' literal value.
* <!-- begin-user-doc -->
* <p>
* If the meaning of '<em><b>WORK</b></em>' literal object isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @see #WORK
* @model
* @generated
* @ordered
*/
public static final int WORK_VALUE = 2;
/**
* The '<em><b>USERDEFINED</b></em>' literal value.
* <!-- begin-user-doc -->
* <p>
* If the meaning of '<em><b>USERDEFINED</b></em>' literal object isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @see #USERDEFINED
* @model
* @generated
* @ordered
*/
public static final int USERDEFINED_VALUE = 3;
/**
* The '<em><b>PURCHASE</b></em>' literal value.
* <!-- begin-user-doc -->
* <p>
* If the meaning of '<em><b>PURCHASE</b></em>' literal object isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @see #PURCHASE
* @model
* @generated
* @ordered
*/
public static final int PURCHASE_VALUE = 4;
/**
* An array of all the '<em><b>Ifc Sub Contract Resource Type Enum</b></em>' enumerators.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private static final IfcSubContractResourceTypeEnum[] VALUES_ARRAY = new IfcSubContractResourceTypeEnum[] { NULL, NOTDEFINED, WORK, USERDEFINED, PURCHASE, };
/**
* A public read-only list of all the '<em><b>Ifc Sub Contract Resource Type Enum</b></em>' enumerators.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public static final List<IfcSubContractResourceTypeEnum> VALUES = Collections.unmodifiableList(Arrays.asList(VALUES_ARRAY));
/**
* Returns the '<em><b>Ifc Sub Contract Resource Type Enum</b></em>' literal with the specified literal value.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param literal the literal.
* @return the matching enumerator or <code>null</code>.
* @generated
*/
public static IfcSubContractResourceTypeEnum get(String literal) {
for (int i = 0; i < VALUES_ARRAY.length; ++i) {
IfcSubContractResourceTypeEnum result = VALUES_ARRAY[i];
if (result.toString().equals(literal)) {
return result;
}
}
return null;
}
/**
* Returns the '<em><b>Ifc Sub Contract Resource Type Enum</b></em>' literal with the specified name.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param name the name.
* @return the matching enumerator or <code>null</code>.
* @generated
*/
public static IfcSubContractResourceTypeEnum getByName(String name) {
for (int i = 0; i < VALUES_ARRAY.length; ++i) {
IfcSubContractResourceTypeEnum result = VALUES_ARRAY[i];
if (result.getName().equals(name)) {
return result;
}
}
return null;
}
/**
* Returns the '<em><b>Ifc Sub Contract Resource Type Enum</b></em>' literal with the specified integer value.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the integer value.
* @return the matching enumerator or <code>null</code>.
* @generated
*/
public static IfcSubContractResourceTypeEnum get(int value) {
switch (value) {
case NULL_VALUE:
return NULL;
case NOTDEFINED_VALUE:
return NOTDEFINED;
case WORK_VALUE:
return WORK;
case USERDEFINED_VALUE:
return USERDEFINED;
case PURCHASE_VALUE:
return PURCHASE;
}
return null;
}
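// Illustrative lookups: get("WORK"), getByName("WORK") and get(2) all
// resolve to the WORK literal; unknown inputs return null.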
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private final int value;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private final String name;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private final String literal;
/**
* Only this class can construct instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
private IfcSubContractResourceTypeEnum(int value, String name, String literal) {
this.value = value;
this.name = name;
this.literal = literal;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public int getValue() {
return value;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public String getName() {
return name;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public String getLiteral() {
return literal;
}
/**
* Returns the literal value of the enumerator, which is its string representation.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public String toString() {
return literal;
}
} //IfcSubContractResourceTypeEnum<|fim▁end|>
|
/**
|
<|file_name|>rpcwallet.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2010 Satoshi Nakamoto
// Copyright (c) 2009-2012 The Bitcoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "wallet.h"
#include "walletdb.h"
#include "rpcserver.h"
#include "init.h"
#include "base58.h"
#include "txdb.h"
#include "stealth.h"
#include "sigringu.h"
#include "smessage.h"
#include <sstream>
using namespace json_spirit;
int64_t nWalletUnlockTime;
static CCriticalSection cs_nWalletUnlockTime;
extern void TxToJSON(const CTransaction& tx, const uint256 hashBlock, json_spirit::Object& entry);
static void accountingDeprecationCheck()
{
if (!GetBoolArg("-enableaccounts", false))
throw std::runtime_error(
"Accounting API is deprecated and will be removed in future.\n"
"It can easily result in negative or odd balances if misused or misunderstood, which has happened in the field.\n"
"If you still want to enable it, add to your config file enableaccounts=1\n");
if (GetBoolArg("-staking", true))
throw std::runtime_error("If you want to use accounting API, staking must be disabled, add to your config file staking=0\n");
}
std::string HelpRequiringPassphrase()
{
return pwalletMain->IsCrypted()
? "\nrequires wallet passphrase to be set with walletpassphrase first"
: "";
}
void EnsureWalletIsUnlocked()
{
if (pwalletMain->IsLocked())
throw JSONRPCError(RPC_WALLET_UNLOCK_NEEDED, "Error: Please enter the wallet passphrase with walletpassphrase first.");
if (fWalletUnlockStakingOnly)
throw JSONRPCError(RPC_WALLET_UNLOCK_NEEDED, "Error: Wallet is unlocked for staking only.");
}
void WalletTxToJSON(const CWalletTx& wtx, Object& entry)
{
entry.push_back(Pair("version", wtx.nVersion));
int confirms = wtx.GetDepthInMainChain();
entry.push_back(Pair("confirmations", confirms));
if (wtx.IsCoinBase() || wtx.IsCoinStake())
entry.push_back(Pair("generated", true));
if (confirms > 0)
{
entry.push_back(Pair("blockhash", wtx.hashBlock.GetHex()));
entry.push_back(Pair("blockindex", wtx.nIndex));
int64_t nTime = 0;
if (nNodeMode == NT_FULL)
{
nTime = mapBlockIndex[wtx.hashBlock]->nTime;
} else
{
std::map<uint256, CBlockThinIndex*>::iterator mi = mapBlockThinIndex.find(wtx.hashBlock);
if (mi != mapBlockThinIndex.end())
nTime = (*mi).second->nTime;
};
entry.push_back(Pair("blocktime", nTime));
};
entry.push_back(Pair("txid", wtx.GetHash().GetHex()));
entry.push_back(Pair("time", (int64_t)wtx.GetTxTime()));
entry.push_back(Pair("timereceived", (int64_t)wtx.nTimeReceived));
BOOST_FOREACH(const PAIRTYPE(std::string,std::string)& item, wtx.mapValue)
entry.push_back(Pair(item.first, item.second));
}
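// A sketch of the JSON the helper above produces (all field values are
// illustrative only):
//   {"version":1, "confirmations":12, "blockhash":"...", "blockindex":0,
//    "blocktime":1500000000, "txid":"...", "time":1500000000,
//    "timereceived":1500000000, ...plus any wtx.mapValue entries...}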
std::string AccountFromValue(const Value& value)
{
std::string strAccount = value.get_str();
if (strAccount == "*")
throw JSONRPCError(RPC_WALLET_INVALID_ACCOUNT_NAME, "Invalid account name");
return strAccount;
}
Value getinfo(const Array& params, bool fHelp)
{
static const char *help = ""
"getinfo ['env']\n"
"Returns an object containing various state info.";
if (fHelp || params.size() > 1)
throw std::runtime_error(help);
proxyType proxy;
GetProxy(NET_IPV4, proxy);
Object obj, diff;
if (params.size() > 0)
{
if (params[0].get_str().compare("env") == 0)
{
obj.push_back(Pair("version", FormatFullVersion()));
obj.push_back(Pair("mode", std::string(GetNodeModeName(nNodeMode))));
obj.push_back(Pair("state", nNodeMode == NT_THIN ? std::string(GetNodeStateName(nNodeState)) : "Full Node"));
obj.push_back(Pair("protocolversion", (int)PROTOCOL_VERSION));
obj.push_back(Pair("testnet", fTestNet));
obj.push_back(Pair("debug", fDebug));
obj.push_back(Pair("debugpos", fDebugPoS));
obj.push_back(Pair("debugringsig", fDebugRingSig));
obj.push_back(Pair("datadir", GetDataDir().string()));
obj.push_back(Pair("walletfile", pwalletMain->strWalletFile));
obj.push_back(Pair("walletversion", pwalletMain->GetVersion()));
obj.push_back(Pair("walletcrypted", pwalletMain->IsCrypted()));
obj.push_back(Pair("walletlocked", pwalletMain->IsCrypted() ? pwalletMain->IsLocked() ? "Locked" : "Unlocked" : "Uncrypted"));
obj.push_back(Pair("walletunlockedto", pwalletMain->IsCrypted() ? !pwalletMain->IsLocked() ? strprintf("%d", (int64_t)nWalletUnlockTime / 1000).c_str() : "Locked" : "Uncrypted"));
obj.push_back(Pair("errors", GetWarnings("statusbar")));
return obj;
} else
{
throw std::runtime_error(help);
};
};
obj.push_back(Pair("version", FormatFullVersion()));
obj.push_back(Pair("mode", std::string(GetNodeModeName(nNodeMode))));
obj.push_back(Pair("state", nNodeMode == NT_THIN ? std::string(GetNodeStateName(nNodeState)) : "Full Node"));
obj.push_back(Pair("protocolversion",(int)PROTOCOL_VERSION));
obj.push_back(Pair("walletversion", pwalletMain->GetVersion()));
obj.push_back(Pair("balance", ValueFromAmount(pwalletMain->GetBalance())));
obj.push_back(Pair("eclipsebalance", ValueFromAmount(pwalletMain->GetEclipseBalance())));
obj.push_back(Pair("newmint", ValueFromAmount(pwalletMain->GetNewMint())));
obj.push_back(Pair("stake", ValueFromAmount(pwalletMain->GetStake())));
obj.push_back(Pair("reserve", ValueFromAmount(nReserveBalance)));
obj.push_back(Pair("blocks", (int)nBestHeight));
if (nNodeMode == NT_THIN)
obj.push_back(Pair("filteredblocks", (int)nHeightFilteredNeeded));
obj.push_back(Pair("timeoffset", (int64_t)GetTimeOffset()));
if (nNodeMode == NT_FULL)
obj.push_back(Pair("moneysupply", ValueFromAmount(pindexBest->nMoneySupply)));
obj.push_back(Pair("connections", (int)vNodes.size()));
obj.push_back(Pair("datareceived", bytesReadable(CNode::GetTotalBytesRecv())));
obj.push_back(Pair("datasent", bytesReadable(CNode::GetTotalBytesSent())));
obj.push_back(Pair("proxy", (proxy.IsValid() ? proxy.ToStringIPPort() : std::string())));
obj.push_back(Pair("ip", addrSeenByPeer.ToStringIP()));
if (nNodeMode == NT_FULL)
{
diff.push_back(Pair("proof-of-work", GetDifficulty()));
diff.push_back(Pair("proof-of-stake", GetDifficulty(GetLastBlockIndex(pindexBest, true))));
} else
{
diff.push_back(Pair("proof-of-work", GetHeaderDifficulty()));
diff.push_back(Pair("proof-of-stake", GetHeaderDifficulty(GetLastBlockThinIndex(pindexBestHeader, true))));
};
obj.push_back(Pair("difficulty", diff));
obj.push_back(Pair("testnet", fTestNet));
obj.push_back(Pair("keypoolsize", (int)pwalletMain->GetKeyPoolSize()));
obj.push_back(Pair("paytxfee", ValueFromAmount(nTransactionFee)));
obj.push_back(Pair("mininput", ValueFromAmount(nMinimumInputValue)));
if (pwalletMain->IsCrypted())
obj.push_back(Pair("unlocked_until", (int64_t)nWalletUnlockTime / 1000));
obj.push_back(Pair("errors", GetWarnings("statusbar")));
return obj;
}
Value getnewpubkey(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw std::runtime_error(
"getnewpubkey [account]\n"
"Returns new public key for coinbase generation.");
// Parse the account first so we don't generate a key if there's an error
std::string strAccount;
if (params.size() > 0)
{
strAccount = AccountFromValue(params[0]);
};
if (pwalletMain->IsLocked())
throw std::runtime_error("Wallet is locked.");
// Generate a new key that is added to wallet
CPubKey newKey;
if (0 != pwalletMain->NewKeyFromAccount(newKey))
throw std::runtime_error("NewKeyFromAccount failed.");
CKeyID keyID = newKey.GetID();
pwalletMain->SetAddressBookName(keyID, strAccount, NULL, true, true);
return HexStr(newKey.begin(), newKey.end());
}
Value getnewaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw std::runtime_error(
"getnewaddress [account]\n"
"Returns a new EclipseCrypto address for receiving payments. "
"If [account] is specified, it is added to the address book "
"so payments received with the address will be credited to [account].");
// Parse the account first so we don't generate a key if there's an error
std::string strAccount;
if (params.size() > 0)
strAccount = AccountFromValue(params[0]);
// Generate a new key that is added to wallet
CPubKey newKey;
if (0 != pwalletMain->NewKeyFromAccount(newKey))
throw std::runtime_error("NewKeyFromAccount failed.");
CKeyID keyID = newKey.GetID();
pwalletMain->SetAddressBookName(keyID, strAccount, NULL, true, true);
return CBitcoinAddress(keyID).ToString();
}
Value getnewextaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw std::runtime_error(
"getnewextaddress [label]\n"
"Returns a new EclipseCrypto ext address for receiving payments."
"If [label] is specified, it is added to the address book. ");
std::string strLabel;
if (params.size() > 0)
strLabel = params[0].get_str();
// Generate a new key that is added to wallet
CStoredExtKey *sek = new CStoredExtKey();
if (0 != pwalletMain->NewExtKeyFromAccount(strLabel, sek))
{
delete sek;
throw std::runtime_error("NewExtKeyFromAccount failed.");
};
pwalletMain->SetAddressBookName(sek->kp, strLabel, NULL, true, true);
// - CBitcoinAddress displays public key only
return CBitcoinAddress(sek->kp).ToString();
}
CBitcoinAddress GetAccountAddress(std::string strAccount, bool bForceNew=false)
{
CWalletDB walletdb(pwalletMain->strWalletFile);
CAccount account;
walletdb.ReadAccount(strAccount, account);
bool bKeyUsed = false;
// Check if the current key has been used
if (account.vchPubKey.IsValid())
{
CScript scriptPubKey;
scriptPubKey.SetDestination(account.vchPubKey.GetID());
for (std::map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin();
it != pwalletMain->mapWallet.end() && account.vchPubKey.IsValid();
++it)
{
const CWalletTx& wtx = (*it).second;
BOOST_FOREACH(const CTxOut& txout, wtx.vout)
if (txout.scriptPubKey == scriptPubKey)
bKeyUsed = true;
}
}
// Generate a new key
if (!account.vchPubKey.IsValid() || bForceNew || bKeyUsed)
{
// Generate a new key that is added to wallet
CStoredExtKey *sek = new CStoredExtKey();
if (0 != pwalletMain->NewExtKeyFromAccount(strAccount, sek))
{
delete sek;
throw std::runtime_error("NewExtKeyFromAccount failed.");
};
account.vchPubKey = sek->kp.pubkey;
pwalletMain->SetAddressBookName(account.vchPubKey.GetID(), strAccount);
walletdb.WriteAccount(strAccount, account);
}
return CBitcoinAddress(account.vchPubKey.GetID());
}
Value getaccountaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw std::runtime_error(
"getaccountaddress <account>\n"
"Returns the current EclipseCrypto address for receiving payments to this account.");
// Parse the account first so we don't generate a key if there's an error
std::string strAccount = AccountFromValue(params[0]);
Value ret;
ret = GetAccountAddress(strAccount).ToString();
return ret;
}
Value setaccount(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 2)
throw std::runtime_error(
"setaccount <eclipsecryptoaddress> <account>\n"
"Sets the account associated with the given address.");
CBitcoinAddress address(params[0].get_str());
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid EclipseCrypto address");
std::string strAccount;
if (params.size() > 1)
strAccount = AccountFromValue(params[1]);
// Detect when changing the account of an address that is the 'unused current key' of another account:
if (pwalletMain->mapAddressBook.count(address.Get()))
{
std::string strOldAccount = pwalletMain->mapAddressBook[address.Get()];
if (address == GetAccountAddress(strOldAccount))
GetAccountAddress(strOldAccount, true);
};
pwalletMain->SetAddressBookName(address.Get(), strAccount);
return Value::null;
}
Value getaccount(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw std::runtime_error(
"getaccount <eclipsecryptoaddress>\n"
"Returns the account associated with the given address.");
CBitcoinAddress address(params[0].get_str());
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid EclipseCrypto address");
std::string strAccount;
std::map<CTxDestination, std::string>::iterator mi = pwalletMain->mapAddressBook.find(address.Get());
if (mi != pwalletMain->mapAddressBook.end() && !(*mi).second.empty())
strAccount = (*mi).second;
return strAccount;
}
Value getaddressesbyaccount(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw std::runtime_error(
"getaddressesbyaccount <account>\n"
"Returns the list of addresses for the given account.");
std::string strAccount = AccountFromValue(params[0]);
// Find all addresses that have the given account
Array ret;
BOOST_FOREACH(const PAIRTYPE(CBitcoinAddress, std::string)& item, pwalletMain->mapAddressBook)
{
const CBitcoinAddress& address = item.first;
const std::string& strName = item.second;
if (strName == strAccount)
ret.push_back(address.ToString());
}
return ret;
}
Value sendtoaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 2 || params.size() > 5)
throw std::runtime_error(
"sendtoaddress <eclipsecryptoaddress> <amount> [comment] [comment-to] [narration]\n" // Exchanges use the comments internally...
"sendtoaddress <eclipsecryptoaddress> <amount> [narration]\n"
"<amount> is a real and is rounded to the nearest 0.000001"
+ HelpRequiringPassphrase());
EnsureWalletIsUnlocked();
if (params[0].get_str().length() > 75
&& IsStealthAddress(params[0].get_str()))
return sendtostealthaddress(params, false);
std::string sAddrIn = params[0].get_str();
CBitcoinAddress address(sAddrIn);
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid EclipseCrypto address");
// Amount
int64_t nAmount = AmountFromValue(params[1]);
CWalletTx wtx;
std::string sNarr;
// Wallet comments
if (params.size() > 2 && params[2].type() != null_type && !params[2].get_str().empty())
wtx.mapValue["comment"] = params[2].get_str();
if (params.size() > 3 && params[3].type() != null_type && !params[3].get_str().empty())
wtx.mapValue["to"] = params[3].get_str();
if (params.size() > 4 && params[4].type() != null_type && !params[4].get_str().empty())
sNarr = params[4].get_str();
if (sNarr.length() > 24)
throw std::runtime_error("Narration must be 24 characters or less.");
std::string strError = pwalletMain->SendMoneyToDestination(address.Get(), nAmount, sNarr, wtx);
if (strError != "")
throw JSONRPCError(RPC_WALLET_ERROR, strError);
return wtx.GetHash().GetHex();
}
Value listaddressgroupings(const Array& params, bool fHelp)
{
if (fHelp)
throw std::runtime_error(
"listaddressgroupings\n"
"Lists groups of addresses which have had their common ownership\n"
"made public by common use as inputs or as the resulting change\n"
"in past transactions");
Array jsonGroupings;
std::map<CTxDestination, int64_t> balances = pwalletMain->GetAddressBalances();
BOOST_FOREACH(std::set<CTxDestination> grouping, pwalletMain->GetAddressGroupings())
{
Array jsonGrouping;
BOOST_FOREACH(CTxDestination address, grouping)
{
Array addressInfo;
addressInfo.push_back(CBitcoinAddress(address).ToString());
addressInfo.push_back(ValueFromAmount(balances[address]));
{
LOCK(pwalletMain->cs_wallet);
if (pwalletMain->mapAddressBook.find(CBitcoinAddress(address).Get()) != pwalletMain->mapAddressBook.end())
addressInfo.push_back(pwalletMain->mapAddressBook.find(CBitcoinAddress(address).Get())->second);
} // cs_wallet
jsonGrouping.push_back(addressInfo);
};
jsonGroupings.push_back(jsonGrouping);
};
return jsonGroupings;
}
Value signmessage(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 2)
throw std::runtime_error(
"signmessage <eclipsecryptoaddress> <message>\n"
"Sign a message with the private key of an address");
EnsureWalletIsUnlocked();
std::string strAddress = params[0].get_str();
std::string strMessage = params[1].get_str();
CBitcoinAddress addr(strAddress);
if (!addr.IsValid())
throw JSONRPCError(RPC_TYPE_ERROR, "Invalid address");
CKeyID keyID;
if (!addr.GetKeyID(keyID))
throw JSONRPCError(RPC_TYPE_ERROR, "Address does not refer to key");
CKey key;
if (!pwalletMain->GetKey(keyID, key))
throw JSONRPCError(RPC_WALLET_ERROR, "Private key not available");
CDataStream ss(SER_GETHASH, 0);
ss << strMessageMagic;
ss << strMessage;
std::vector<unsigned char> vchSig;
if (!key.SignCompact(Hash(ss.begin(), ss.end()), vchSig))
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Sign failed");
return EncodeBase64(&vchSig[0], vchSig.size());
}
Value verifymessage(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 3)
throw std::runtime_error(
"verifymessage <eclipsecryptoaddress> <signature> <message>\n"
"Verify a signed message");
std::string strAddress = params[0].get_str();
std::string strSign = params[1].get_str();
std::string strMessage = params[2].get_str();
CBitcoinAddress addr(strAddress);
if (!addr.IsValid())
throw JSONRPCError(RPC_TYPE_ERROR, "Invalid address");
CKeyID keyID;
if (!addr.GetKeyID(keyID))
throw JSONRPCError(RPC_TYPE_ERROR, "Address does not refer to key");
bool fInvalid = false;
std::vector<unsigned char> vchSig = DecodeBase64(strSign.c_str(), &fInvalid);
if (fInvalid)
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Malformed base64 encoding");
CHashWriter ss(SER_GETHASH, 0);
ss << strMessageMagic;
ss << strMessage;
CPubKey pubkey;
if (!pubkey.RecoverCompact(ss.GetHash(), vchSig))
return false;
return (pubkey.GetID() == keyID);
}
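// Sketch of the sign/verify round trip (address and signature values are
// purely illustrative):
//   signmessage <address> "hello"               -> base64 signature
//   verifymessage <address> <signature> "hello" -> true
// Verification fails if the address, signature or message text differ.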
Value getreceivedbyaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 2)
throw std::runtime_error(
"getreceivedbyaddress <eclipsecryptoaddress> [minconf=1]\n"
"Returns the total amount received by <eclipsecryptoaddress> in transactions with at least [minconf] confirmations.");
// Bitcoin address
CBitcoinAddress address = CBitcoinAddress(params[0].get_str());
CScript scriptPubKey;
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid EclipseCrypto address");
scriptPubKey.SetDestination(address.Get());
if (!IsMine(*pwalletMain,scriptPubKey))
return (double)0.0;
// Minimum confirmations
int nMinDepth = 1;
if (params.size() > 1)
nMinDepth = params[1].get_int();
// Tally
int64_t nAmount = 0;
for (std::map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
{
const CWalletTx& wtx = (*it).second;
if (wtx.IsCoinBase() || wtx.IsCoinStake() || !wtx.IsFinal())
continue;
BOOST_FOREACH(const CTxOut& txout, wtx.vout)
if (txout.scriptPubKey == scriptPubKey)
if (wtx.GetDepthInMainChain() >= nMinDepth)
nAmount += txout.nValue;
}
return ValueFromAmount(nAmount);
}
void GetAccountAddresses(std::string strAccount, std::set<CTxDestination>& setAddress)
{
BOOST_FOREACH(const PAIRTYPE(CTxDestination, std::string)& item, pwalletMain->mapAddressBook)
{
const CTxDestination& address = item.first;
const std::string& strName = item.second;
if (strName == strAccount)
setAddress.insert(address);
};
}
Value getreceivedbyaccount(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 2)
throw std::runtime_error(
"getreceivedbyaccount <account> [minconf=1]\n"
"Returns the total amount received by addresses with <account> in transactions with at least [minconf] confirmations.");
accountingDeprecationCheck();
// Minimum confirmations
int nMinDepth = 1;
if (params.size() > 1)
nMinDepth = params[1].get_int();
// Get the set of pub keys assigned to account
std::string strAccount = AccountFromValue(params[0]);
std::set<CTxDestination> setAddress;
GetAccountAddresses(strAccount, setAddress);
// Tally
int64_t nAmount = 0;
for (std::map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
{
const CWalletTx& wtx = (*it).second;
if (wtx.IsCoinBase() || wtx.IsCoinStake() || !wtx.IsFinal())
continue;
BOOST_FOREACH(const CTxOut& txout, wtx.vout)
{
CTxDestination address;
if (ExtractDestination(txout.scriptPubKey, address) && IsDestMine(*pwalletMain, address) && setAddress.count(address))
if (wtx.GetDepthInMainChain() >= nMinDepth)
nAmount += txout.nValue;
};
};
return ValueFromAmount(nAmount);
}
int64_t GetAccountBalance(CWalletDB& walletdb, const std::string& strAccount, int nMinDepth)
{
int64_t nBalance = 0;
// Tally wallet transactions
for (std::map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
{
const CWalletTx& wtx = (*it).second;
if (!wtx.IsFinal() || wtx.GetDepthInMainChain() < 0)
continue;
int64_t nReceived, nSent, nFee;
wtx.GetAccountAmounts(strAccount, nReceived, nSent, nFee);
if (nReceived != 0 && wtx.GetDepthInMainChain() >= nMinDepth && wtx.GetBlocksToMaturity() == 0)
nBalance += nReceived;
nBalance -= nSent + nFee;
}
// Tally internal accounting entries
nBalance += walletdb.GetAccountCreditDebit(strAccount);
return nBalance;
}
int64_t GetAccountBalance(const std::string& strAccount, int nMinDepth)
{
CWalletDB walletdb(pwalletMain->strWalletFile);
return GetAccountBalance(walletdb, strAccount, nMinDepth);
}
Value getbalance(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 2)
throw std::runtime_error(
"getbalance [account] [minconf=1]\n"
"If [account] is not specified, returns the server's total available balance.\n"
"If [account] is specified, returns the balance in the account.");
if (params.size() == 0)
return ValueFromAmount(pwalletMain->GetBalance());
int nMinDepth = 1;
if (params.size() > 1)
nMinDepth = params[1].get_int();
if (params[0].get_str() == "*")
{
// Calculate total balance a different way from GetBalance()
// (GetBalance() sums up all unspent TxOuts)
// getbalance and getbalance '*' 0 should return the same number.
int64_t nBalance = 0;
for (std::map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
{
const CWalletTx& wtx = (*it).second;
if (!wtx.IsTrusted())
continue;
int64_t allFee;
std::string strSentAccount;
std::list<std::pair<CTxDestination, int64_t> > listReceived;
std::list<std::pair<CTxDestination, int64_t> > listSent;
wtx.GetAmounts(listReceived, listSent, allFee, strSentAccount);
if (wtx.GetDepthInMainChain() >= nMinDepth && wtx.GetBlocksToMaturity() == 0)
{
BOOST_FOREACH(const PAIRTYPE(CTxDestination,int64_t)& r, listReceived)
nBalance += r.second;
};
BOOST_FOREACH(const PAIRTYPE(CTxDestination,int64_t)& r, listSent)
nBalance -= r.second;
nBalance -= allFee;
};
return ValueFromAmount(nBalance);
};
accountingDeprecationCheck();
std::string strAccount = AccountFromValue(params[0]);
int64_t nBalance = GetAccountBalance(strAccount, nMinDepth);
return ValueFromAmount(nBalance);
}
Value movecmd(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 3 || params.size() > 5)
throw std::runtime_error(
"move <fromaccount> <toaccount> <amount> [minconf=1] [comment]\n"
"Move from one account in your wallet to another.");
accountingDeprecationCheck();
std::string strFrom = AccountFromValue(params[0]);
std::string strTo = AccountFromValue(params[1]);
int64_t nAmount = AmountFromValue(params[2]);
if (params.size() > 3)
// unused parameter, used to be nMinDepth, keep type-checking it though
(void)params[3].get_int();
std::string strComment;
if (params.size() > 4)
strComment = params[4].get_str();
CWalletDB walletdb(pwalletMain->strWalletFile);
if (!walletdb.TxnBegin())
throw JSONRPCError(RPC_DATABASE_ERROR, "database error");
int64_t nNow = GetAdjustedTime();
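// The matching debit and credit entries below are written inside the single
// DB transaction opened above, so the two account ledgers stay balanced
// even if one write fails.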
// Debit
CAccountingEntry debit;
debit.nOrderPos = pwalletMain->IncOrderPosNext(&walletdb);
debit.strAccount = strFrom;
debit.nCreditDebit = -nAmount;
debit.nTime = nNow;
debit.strOtherAccount = strTo;
debit.strComment = strComment;
walletdb.WriteAccountingEntry(debit);
// Credit
CAccountingEntry credit;
credit.nOrderPos = pwalletMain->IncOrderPosNext(&walletdb);
credit.strAccount = strTo;
credit.nCreditDebit = nAmount;
credit.nTime = nNow;
credit.strOtherAccount = strFrom;
credit.strComment = strComment;
walletdb.WriteAccountingEntry(credit);
if (!walletdb.TxnCommit())
throw JSONRPCError(RPC_DATABASE_ERROR, "database error");
return true;
}
Value sendfrom(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 3 || params.size() > 7)
throw std::runtime_error(
"sendfrom <fromaccount> <toeclipsecryptoaddress> <amount> [minconf=1] [comment] [comment-to] [narration] \n"
"<amount> is a real and is rounded to the nearest 0.000001"
+ HelpRequiringPassphrase());
EnsureWalletIsUnlocked();
std::string strAccount = AccountFromValue(params[0]);
CBitcoinAddress address(params[1].get_str());
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid EclipseCrypto address");
int64_t nAmount = AmountFromValue(params[2]);
int nMinDepth = 1;
if (params.size() > 3)
nMinDepth = params[3].get_int();
CWalletTx wtx;
wtx.strFromAccount = strAccount;
if (params.size() > 4 && params[4].type() != null_type && !params[4].get_str().empty())
wtx.mapValue["comment"] = params[4].get_str();
if (params.size() > 5 && params[5].type() != null_type && !params[5].get_str().empty())
wtx.mapValue["to"] = params[5].get_str();
std::string sNarr;
if (params.size() > 6 && params[6].type() != null_type && !params[6].get_str().empty())
sNarr = params[6].get_str();
if (sNarr.length() > 24)
throw std::runtime_error("Narration must be 24 characters or less.");
// Check funds
int64_t nBalance = GetAccountBalance(strAccount, nMinDepth);
if (nAmount > nBalance)
throw JSONRPCError(RPC_WALLET_INSUFFICIENT_FUNDS, "Account has insufficient funds");
// Send
std::string strError = pwalletMain->SendMoneyToDestination(address.Get(), nAmount, sNarr, wtx);
if (strError != "")
throw JSONRPCError(RPC_WALLET_ERROR, strError);
return wtx.GetHash().GetHex();
}
Value sendmany(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 2 || params.size() > 4)
throw std::runtime_error(
"sendmany <fromaccount> {address:amount,...} [minconf=1] [comment]\n"
"amounts are double-precision floating point numbers"
+ HelpRequiringPassphrase());
std::string strAccount = AccountFromValue(params[0]);
Object sendTo = params[1].get_obj();
int nMinDepth = 1;
if (params.size() > 2)
nMinDepth = params[2].get_int();
CWalletTx wtx;
wtx.strFromAccount = strAccount;
if (params.size() > 3 && params[3].type() != null_type && !params[3].get_str().empty())
wtx.mapValue["comment"] = params[3].get_str();
std::set<CBitcoinAddress> setAddress;
std::vector<std::pair<CScript, int64_t> > vecSend;
int64_t totalAmount = 0;
BOOST_FOREACH(const Pair& s, sendTo)
{
CBitcoinAddress address(s.name_);
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, std::string("Invalid EclipseCrypto address: ")+s.name_);
if (setAddress.count(address))
throw JSONRPCError(RPC_INVALID_PARAMETER, std::string("Invalid parameter, duplicated address: ")+s.name_);
setAddress.insert(address);
CScript scriptPubKey;
scriptPubKey.SetDestination(address.Get());
int64_t nAmount = AmountFromValue(s.value_);
totalAmount += nAmount;
vecSend.push_back(make_pair(scriptPubKey, nAmount));
};
EnsureWalletIsUnlocked();
// Check funds
int64_t nBalance = GetAccountBalance(strAccount, nMinDepth);
if (totalAmount > nBalance)
throw JSONRPCError(RPC_WALLET_INSUFFICIENT_FUNDS, "Account has insufficient funds");
// Send
int64_t nFeeRequired = 0;
int nChangePos;
bool fCreated = pwalletMain->CreateTransaction(vecSend, wtx, nFeeRequired, nChangePos);
if (!fCreated)
{
if (totalAmount + nFeeRequired > pwalletMain->GetBalance())
throw JSONRPCError(RPC_WALLET_INSUFFICIENT_FUNDS, "Insufficient funds");
throw JSONRPCError(RPC_WALLET_ERROR, "Transaction creation failed");
}
if (!pwalletMain->CommitTransaction(wtx))
throw JSONRPCError(RPC_WALLET_ERROR, "Transaction commit failed");
return wtx.GetHash().GetHex();
}
/**
* Used by addmultisigaddress / createmultisig:
*/
CScript _createmultisig_redeemScript(const Array& params)
{
int nRequired = params[0].get_int();
const Array& keys = params[1].get_array();
// Gather public keys
if (nRequired < 1)
throw std::runtime_error("a multisignature address must require at least one key to redeem");
if ((int)keys.size() < nRequired)
throw std::runtime_error(
strprintf("not enough keys supplied "
"(got %u keys, but need at least %d to redeem)", keys.size(), nRequired));
if (keys.size() > 16)
throw std::runtime_error("Number of addresses involved in the multisignature address creation > 16\nReduce the number");
std::vector<CPubKey> pubkeys;
pubkeys.resize(keys.size());
for (unsigned int i = 0; i < keys.size(); i++)
{
const std::string& ks = keys[i].get_str();
// Case 1: Bitcoin address and we have full public key:
CBitcoinAddress address(ks);
if (pwalletMain && address.IsValid())
{
CKeyID keyID;
if (!address.GetKeyID(keyID))
throw std::runtime_error(
strprintf("%s does not refer to a key",ks));
CPubKey vchPubKey;
if (!pwalletMain->GetPubKey(keyID, vchPubKey))
throw std::runtime_error(
strprintf("no full public key for address %s",ks));
if (!vchPubKey.IsFullyValid())
throw std::runtime_error(" Invalid public key: "+ks);
pubkeys[i] = vchPubKey;
}
// Case 2: hex public key
else
if (IsHex(ks))
{
CPubKey vchPubKey(ParseHex(ks));
if (!vchPubKey.IsFullyValid())
throw std::runtime_error(" Invalid public key: "+ks);
pubkeys[i] = vchPubKey;
}
else
{
throw std::runtime_error(" Invalid public key: "+ks);
}
}
CScript result = GetScriptForMultisig(nRequired, pubkeys);
if (result.size() > MAX_SCRIPT_ELEMENT_SIZE)
throw std::runtime_error(
strprintf("redeemScript exceeds size limit: %d > %d", result.size(), MAX_SCRIPT_ELEMENT_SIZE));
return result;
}
Value addmultisigaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 2 || params.size() > 3)
{
std::string msg = "addmultisigaddress <nrequired> <'[\"key\",\"key\"]'> [account]\n"
"Add a nrequired-to-sign multisignature address to the wallet\"\n"
"each key is a EclipseCrypto address or hex-encoded public key\n"
"If [account] is specified, assign address to [account].";
throw std::runtime_error(msg);
};
std::string strAccount;
if (params.size() > 2)
strAccount = AccountFromValue(params[2]);
// Construct using pay-to-script-hash:
CScript inner = _createmultisig_redeemScript(params);
CScriptID innerID(inner);
CBitcoinAddress address(innerID);
if (!pwalletMain->AddCScript(inner))
throw std::runtime_error("AddCScript() failed");
pwalletMain->SetAddressBookName(innerID, strAccount);
return CBitcoinAddress(innerID).ToString();
}
Value createmultisig(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 2 || params.size() > 3)
{
std::string msg = "addmultisigaddress <nrequired> <'[\"key\",\"key\"]'> [account]\n"
"\nCreates a multi-signature address with n signature of m keys required.\n"
"Returns a json object with the address and redeemScript.\n"
"Each key is a EclipseCrypto address or hex-encoded public key.\n"
"\nArguments:\n"
"1. nrequired (numeric, required) The number of required signatures out of the n keys or addresses.\n"
"2. \"keys\" (string, required) A json array of keys which are bitcoin addresses or hex-encoded public keys\n"
" [\n"
" \"key\" (string) bitcoin address or hex-encoded public key\n"
" ,...\n"
" ]\n"
"\nResult:\n"
"{\n"
" \"address\":\"multisigaddress\", (string) The value of the new multisig address.\n"
" \"redeemScript\":\"script\" (string) The string value of the hex-encoded redemption script.\n"
"}\n"
;
throw std::runtime_error(msg);
};
// Construct using pay-to-script-hash:
CScript inner = _createmultisig_redeemScript(params);
CScriptID innerID(inner);
CBitcoinAddress address(innerID);
Object result;
result.push_back(Pair("address", address.ToString()));
result.push_back(Pair("redeemScript", HexStr(inner.begin(), inner.end())));
return result;
}
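// Minimal sketch of a createmultisig call and reply, assuming two hex-encoded
// public keys (all values below are illustrative, not real keys or addresses):
//   createmultisig 2 '["03a1...","02b2..."]'
//   -> {"address":"<p2sh address>", "redeemScript":"5221...52ae"}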
Value addredeemscript(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 2)
{
std::string msg = "addredeemscript <redeemScript> [account]\n"
"Add a P2SH address with a specified redeemScript to the wallet.\n"
"If [account] is specified, assign address to [account].";
throw std::runtime_error(msg);
};
std::string strAccount;
if (params.size() > 1)
strAccount = AccountFromValue(params[1]);
// Construct using pay-to-script-hash:
std::vector<unsigned char> innerData = ParseHexV(params[0], "redeemScript");
CScript inner(innerData.begin(), innerData.end());
CScriptID innerID = inner.GetID();
if (!pwalletMain->AddCScript(inner))
throw std::runtime_error("AddCScript() failed");
pwalletMain->SetAddressBookName(innerID, strAccount);
return CBitcoinAddress(innerID).ToString();
}
struct tallyitem
{
int64_t nAmount;
int nConf;
tallyitem()
{
nAmount = 0;
nConf = std::numeric_limits<int>::max();
}
};
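// Shared worker for listreceivedbyaddress/listreceivedbyaccount: tallies
// wallet credits per address, or per account when fByAccounts is true.
// tallyitem::nConf starts at INT_MAX so the std::min() below keeps the depth
// of the least-confirmed (most recent) transaction seen for each address.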
Value ListReceived(const Array& params, bool fByAccounts)
{
// Minimum confirmations
int nMinDepth = 1;
if (params.size() > 0)
nMinDepth = params[0].get_int();
// Whether to include empty accounts
bool fIncludeEmpty = false;
if (params.size() > 1)
fIncludeEmpty = params[1].get_bool();
// Tally
std::map<CBitcoinAddress, tallyitem> mapTally;
for (std::map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
{
const CWalletTx& wtx = (*it).second;
if (wtx.IsCoinBase() || wtx.IsCoinStake() || !wtx.IsFinal())
continue;
int nDepth = wtx.GetDepthInMainChain();
if (nDepth < nMinDepth)
continue;
BOOST_FOREACH(const CTxOut& txout, wtx.vout)
{
CTxDestination address;
if (!ExtractDestination(txout.scriptPubKey, address) || !IsDestMine(*pwalletMain, address))
continue;
tallyitem& item = mapTally[address];
item.nAmount += txout.nValue;
item.nConf = std::min(item.nConf, nDepth);
}
}
// Reply
Array ret;
std::map<std::string, tallyitem> mapAccountTally;
BOOST_FOREACH(const PAIRTYPE(CBitcoinAddress, std::string)& item, pwalletMain->mapAddressBook)
{
const CBitcoinAddress& address = item.first;
const std::string& strAccount = item.second;
std::map<CBitcoinAddress, tallyitem>::iterator it = mapTally.find(address);
if (it == mapTally.end() && !fIncludeEmpty)
continue;
int64_t nAmount = 0;
int nConf = std::numeric_limits<int>::max();
if (it != mapTally.end())
{
nAmount = (*it).second.nAmount;
nConf = (*it).second.nConf;
}
if (fByAccounts)
{
tallyitem& item = mapAccountTally[strAccount];
item.nAmount += nAmount;
item.nConf = std::min(item.nConf, nConf);
} else
{
Object obj;
obj.push_back(Pair("address", address.ToString()));
obj.push_back(Pair("account", strAccount));
obj.push_back(Pair("amount", ValueFromAmount(nAmount)));
obj.push_back(Pair("confirmations", (nConf == std::numeric_limits<int>::max() ? 0 : nConf)));
ret.push_back(obj);
};
};
if (fByAccounts)
{
for (std::map<std::string, tallyitem>::iterator it = mapAccountTally.begin(); it != mapAccountTally.end(); ++it)
{
int64_t nAmount = (*it).second.nAmount;
int nConf = (*it).second.nConf;
Object obj;
obj.push_back(Pair("account", (*it).first));
obj.push_back(Pair("amount", ValueFromAmount(nAmount)));
obj.push_back(Pair("confirmations", (nConf == std::numeric_limits<int>::max() ? 0 : nConf)));
ret.push_back(obj);
};
};
return ret;
}
Value listreceivedbyaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 2)
throw std::runtime_error(
"listreceivedbyaddress [minconf=1] [includeempty=false]\n"
"[minconf] is the minimum number of confirmations before payments are included.\n"
"[includeempty] whether to include addresses that haven't received any payments.\n"
"Returns an array of objects containing:\n"
" \"address\" : receiving address\n"
" \"account\" : the account of the receiving address\n"
" \"amount\" : total amount received by the address\n"
" \"confirmations\" : number of confirmations of the most recent transaction included");
return ListReceived(params, false);
}
Value listreceivedbyaccount(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 2)
throw std::runtime_error(
"listreceivedbyaccount [minconf=1] [includeempty=false]\n"
"[minconf] is the minimum number of confirmations before payments are included.\n"
"[includeempty] whether to include accounts that haven't received any payments.\n"
"Returns an array of objects containing:\n"
" \"account\" : the account of the receiving addresses\n"
" \"amount\" : total amount received by addresses with this account\n"
" \"confirmations\" : number of confirmations of the most recent transaction included");
accountingDeprecationCheck();
return ListReceived(params, true);
}
static void MaybePushAddress(Object & entry, const CTxDestination &dest)
{
CBitcoinAddress addr;
if (addr.Set(dest))
entry.push_back(Pair("address", addr.ToString()));
}
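// Flattens one wallet transaction into JSON entries, one per sent or received
// destination. Coinstake transactions are collapsed to a single entry (see the
// 'stop' flag below) whose amount is the negated fee, i.e. the stake reward.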
void ListTransactions(const CWalletTx& wtx, const std::string& strAccount, int nMinDepth, bool fLong, Array& ret)
{
int64_t nFee;
std::string strSentAccount;
std::list<std::pair<CTxDestination, int64_t> > listReceived;
std::list<std::pair<CTxDestination, int64_t> > listSent;
wtx.GetAmounts(listReceived, listSent, nFee, strSentAccount);
bool fAllAccounts = (strAccount == std::string("*"));
// Sent
if ((!wtx.IsCoinStake()) && (!listSent.empty() || nFee != 0) && (fAllAccounts || strAccount == strSentAccount))
{
BOOST_FOREACH(const PAIRTYPE(CTxDestination, int64_t)& s, listSent)
{
Object entry;
entry.push_back(Pair("account", strSentAccount));
MaybePushAddress(entry, s.first);
entry.push_back(Pair("category", "send"));
entry.push_back(Pair("amount", ValueFromAmount(-s.second)));
entry.push_back(Pair("fee", ValueFromAmount(-nFee)));
if (fLong)
WalletTxToJSON(wtx, entry);
ret.push_back(entry);
};
};
// Received
if (listReceived.size() > 0 && wtx.GetDepthInMainChain() >= nMinDepth)
{
bool stop = false;
BOOST_FOREACH(const PAIRTYPE(CTxDestination, int64_t)& r, listReceived)
{
std::string account;
if (pwalletMain->mapAddressBook.count(r.first))
account = pwalletMain->mapAddressBook[r.first];
if (fAllAccounts || (account == strAccount))
{
Object entry;
entry.push_back(Pair("account", account));
MaybePushAddress(entry, r.first);
if (wtx.IsCoinBase() || wtx.IsCoinStake())
{
if (wtx.GetDepthInMainChain() < 1)
entry.push_back(Pair("category", "orphan"));
else
if (wtx.GetBlocksToMaturity() > 0)
entry.push_back(Pair("category", "immature"));
else
entry.push_back(Pair("category", "generate"));
} else
{
entry.push_back(Pair("category", "receive"));
};
if (!wtx.IsCoinStake())
{
entry.push_back(Pair("amount", ValueFromAmount(r.second)));
} else
{
entry.push_back(Pair("amount", ValueFromAmount(-nFee)));
stop = true; // only one coinstake output
};
if (fLong)
WalletTxToJSON(wtx, entry);
ret.push_back(entry);
};
if (stop)
break;
};
};
}
void AcentryToJSON(const CAccountingEntry& acentry, const std::string& strAccount, Array& ret)
{
bool fAllAccounts = (strAccount == std::string("*"));
if (fAllAccounts || acentry.strAccount == strAccount)
{
Object entry;
entry.push_back(Pair("account", acentry.strAccount));
entry.push_back(Pair("category", "move"));
entry.push_back(Pair("time", (int64_t)acentry.nTime));
entry.push_back(Pair("amount", ValueFromAmount(acentry.nCreditDebit)));
entry.push_back(Pair("otheraccount", acentry.strOtherAccount));
entry.push_back(Pair("comment", acentry.strComment));
ret.push_back(entry);
};
}
Value listtransactions(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 4)
throw std::runtime_error(
"listtransactions [account] [count=10] [from=0] [show_coinstake=1]\n"
"Returns up to [count] most recent transactions skipping the first [from] transactions for account [account].");
// listtransactions "*" 20 0 0
std::string strAccount = "*";
if (params.size() > 0)
strAccount = params[0].get_str();
int nCount = 10;
if (params.size() > 1)
nCount = params[1].get_int();
int nFrom = 0;
if (params.size() > 2)
nFrom = params[2].get_int();
bool fShowCoinstake = true;
if (params.size() > 3)
{
std::string value = params[3].get_str();
if (IsStringBoolNegative(value))
fShowCoinstake = false;
};
if (nCount < 0)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Negative count");
if (nFrom < 0)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Negative from");
Array ret;
std::list<CAccountingEntry> acentries;
CWallet::TxItems txOrdered = pwalletMain->OrderedTxItems(acentries, strAccount, fShowCoinstake);
// iterate backwards until we have nCount items to return:
for (CWallet::TxItems::reverse_iterator it = txOrdered.rbegin(); it != txOrdered.rend(); ++it)
{
CWalletTx *const pwtx = (*it).second.first;
if (pwtx != 0)
ListTransactions(*pwtx, strAccount, 0, true, ret);
CAccountingEntry *const pacentry = (*it).second.second;
if (pacentry != 0)
AcentryToJSON(*pacentry, strAccount, ret);
if ((int)ret.size() >= (nCount+nFrom)) break;
}
// ret is newest to oldest
if (nFrom > (int)ret.size())
nFrom = ret.size();
if ((nFrom + nCount) > (int)ret.size())
nCount = ret.size() - nFrom;
Array::iterator first = ret.begin();
std::advance(first, nFrom);
Array::iterator last = ret.begin();
std::advance(last, nFrom+nCount);
if (last != ret.end()) ret.erase(last, ret.end());
if (first != ret.begin()) ret.erase(ret.begin(), first);
std::reverse(ret.begin(), ret.end()); // Return oldest to newest
return ret;
}
Value listaccounts(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw std::runtime_error(
"listaccounts [minconf=1]\n"
"Returns Object that has account names as keys, account balances as values.");
accountingDeprecationCheck();
int nMinDepth = 1;
if (params.size() > 0)
nMinDepth = params[0].get_int();
std::map<std::string, int64_t> mapAccountBalances;
BOOST_FOREACH(const PAIRTYPE(CTxDestination, std::string)& entry, pwalletMain->mapAddressBook)
{
if (IsDestMine(*pwalletMain, entry.first)) // This address belongs to me
mapAccountBalances[entry.second] = 0;
};
for (std::map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
{
const CWalletTx& wtx = (*it).second;
int64_t nFee;
std::string strSentAccount;
std::list<std::pair<CTxDestination, int64_t> > listReceived;
std::list<std::pair<CTxDestination, int64_t> > listSent;
int nDepth = wtx.GetDepthInMainChain();
if (nDepth < 0)
continue;
wtx.GetAmounts(listReceived, listSent, nFee, strSentAccount);
mapAccountBalances[strSentAccount] -= nFee;
BOOST_FOREACH(const PAIRTYPE(CTxDestination, int64_t)& s, listSent)
mapAccountBalances[strSentAccount] -= s.second;
if (nDepth >= nMinDepth && wtx.GetBlocksToMaturity() == 0)
{
BOOST_FOREACH(const PAIRTYPE(CTxDestination, int64_t)& r, listReceived)
if (pwalletMain->mapAddressBook.count(r.first))
mapAccountBalances[pwalletMain->mapAddressBook[r.first]] += r.second;
else
mapAccountBalances[""] += r.second;
};
};
std::list<CAccountingEntry> acentries;
CWalletDB(pwalletMain->strWalletFile).ListAccountCreditDebit("*", acentries);
BOOST_FOREACH(const CAccountingEntry& entry, acentries)
mapAccountBalances[entry.strAccount] += entry.nCreditDebit;
Object ret;
BOOST_FOREACH(const PAIRTYPE(std::string, int64_t)& accountBalance, mapAccountBalances)
{
ret.push_back(Pair(accountBalance.first, ValueFromAmount(accountBalance.second)));
};
return ret;
}
Value listsinceblock(const Array& params, bool fHelp)
{
if (fHelp)
throw std::runtime_error(
"listsinceblock [blockhash] [target-confirmations]\n"
"Get all transactions in blocks since block [blockhash], or all transactions if omitted");
CBlockIndex *pindex = NULL;
int target_confirms = 1;
if (params.size() > 0)
{
uint256 blockId = 0;
blockId.SetHex(params[0].get_str());
pindex = CBlockLocator(blockId).GetBlockIndex();
};
if (params.size() > 1)
{
target_confirms = params[1].get_int();
if (target_confirms < 1)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter");
};
int depth = pindex ? (1 + nBestHeight - pindex->nHeight) : -1;
Array transactions;
for (std::map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); it++)
{
CWalletTx tx = (*it).second;
if (depth == -1 || tx.GetDepthInMainChain() < depth)
ListTransactions(tx, "*", 0, true, transactions);
};
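    // "lastblock" is the hash a caller should feed back into the next
    // listsinceblock call: the block target_confirms deep from the tip,
    // found by walking pprev back from pindexBest.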
uint256 lastblock;
if (target_confirms == 1)
{
lastblock = hashBestChain;
} else
{
int target_height = pindexBest->nHeight + 1 - target_confirms;
CBlockIndex *block;
for (block = pindexBest;
block && block->nHeight > target_height;
block = block->pprev) { }
lastblock = block ? block->GetBlockHash() : 0;
};
Object ret;
ret.push_back(Pair("transactions", transactions));
ret.push_back(Pair("lastblock", lastblock.GetHex()));
return ret;
}
Value gettransaction(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw std::runtime_error(
"gettransaction <txid>\n"
"Get detailed information about <txid>");
uint256 hash;
hash.SetHex(params[0].get_str());
Object entry;
if (pwalletMain->mapWallet.count(hash))
{
const CWalletTx& wtx = pwalletMain->mapWallet[hash];
TxToJSON(wtx, 0, entry);
int64_t nCredit = wtx.GetCredit();
int64_t nDebit = wtx.GetDebit();
int64_t nNet = nCredit - nDebit;
int64_t nFee = (wtx.IsFromMe() ? wtx.GetValueOut() - nDebit : 0);
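        // nFee is negative here (value out minus value in), so the "fee"
        // field below is reported as a negative amount, matching the sign
        // convention used by listtransactions.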
entry.push_back(Pair("amount", ValueFromAmount(nNet - nFee)));
if (wtx.IsFromMe())
entry.push_back(Pair("fee", ValueFromAmount(nFee)));
WalletTxToJSON(wtx, entry);
Array details;
ListTransactions(pwalletMain->mapWallet[hash], "*", 0, false, details);
entry.push_back(Pair("details", details));
} else
{
CTransaction tx;
uint256 hashBlock = 0;
if (GetTransaction(hash, tx, hashBlock))
{
TxToJSON(tx, 0, entry);
if (hashBlock == 0)
{
entry.push_back(Pair("confirmations", 0));
} else
{
entry.push_back(Pair("blockhash", hashBlock.GetHex()));
std::map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashBlock);
if (mi != mapBlockIndex.end() && (*mi).second)
{
CBlockIndex* pindex = (*mi).second;
if (pindex->IsInMainChain())
entry.push_back(Pair("confirmations", 1 + nBestHeight - pindex->nHeight));
else
entry.push_back(Pair("confirmations", 0));
};
};
} else
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "No information available about transaction");
};
return entry;
}
Value backupwallet(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw std::runtime_error(
"backupwallet <destination>\n"
"Safely copies wallet.dat to destination, which can be a directory or a path with filename.");
std::string strDest = params[0].get_str();
if (!BackupWallet(*pwalletMain, strDest))
throw JSONRPCError(RPC_WALLET_ERROR, "Error: Wallet backup failed!");
return Value::null;
}
Value keypoolrefill(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw std::runtime_error(
"keypoolrefill [new-size]\n"
"Fills the keypool."
+ HelpRequiringPassphrase());
unsigned int nSize = std::max(GetArg("-keypool", 100), (int64_t)0);
if (params.size() > 0) {
if (params[0].get_int() < 0)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, expected valid size");
nSize = (unsigned int) params[0].get_int();
}
EnsureWalletIsUnlocked();
pwalletMain->TopUpKeyPool(nSize);
if (pwalletMain->GetKeyPoolSize() < nSize)
throw JSONRPCError(RPC_WALLET_ERROR, "Error refreshing keypool.");
return Value::null;
}
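// Scheduled by walletpassphrase via RPCRunLater to relock the wallet once the
// requested unlock timeout expires.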
static void LockWallet(CWallet* pWallet)
{
LOCK2(pWallet->cs_wallet, cs_nWalletUnlockTime);
nWalletUnlockTime = 0;
pWallet->Lock();
}
Value walletpassphrase(const Array& params, bool fHelp)
{
if (pwalletMain->IsCrypted() && (fHelp || params.size() < 2 || params.size() > 3))
throw std::runtime_error(
"walletpassphrase <passphrase> <timeout> [stakingonly]\n"
"Stores the wallet decryption key in memory for <timeout> seconds.\n"
"if [stakingonly] is true sending functions are disabled.");
if (fHelp)
return true;
if (!pwalletMain->IsCrypted())
throw JSONRPCError(RPC_WALLET_WRONG_ENC_STATE, "Error: running with an unencrypted wallet, but walletpassphrase was called.");
if (!pwalletMain->IsLocked())
throw JSONRPCError(RPC_WALLET_ALREADY_UNLOCKED, "Error: Wallet is already unlocked, use walletlock first if need to change unlock settings.");
// Note that the walletpassphrase is stored in params[0] which is not mlock()ed
SecureString strWalletPass;
strWalletPass.reserve(100);
// TODO: get rid of this .c_str() by implementing SecureString::operator=(std::string)
// Alternately, find a way to make params[0] mlock()'d to begin with.
strWalletPass = params[0].get_str().c_str();
if (strWalletPass.length() > 0)
{
if (!pwalletMain->Unlock(strWalletPass))
throw JSONRPCError(RPC_WALLET_PASSPHRASE_INCORRECT, "Error: The wallet passphrase entered was incorrect.");
} else
{
throw std::runtime_error(
"walletpassphrase <passphrase> <timeout>\n"
"Stores the wallet decryption key in memory for <timeout> seconds.");
};
pwalletMain->TopUpKeyPool();
int64_t nSleepTime = params[1].get_int64();
LOCK(cs_nWalletUnlockTime);
nWalletUnlockTime = GetTime() + nSleepTime;
RPCRunLater("lockwallet", boost::bind(LockWallet, pwalletMain), nSleepTime);
// ppcoin: if user OS account compromised prevent trivial sendmoney commands
if (params.size() > 2)
fWalletUnlockStakingOnly = params[2].get_bool();
else
fWalletUnlockStakingOnly = false;
return Value::null;
}
Value walletpassphrasechange(const Array& params, bool fHelp)
{
if (pwalletMain->IsCrypted() && (fHelp || params.size() != 2))
throw std::runtime_error(
"walletpassphrasechange <oldpassphrase> <newpassphrase>\n"
"Changes the wallet passphrase from <oldpassphrase> to <newpassphrase>.");
if (fHelp)
return true;
if (!pwalletMain->IsCrypted())
throw JSONRPCError(RPC_WALLET_WRONG_ENC_STATE, "Error: running with an unencrypted wallet, but walletpassphrasechange was called.");
// TODO: get rid of these .c_str() calls by implementing SecureString::operator=(std::string)
// Alternately, find a way to make params[0] mlock()'d to begin with.
SecureString strOldWalletPass;
strOldWalletPass.reserve(100);
strOldWalletPass = params[0].get_str().c_str();
SecureString strNewWalletPass;
strNewWalletPass.reserve(100);
strNewWalletPass = params[1].get_str().c_str();
if (strOldWalletPass.length() < 1 || strNewWalletPass.length() < 1)
throw std::runtime_error(
"walletpassphrasechange <oldpassphrase> <newpassphrase>\n"
"Changes the wallet passphrase from <oldpassphrase> to <newpassphrase>.");
if (!pwalletMain->ChangeWalletPassphrase(strOldWalletPass, strNewWalletPass))
throw JSONRPCError(RPC_WALLET_PASSPHRASE_INCORRECT, "Error: The wallet passphrase entered was incorrect.");
return Value::null;
}
Value walletlock(const Array& params, bool fHelp)
{
if (pwalletMain->IsCrypted() && (fHelp || params.size() != 0))
throw std::runtime_error(
"walletlock\n"
"Removes the wallet encryption key from memory, locking the wallet.\n"
"After calling this method, you will need to call walletpassphrase again\n"
"before being able to call any methods which require the wallet to be unlocked.");
if (fHelp)
return true;
if (!pwalletMain->IsCrypted())
throw JSONRPCError(RPC_WALLET_WRONG_ENC_STATE, "Error: running with an unencrypted wallet, but walletlock was called.");
{
LOCK(cs_nWalletUnlockTime);
pwalletMain->Lock();
nWalletUnlockTime = 0;
}
return Value::null;
}
Value encryptwallet(const Array& params, bool fHelp)
{
if (!pwalletMain->IsCrypted() && (fHelp || params.size() != 1))
throw std::runtime_error(
"encryptwallet <passphrase>\n"
"Encrypts the wallet with <passphrase>.");
if (fHelp)
return true;
if (pwalletMain->IsCrypted())
throw JSONRPCError(RPC_WALLET_WRONG_ENC_STATE, "Error: running with an encrypted wallet, but encryptwallet was called.");
// TODO: get rid of this .c_str() by implementing SecureString::operator=(std::string)
// Alternately, find a way to make params[0] mlock()'d to begin with.
SecureString strWalletPass;
strWalletPass.reserve(100);
strWalletPass = params[0].get_str().c_str();
if (strWalletPass.length() < 1)
throw std::runtime_error(
"encryptwallet <passphrase>\n"
"Encrypts the wallet with <passphrase>.");
if (!pwalletMain->EncryptWallet(strWalletPass))
throw JSONRPCError(RPC_WALLET_ENCRYPTION_FAILED, "Error: Failed to encrypt the wallet.");
// BDB seems to have a bad habit of writing old data into
// slack space in .dat files; that is bad if the old data is
// unencrypted private keys. So:
StartShutdown();
return "wallet encrypted; EclipseCrypto server stopping, restart to run with encrypted wallet. The keypool has been flushed, you need to make a new backup.";
}
class DescribeAddressVisitor : public boost::static_visitor<Object>
{
public:
Object operator()(const CNoDestination &dest) const { return Object(); }
Object operator()(const CKeyID &keyID) const {
Object obj;
CPubKey vchPubKey;
pwalletMain->GetPubKey(keyID, vchPubKey);
obj.push_back(Pair("isscript", false));
obj.push_back(Pair("pubkey", HexStr(vchPubKey)));
obj.push_back(Pair("iscompressed", vchPubKey.IsCompressed()));
return obj;
}
Object operator()(const CScriptID &scriptID) const {
Object obj;
obj.push_back(Pair("isscript", true));
CScript subscript;
pwalletMain->GetCScript(scriptID, subscript);
std::vector<CTxDestination> addresses;
txnouttype whichType;
int nRequired;
ExtractDestinations(subscript, whichType, addresses, nRequired);
obj.push_back(Pair("script", GetTxnOutputType(whichType)));
obj.push_back(Pair("hex", HexStr(subscript.begin(), subscript.end())));
Array a;
BOOST_FOREACH(const CTxDestination& addr, addresses)
a.push_back(CBitcoinAddress(addr).ToString());
obj.push_back(Pair("addresses", a));
if (whichType == TX_MULTISIG)
obj.push_back(Pair("sigsrequired", nRequired));
return obj;
}
Object operator()(const CStealthAddress &sxAddr) const {
Object obj;
obj.push_back(Pair("todo - stealth address", true));
return obj;
}
Object operator()(const CExtKeyPair &ek) const {
Object obj;
obj.push_back(Pair("todo - bip32 address", true));
return obj;
}
};
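// Illustrative validateaddress output for an owned P2SH address (abridged;
// all values are hypothetical):
//   {"isvalid":true, "address":"<p2sh address>", "ismine":true,
//    "isscript":true, "script":"multisig", "sigsrequired":2}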
Value validateaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw std::runtime_error(
"validateaddress <eclipsecryptoaddress>\n"
"Return information about <eclipsecryptoaddress>.");
CBitcoinAddress address(params[0].get_str());
bool isValid = address.IsValid();
Object ret;
ret.push_back(Pair("isvalid", isValid));
if (isValid)
{
CTxDestination dest = address.Get();
std::string currentAddress = address.ToString();
ret.push_back(Pair("address", currentAddress));
bool fMine = IsDestMine(*pwalletMain, dest);
ret.push_back(Pair("ismine", fMine));
if (fMine)
{
Object detail = boost::apply_visitor(DescribeAddressVisitor(), dest);
ret.insert(ret.end(), detail.begin(), detail.end());
};
if (pwalletMain->mapAddressBook.count(dest))
ret.push_back(Pair("account", pwalletMain->mapAddressBook[dest]));
}
return ret;
}
Value validatepubkey(const Array& params, bool fHelp)
{
if (fHelp || !params.size() || params.size() > 2)
throw std::runtime_error(
"validatepubkey <eclipsecryptopubkey>\n"
"Return information about <eclipsecryptopubkey>.");
std::vector<unsigned char> vchPubKey = ParseHex(params[0].get_str());
CPubKey pubKey(vchPubKey);
bool isValid = pubKey.IsValid();
bool isCompressed = pubKey.IsCompressed();
CKeyID keyID = pubKey.GetID();
CBitcoinAddress address;
address.Set(keyID);
Object ret;
ret.push_back(Pair("isvalid", isValid));
if (isValid)
{
CTxDestination dest = address.Get();
std::string currentAddress = address.ToString();
ret.push_back(Pair("address", currentAddress));
bool fMine = IsDestMine(*pwalletMain, dest);
ret.push_back(Pair("ismine", fMine));
ret.push_back(Pair("iscompressed", isCompressed));
if (fMine)
{
Object detail = boost::apply_visitor(DescribeAddressVisitor(), dest);
ret.insert(ret.end(), detail.begin(), detail.end());
};
if (pwalletMain->mapAddressBook.count(dest))
ret.push_back(Pair("account", pwalletMain->mapAddressBook[dest]));
};
return ret;
}
// ppcoin: reserve balance from being staked for network protection
Value reservebalance(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 2)
throw std::runtime_error(
"reservebalance [<reserve> [amount]]\n"
"<reserve> is true or false to turn balance reserve on or off.\n"
"<amount> is a real and rounded to cent.\n"
"Set reserve amount not participating in network protection.\n"
"If no parameters provided current setting is printed.\n");
if (params.size() > 0)
{
bool fReserve = params[0].get_bool();
if (fReserve)
{
if (params.size() == 1)
throw std::runtime_error("must provide amount to reserve balance.\n");
int64_t nAmount = AmountFromValue(params[1]);
nAmount = (nAmount / CENT) * CENT; // round to cent
if (nAmount < 0)
throw std::runtime_error("amount cannot be negative.\n");
nReserveBalance = nAmount;
} else
{
if (params.size() > 1)
throw std::runtime_error("cannot specify amount to turn off reserve.\n");
nReserveBalance = 0;
}
}
Object result;
result.push_back(Pair("reserve", (nReserveBalance > 0)));
result.push_back(Pair("amount", ValueFromAmount(nReserveBalance)));
return result;
}
// ppcoin: check wallet integrity
Value checkwallet(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 0)
throw std::runtime_error(
"checkwallet\n"
"Check wallet for integrity.\n");
int nMismatchSpent;
int64_t nBalanceInQuestion;
pwalletMain->FixSpentCoins(nMismatchSpent, nBalanceInQuestion, true);
Object result;
if (nMismatchSpent == 0)
{
result.push_back(Pair("wallet check passed", true));
} else
{
result.push_back(Pair("mismatched spent coins", nMismatchSpent));
result.push_back(Pair("amount in question", ValueFromAmount(nBalanceInQuestion)));
};
return result;
}
// ppcoin: repair wallet
Value repairwallet(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 0)
throw std::runtime_error(
"repairwallet\n"
"Repair wallet if checkwallet reports any problem.\n");
int nMismatchSpent;
int64_t nBalanceInQuestion;
pwalletMain->FixSpentCoins(nMismatchSpent, nBalanceInQuestion);
Object result;
if (nMismatchSpent == 0)
{
result.push_back(Pair("wallet check passed", true));
} else
{
result.push_back(Pair("mismatched spent coins", nMismatchSpent));
result.push_back(Pair("amount affected by repair", ValueFromAmount(nBalanceInQuestion)));
}
return result;
}
// NovaCoin: resend unconfirmed wallet transactions
Value resendtx(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw std::runtime_error(
"resendtx\n"
"Re-send unconfirmed transactions.\n"
);
ResendWalletTransactions(true);
return Value::null;
}
// ppcoin: make a public-private key pair
Value makekeypair(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw std::runtime_error(
"makekeypair [prefix]\n"
"Make a public/private key pair.\n"
"[prefix] is optional preferred prefix for the public key.\n");
std::string strPrefix = "";
if (params.size() > 0)
strPrefix = params[0].get_str();
CKey key;
key.MakeNewKey(false);
CPrivKey vchPrivKey = key.GetPrivKey();
Object result;
result.push_back(Pair("PrivateKey", HexStr<CPrivKey::iterator>(vchPrivKey.begin(), vchPrivKey.end())));
result.push_back(Pair("PublicKey", HexStr(key.GetPubKey())));
return result;
}
Value getnewstealthaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw std::runtime_error(
"getnewstealthaddress [label]\n"
"Returns a new EclipseCrypto stealth address for receiving payments anonymously."
+ HelpRequiringPassphrase());
    if (pwalletMain->IsLocked())
        throw std::runtime_error("Failed: Wallet must be unlocked.");
    std::string sLabel;
if (params.size() > 0)
sLabel = params[0].get_str();
CEKAStealthKey akStealth;
std::string sError;
if (0 != pwalletMain->NewStealthKeyFromAccount(sLabel, akStealth))
throw std::runtime_error("NewStealthKeyFromAccount failed.");
return akStealth.ToStealthAddress();
}
Value liststealthaddresses(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw std::runtime_error(
"liststealthaddresses [show_secrets=0]\n"
"List owned stealth addresses.");
bool fShowSecrets = false;
if (params.size() > 0)
{
std::string str = params[0].get_str();
if (IsStringBoolNegative(str))
fShowSecrets = false;
else
fShowSecrets = true;
};
if (fShowSecrets)
EnsureWalletIsUnlocked();
Object result;
ExtKeyAccountMap::const_iterator mi;
for (mi = pwalletMain->mapExtAccounts.begin(); mi != pwalletMain->mapExtAccounts.end(); ++mi)
{
CExtKeyAccount *ea = mi->second;
if (ea->mapStealthKeys.size() < 1)
continue;
result.push_back(Pair("Account", ea->sLabel));
AccStealthKeyMap::iterator it;
for (it = ea->mapStealthKeys.begin(); it != ea->mapStealthKeys.end(); ++it)
{
const CEKAStealthKey &aks = it->second;
if (fShowSecrets)
{
Object objA;
objA.push_back(Pair("Label ", aks.sLabel));
objA.push_back(Pair("Address ", aks.ToStealthAddress()));
objA.push_back(Pair("Scan Secret ", HexStr(aks.skScan.begin(), aks.skScan.end())));
std::string sSpend;
CStoredExtKey *sekAccount = ea->ChainAccount();
if (sekAccount && !sekAccount->fLocked)
{
CKey skSpend;
if (ea->GetKey(aks.akSpend, skSpend))
sSpend = HexStr(skSpend.begin(), skSpend.end());
else
sSpend = "Extract failed.";
} else
{
sSpend = "Account Locked.";
};
objA.push_back(Pair("Spend Secret ", sSpend));
result.push_back(Pair("Stealth Address", objA));
} else
{
result.push_back(Pair("Stealth Address", aks.ToStealthAddress() + " - " + aks.sLabel));
};
};
};
if (pwalletMain->stealthAddresses.size() > 0)
result.push_back(Pair("Account", "Legacy"));
std::set<CStealthAddress>::iterator it;
for (it = pwalletMain->stealthAddresses.begin(); it != pwalletMain->stealthAddresses.end(); ++it)
{
if (it->scan_secret.size() < 1)
continue; // stealth address is not owned
if (fShowSecrets)
{
Object objA;
objA.push_back(Pair("Label ", it->label));
objA.push_back(Pair("Address ", it->Encoded()));
objA.push_back(Pair("Scan Secret ", HexStr(it->scan_secret.begin(), it->scan_secret.end())));
objA.push_back(Pair("Spend Secret ", HexStr(it->spend_secret.begin(), it->spend_secret.end())));
result.push_back(Pair("Stealth Address", objA));
} else
{
result.push_back(Pair("Stealth Address", it->Encoded() + " - " + it->label));
};
};
return result;
}
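// A stealth address is the pair of public keys derived from the 32-byte scan
// and spend secrets; importstealthaddress recomputes both public keys from
// the supplied secrets before storing the address.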
Value importstealthaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 2)
throw std::runtime_error(
"importstealthaddress <scan_secret> <spend_secret> [label]\n"
"Import an owned stealth addresses."
+ HelpRequiringPassphrase());
if (pwalletMain->IsLocked())
throw std::runtime_error("Failed: Wallet must be unlocked.");
std::string sScanSecret = params[0].get_str();
std::string sSpendSecret = params[1].get_str();
std::string sLabel;
if (params.size() > 2)
{
sLabel = params[2].get_str();
};
std::vector<uint8_t> vchScanSecret;
std::vector<uint8_t> vchSpendSecret;
if (IsHex(sScanSecret))
{
vchScanSecret = ParseHex(sScanSecret);
} else
{
if (!DecodeBase58(sScanSecret, vchScanSecret))
throw std::runtime_error("Could not decode scan secret as hex or base58.");
};
if (IsHex(sSpendSecret))
{
vchSpendSecret = ParseHex(sSpendSecret);
} else
{
if (!DecodeBase58(sSpendSecret, vchSpendSecret))
throw std::runtime_error("Could not decode spend secret as hex or base58.");
};
if (vchScanSecret.size() != 32)
throw std::runtime_error("Scan secret is not 32 bytes.");
if (vchSpendSecret.size() != 32)
throw std::runtime_error("Spend secret is not 32 bytes.");
ec_secret scan_secret;
ec_secret spend_secret;
memcpy(&scan_secret.e[0], &vchScanSecret[0], 32);
memcpy(&spend_secret.e[0], &vchSpendSecret[0], 32);
ec_point scan_pubkey, spend_pubkey;
if (SecretToPublicKey(scan_secret, scan_pubkey) != 0)
throw std::runtime_error("Could not get scan public key.");
if (SecretToPublicKey(spend_secret, spend_pubkey) != 0)
throw std::runtime_error("Could not get spend public key.");
CStealthAddress sxAddr;
sxAddr.label = sLabel;
sxAddr.scan_pubkey = scan_pubkey;
sxAddr.spend_pubkey = spend_pubkey;
sxAddr.scan_secret = vchScanSecret;
sxAddr.spend_secret = vchSpendSecret;
Object result;
bool fFound = false;
// -- find if address already exists
std::set<CStealthAddress>::iterator it;
for (it = pwalletMain->stealthAddresses.begin(); it != pwalletMain->stealthAddresses.end(); ++it)
{
CStealthAddress &sxAddrIt = const_cast<CStealthAddress&>(*it);
if (sxAddrIt.scan_pubkey == sxAddr.scan_pubkey
&& sxAddrIt.spend_pubkey == sxAddr.spend_pubkey)
{
if (sxAddrIt.scan_secret.size() < 1)
{
sxAddrIt.scan_secret = sxAddr.scan_secret;
sxAddrIt.spend_secret = sxAddr.spend_secret;
fFound = true; // update stealth address with secrets
break;
};
result.push_back(Pair("result", "Import failed - stealth address exists."));
return result;
};
};
if (fFound)
{
result.push_back(Pair("result", "Success, updated " + sxAddr.Encoded()));
} else
{
pwalletMain->stealthAddresses.insert(sxAddr);
result.push_back(Pair("result", "Success, imported " + sxAddr.Encoded()));
};
if (!pwalletMain->AddStealthAddress(sxAddr))
throw std::runtime_error("Could not save to wallet.");
return result;
}
Value sendtostealthaddress(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 2 || params.size() > 5)
throw std::runtime_error(
"sendtostealthaddress <stealth_address> <amount> [comment] [comment-to] [narration]\n"
"sendtostealthaddress <stealth_address> <amount> [narration]\n"
"<amount> is a real and is rounded to the nearest 0.000001"
+ HelpRequiringPassphrase());
if (pwalletMain->IsLocked())
throw JSONRPCError(RPC_WALLET_UNLOCK_NEEDED, "Error: Please enter the wallet passphrase with walletpassphrase first.");
std::string sEncoded = params[0].get_str();
int64_t nAmount = AmountFromValue(params[1]);
std::string sNarr;
if (params.size() == 3 || params.size() == 5)
{
int nNarr = params.size() - 1;
if(params[nNarr].type() != null_type && !params[nNarr].get_str().empty())
sNarr = params[nNarr].get_str();
}
if (sNarr.length() > 24)
throw std::runtime_error("Narration must be 24 characters or less.");
CStealthAddress sxAddr;
if (!sxAddr.SetEncoded(sEncoded))
throw std::runtime_error("Invalid EclipseCrypto stealth address.");
CWalletTx wtx;
if (params.size() > 3 && params[3].type() != null_type && !params[3].get_str().empty())
wtx.mapValue["comment"] = params[3].get_str();
if (params.size() > 4 && params[4].type() != null_type && !params[4].get_str().empty())
wtx.mapValue["to"] = params[4].get_str();
std::string sError;
if (!pwalletMain->SendStealthMoneyToDestination(sxAddr, nAmount, sNarr, wtx, sError))
throw JSONRPCError(RPC_WALLET_ERROR, sError);
return wtx.GetHash().GetHex();
}
Value clearwallettransactions(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 0)
throw std::runtime_error(
"clearwallettransactions \n"
"delete all transactions from wallet - reload with reloadanondata\n"
"Warning: Backup your wallet first!");
Object result;
uint32_t nTransactions = 0;
char cbuf[256];
{
LOCK2(cs_main, pwalletMain->cs_wallet);
CWalletDB walletdb(pwalletMain->strWalletFile);
walletdb.TxnBegin();
Dbc* pcursor = walletdb.GetTxnCursor();
if (!pcursor)
throw std::runtime_error("Cannot get wallet DB cursor");
Dbt datKey;
Dbt datValue;
datKey.set_flags(DB_DBT_USERMEM);
datValue.set_flags(DB_DBT_USERMEM);
std::vector<unsigned char> vchKey;
std::vector<unsigned char> vchType;
std::vector<unsigned char> vchKeyData;
std::vector<unsigned char> vchValueData;
vchKeyData.resize(100);
vchValueData.resize(100);
datKey.set_ulen(vchKeyData.size());
datKey.set_data(&vchKeyData[0]);
datValue.set_ulen(vchValueData.size());
datValue.set_data(&vchValueData[0]);
unsigned int fFlags = DB_NEXT; // same as using DB_FIRST for new cursor
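            // The cursor is read into DB_DBT_USERMEM buffers: when BDB returns
            // ENOMEM or DB_BUFFER_SMALL the buffers are grown to the reported
            // size and the get() is retried. The cursor does not advance on a
            // short-buffer failure, so no records are skipped.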
while (true)
{
int ret = pcursor->get(&datKey, &datValue, fFlags);
if (ret == ENOMEM
|| ret == DB_BUFFER_SMALL)
{
if (datKey.get_size() > datKey.get_ulen())
{
vchKeyData.resize(datKey.get_size());
datKey.set_ulen(vchKeyData.size());
datKey.set_data(&vchKeyData[0]);
};
if (datValue.get_size() > datValue.get_ulen())
{
vchValueData.resize(datValue.get_size());
datValue.set_ulen(vchValueData.size());
datValue.set_data(&vchValueData[0]);
};
// -- try once more, when DB_BUFFER_SMALL cursor is not expected to move
ret = pcursor->get(&datKey, &datValue, fFlags);
};
if (ret == DB_NOTFOUND)
break;
else
if (datKey.get_data() == NULL || datValue.get_data() == NULL
|| ret != 0)
{
snprintf(cbuf, sizeof(cbuf), "wallet DB error %d, %s", ret, db_strerror(ret));
throw std::runtime_error(cbuf);
};
CDataStream ssValue(SER_DISK, CLIENT_VERSION);
ssValue.SetType(SER_DISK);
ssValue.clear();
ssValue.write((char*)datKey.get_data(), datKey.get_size());
ssValue >> vchType;
std::string strType(vchType.begin(), vchType.end());
//LogPrintf("strType %s\n", strType.c_str());
if (strType == "tx")
{
uint256 hash;
ssValue >> hash;
if ((ret = pcursor->del(0)) != 0)
{
LogPrintf("Delete transaction failed %d, %s\n", ret, db_strerror(ret));
continue;
};
pwalletMain->mapWallet.erase(hash);
pwalletMain->NotifyTransactionChanged(pwalletMain, hash, CT_DELETED);
nTransactions++;
};
};
pcursor->close();
walletdb.TxnCommit();
//pwalletMain->mapWallet.clear();
if (nNodeMode == NT_THIN)
{
// reset LastFilteredHeight
walletdb.WriteLastFilteredHeight(0);
}
}
snprintf(cbuf, sizeof(cbuf), "Removed %u transactions.", nTransactions);
result.push_back(Pair("complete", std::string(cbuf)));
result.push_back(Pair("", "Reload with reloadanondata, reindex or re-download blockchain."));
return result;
}
Value scanforalltxns(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw std::runtime_error(
"scanforalltxns [fromHeight]\n"
"Scan blockchain for owned transactions.");
if (nNodeMode != NT_FULL)
throw std::runtime_error("Can't run in thin mode.");
Object result;
int32_t nFromHeight = 0;
CBlockIndex *pindex = pindexGenesisBlock;
if (params.size() > 0)
nFromHeight = params[0].get_int();
if (nFromHeight > 0)
{
pindex = mapBlockIndex[hashBestChain];
while (pindex->nHeight > nFromHeight
&& pindex->pprev)
pindex = pindex->pprev;
};
if (pindex == NULL)
throw std::runtime_error("Genesis Block is not set.");
{
LOCK2(cs_main, pwalletMain->cs_wallet);
pwalletMain->MarkDirty();
pwalletMain->ScanForWalletTransactions(pindex, true);
pwalletMain->ReacceptWalletTransactions();
} // cs_main, pwalletMain->cs_wallet
result.push_back(Pair("result", "Scan complete."));
return result;
}
Value scanforstealthtxns(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw std::runtime_error(
"scanforstealthtxns [fromHeight]\n"
"Scan blockchain for owned stealth transactions.");
Object result;
uint32_t nBlocks = 0;
uint32_t nTransactions = 0;
int32_t nFromHeight = 0;
CBlockIndex *pindex = pindexGenesisBlock;
if (params.size() > 0)
nFromHeight = params[0].get_int();
if (nFromHeight > 0)
{
pindex = mapBlockIndex[hashBestChain];
while (pindex->nHeight > nFromHeight
&& pindex->pprev)
pindex = pindex->pprev;
};
if (pindex == NULL)
throw std::runtime_error("Genesis Block is not set.");
// -- locks in AddToWalletIfInvolvingMe
bool fUpdate = true; // todo: option?
pwalletMain->nStealth = 0;
pwalletMain->nFoundStealth = 0;
while (pindex)
{
nBlocks++;
CBlock block;
block.ReadFromDisk(pindex, true);
BOOST_FOREACH(CTransaction& tx, block.vtx)
{
if (!tx.IsStandard())
continue; // leave out coinbase and others
nTransactions++;
uint256 hash = tx.GetHash();
pwalletMain->AddToWalletIfInvolvingMe(tx, hash, &block, fUpdate);
};
pindex = pindex->pnext;
};
LogPrintf("Scanned %u blocks, %u transactions\n", nBlocks, nTransactions);
LogPrintf("Found %u stealth transactions in blockchain.\n", pwalletMain->nStealth);
LogPrintf("Found %u new owned stealth transactions.\n", pwalletMain->nFoundStealth);
char cbuf[256];
snprintf(cbuf, sizeof(cbuf), "%u new stealth transactions.", pwalletMain->nFoundStealth);
result.push_back(Pair("result", "Scan complete."));
result.push_back(Pair("found", std::string(cbuf)));
return result;
}
Value sendsdctoanon(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 2 || params.size() > 5)
throw std::runtime_error(
"sendsdctoanon <stealth_address> <amount> [narration] [comment] [comment-to]\n"
"<amount> is a real number and is rounded to the nearest 0.000001"
+ HelpRequiringPassphrase());
if (pwalletMain->IsLocked())
throw JSONRPCError(RPC_WALLET_UNLOCK_NEEDED, "Error: Please enter the wallet passphrase with walletpassphrase first.");
std::string sEncoded = params[0].get_str();
int64_t nAmount = AmountFromValue(params[1]);
std::string sNarr;
if (params.size() > 2 && params[2].type() != null_type && !params[2].get_str().empty())
sNarr = params[2].get_str();
if (sNarr.length() > 24)
throw std::runtime_error("Narration must be 24 characters or less.");
CStealthAddress sxAddr;
if (!sxAddr.SetEncoded(sEncoded))
throw std::runtime_error("Invalid EclipseCrypto stealth address.");
CWalletTx wtx;
if (params.size() > 3 && params[3].type() != null_type && !params[3].get_str().empty())
wtx.mapValue["comment"] = params[3].get_str();
if (params.size() > 4 && params[4].type() != null_type && !params[4].get_str().empty())
wtx.mapValue["to"] = params[4].get_str();
std::string sError;
if (!pwalletMain->SendSdcToAnon(sxAddr, nAmount, sNarr, wtx, sError))
{
LogPrintf("SendSdcToAnon failed %s\n", sError.c_str());
throw JSONRPCError(RPC_WALLET_ERROR, sError);
};
return wtx.GetHash().GetHex();
}
Value sendanontoanon(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 3 || params.size() > 6)
throw std::runtime_error(
"sendanontoanon <stealth_address> <amount> <ring_size> [narration] [comment] [comment-to]\n"
"<amount> is a real number and is rounded to the nearest 0.000001\n"
"<ring_size> is a number of outputs of the same amount to include in the signature"
+ HelpRequiringPassphrase());
if (pwalletMain->IsLocked())
throw JSONRPCError(RPC_WALLET_UNLOCK_NEEDED, "Error: Please enter the wallet passphrase with walletpassphrase first.");
std::string sEncoded = params[0].get_str();
int64_t nAmount = AmountFromValue(params[1]);
uint32_t nRingSize = (uint32_t)params[2].get_int();
std::ostringstream ssThrow;
    if (nRingSize < MIN_RING_SIZE || nRingSize > MAX_RING_SIZE)
    {
        ssThrow << "Ring size must be >= " << MIN_RING_SIZE << " and <= " << MAX_RING_SIZE << ".";
        throw std::runtime_error(ssThrow.str());
    };
std::string sNarr;
if (params.size() > 3 && params[3].type() != null_type && !params[3].get_str().empty())
sNarr = params[3].get_str();
if (sNarr.length() > 24)
throw std::runtime_error("Narration must be 24 characters or less.");
CStealthAddress sxAddr;
if (!sxAddr.SetEncoded(sEncoded))
throw std::runtime_error("Invalid EclipseCrypto stealth address.");
CWalletTx wtx;
if (params.size() > 4 && params[4].type() != null_type && !params[4].get_str().empty())
wtx.mapValue["comment"] = params[4].get_str();
if (params.size() > 5 && params[5].type() != null_type && !params[5].get_str().empty())
wtx.mapValue["to"] = params[5].get_str();
std::string sError;
if (!pwalletMain->SendAnonToAnon(sxAddr, nAmount, nRingSize, sNarr, wtx, sError))
{
LogPrintf("SendAnonToAnon failed %s\n", sError.c_str());
throw JSONRPCError(RPC_WALLET_ERROR, sError);
};
return wtx.GetHash().GetHex();
}
Value sendanontosdc(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 3 || params.size() > 6)
throw std::runtime_error(
"sendanontosdc <stealth_address> <amount> <ring_size> [narration] [comment] [comment-to]\n"
"<amount> is a real number and is rounded to the nearest 0.000001\n"
"<ring_size> is a number of outputs of the same amount to include in the signature"
+ HelpRequiringPassphrase());
if (pwalletMain->IsLocked())
throw JSONRPCError(RPC_WALLET_UNLOCK_NEEDED, "Error: Please enter the wallet passphrase with walletpassphrase first.");
std::string sEncoded = params[0].get_str();
int64_t nAmount = AmountFromValue(params[1]);
uint32_t nRingSize = (uint32_t)params[2].get_int();
std::ostringstream ssThrow;
    if (nRingSize < 1 || nRingSize > MAX_RING_SIZE)
    {
        ssThrow << "Ring size must be >= 1 and <= " << MAX_RING_SIZE << ".";
        throw std::runtime_error(ssThrow.str());
    };
std::string sNarr;
if (params.size() > 3 && params[3].type() != null_type && !params[3].get_str().empty())
sNarr = params[3].get_str();
if (sNarr.length() > 24)
throw std::runtime_error("Narration must be 24 characters or less.");
CStealthAddress sxAddr;
if (!sxAddr.SetEncoded(sEncoded))
throw std::runtime_error("Invalid EclipseCrypto stealth address.");
CWalletTx wtx;
if (params.size() > 4 && params[4].type() != null_type && !params[4].get_str().empty())
wtx.mapValue["comment"] = params[4].get_str();
if (params.size() > 5 && params[5].type() != null_type && !params[5].get_str().empty())
wtx.mapValue["to"] = params[5].get_str();
std::string sError;
if (!pwalletMain->SendAnonToSdc(sxAddr, nAmount, nRingSize, sNarr, wtx, sError))
{
LogPrintf("SendAnonToSdc failed %s\n", sError.c_str());
throw JSONRPCError(RPC_WALLET_ERROR, sError);
};
return wtx.GetHash().GetHex();
}
Value estimateanonfee(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 2 || params.size() > 3)
throw std::runtime_error(
"estimateanonfee <amount> <ring_size> [narration]\n"
"<amount>is a real number and is rounded to the nearest 0.000001\n"
"<ring_size> is a number of outputs of the same amount to include in the signature");
int64_t nAmount = AmountFromValue(params[0]);
uint32_t nRingSize = (uint32_t)params[1].get_int();
std::ostringstream ssThrow;
    if (nRingSize < MIN_RING_SIZE || nRingSize > MAX_RING_SIZE)
    {
        ssThrow << "Ring size must be >= " << MIN_RING_SIZE << " and <= " << MAX_RING_SIZE << ".";
        throw std::runtime_error(ssThrow.str());
    };
std::string sNarr;
if (params.size() > 2 && params[2].type() != null_type && !params[2].get_str().empty())
sNarr = params[2].get_str();
if (sNarr.length() > 24)
throw std::runtime_error("Narration must be 24 characters or less.");
CWalletTx wtx;
int64_t nFee = 0;
std::string sError;
if (!pwalletMain->EstimateAnonFee(nAmount, nRingSize, sNarr, wtx, nFee, sError))
{
LogPrintf("EstimateAnonFee failed %s\n", sError.c_str());
throw JSONRPCError(RPC_WALLET_ERROR, sError);
};
uint32_t nBytes = ::GetSerializeSize(*(CTransaction*)&wtx, SER_NETWORK, PROTOCOL_VERSION);
Object result;
result.push_back(Pair("Estimated bytes", (int)nBytes));
result.push_back(Pair("Estimated inputs", (int)wtx.vin.size()));
result.push_back(Pair("Estimated outputs", (int)wtx.vout.size()));
result.push_back(Pair("Estimated fee", ValueFromAmount(nFee)));
return result;
}
Value anonoutputs(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 2)
throw std::runtime_error(
"anonoutputs [systemTotals] [show_immature_outputs]\n"
"[systemTotals] if true displays the total no. of coins in the system.");
if (nNodeMode != NT_FULL)
throw std::runtime_error("Must be in full mode.");
bool fSystemTotals = false;
if (params.size() > 0)
{
std::string value = params[0].get_str();
if (IsStringBoolPositive(value))
fSystemTotals = true;
};
bool fMatureOnly = true;
if (params.size() > 1)
{
std::string value = params[1].get_str();
if (IsStringBoolPositive(value))
fMatureOnly = false;
};
std::list<COwnedAnonOutput> lAvailableCoins;
if (pwalletMain->ListUnspentAnonOutputs(lAvailableCoins, fMatureOnly) != 0)
throw std::runtime_error("ListUnspentAnonOutputs() failed.");
Object result;
if (!fSystemTotals)
{
result.push_back(Pair("No. of coins", "amount"));
// -- mAvailableCoins is ordered by value
char cbuf[256];
int64_t nTotal = 0;
int64_t nLast = 0;
int nCount = 0;
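        // lAvailableCoins is sorted by denomination; the loop below run-length
        // encodes it, emitting one "count of owned outputs -> value" pair per
        // denomination.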
for (std::list<COwnedAnonOutput>::iterator it = lAvailableCoins.begin(); it != lAvailableCoins.end(); ++it)
{
if (nLast > 0 && it->nValue != nLast)
{
snprintf(cbuf, sizeof(cbuf), "%03d", nCount);
result.push_back(Pair(cbuf, ValueFromAmount(nLast)));
nCount = 0;
};
nCount++;
nLast = it->nValue;
nTotal += it->nValue;
};
if (nCount > 0)
{
snprintf(cbuf, sizeof(cbuf), "%03d", nCount);
result.push_back(Pair(cbuf, ValueFromAmount(nLast)));
};
result.push_back(Pair("total", ValueFromAmount(nTotal)));
} else
{
std::map<int64_t, int> mOutputCounts;
for (std::list<COwnedAnonOutput>::iterator it = lAvailableCoins.begin(); it != lAvailableCoins.end(); ++it)
mOutputCounts[it->nValue] = 0;
if (pwalletMain->CountAnonOutputs(mOutputCounts, fMatureOnly) != 0)
throw std::runtime_error("CountAnonOutputs() failed.");
result.push_back(Pair("No. of coins owned, No. of system coins", "amount"));
// -- lAvailableCoins is ordered by value
char cbuf[256];
int64_t nTotal = 0;
int64_t nLast = 0;
int64_t nCount = 0;
int64_t nSystemCount;
for (std::list<COwnedAnonOutput>::iterator it = lAvailableCoins.begin(); it != lAvailableCoins.end(); ++it)
{
if (nLast > 0 && it->nValue != nLast)
{
nSystemCount = mOutputCounts[nLast];
                snprintf(cbuf, sizeof(cbuf), "%04d, %04d", (int)nCount, (int)nSystemCount);
                result.push_back(Pair(cbuf, ValueFromAmount(nLast)));
nCount = 0;
};
nCount++;
nLast = it->nValue;
nTotal += it->nValue;
};
if (nCount > 0)
{
nSystemCount = mOutputCounts[nLast];
            snprintf(cbuf, sizeof(cbuf), "%04d, %04d", (int)nCount, (int)nSystemCount);
            result.push_back(Pair(cbuf, ValueFromAmount(nLast)));
};
result.push_back(Pair("total currency owned", ValueFromAmount(nTotal)));
}
return result;
}
Value anoninfo(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 1)
throw std::runtime_error(
"anoninfo [recalculate]\n"
"list outputs in system.");
if (nNodeMode != NT_FULL)
throw std::runtime_error("Must be in full mode.");
bool fMatureOnly = false; // TODO: add parameter
bool fRecalculate = false;
if (params.size() > 0)
{
std::string value = params[0].get_str();
if (IsStringBoolPositive(value))
fRecalculate = true;
};
Object result;
std::list<CAnonOutputCount> lOutputCounts;
if (fRecalculate)
{
if (pwalletMain->CountAllAnonOutputs(lOutputCounts, fMatureOnly) != 0)
throw std::runtime_error("CountAllAnonOutputs() failed.");
} else
{
// TODO: make mapAnonOutputStats a vector preinitialised with all possible coin values?
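        // Insertion sort of the cached per-denomination stats into
        // lOutputCounts, keeping the list ordered by nValue.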
for (std::map<int64_t, CAnonOutputCount>::iterator mi = mapAnonOutputStats.begin(); mi != mapAnonOutputStats.end(); ++mi)
{
bool fProcessed = false;
CAnonOutputCount aoc = mi->second;
if (aoc.nLeastDepth > 0)
aoc.nLeastDepth = nBestHeight - aoc.nLeastDepth;
for (std::list<CAnonOutputCount>::iterator it = lOutputCounts.begin(); it != lOutputCounts.end(); ++it)
{
if (aoc.nValue > it->nValue)
continue;
lOutputCounts.insert(it, aoc);
fProcessed = true;
break;
};
if (!fProcessed)
lOutputCounts.push_back(aoc);
};
};
result.push_back(Pair("No. Exists, No. Spends, Least Depth", "value"));
// -- lOutputCounts is ordered by value
char cbuf[256];
int64_t nTotalIn = 0;
int64_t nTotalOut = 0;
int64_t nTotalCoins = 0;
for (std::list<CAnonOutputCount>::iterator it = lOutputCounts.begin(); it != lOutputCounts.end(); ++it)
{
snprintf(cbuf, sizeof(cbuf), "%05d, %05d, %05d", it->nExists, it->nSpends, it->nLeastDepth);
result.push_back(Pair(cbuf, ValueFromAmount(it->nValue)));
nTotalIn += it->nValue * it->nExists;
nTotalOut += it->nValue * it->nSpends;
nTotalCoins += it->nExists;
};
result.push_back(Pair("total anon value in", ValueFromAmount(nTotalIn)));
result.push_back(Pair("total anon value out", ValueFromAmount(nTotalOut)));
result.push_back(Pair("total anon outputs", nTotalCoins));
return result;
}
Value reloadanondata(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 0)
throw std::runtime_error(
"reloadanondata \n"
"clears all anon txn data from system, and runs scanforalltxns.\n"
"WARNING: Intended for development use only."
+ HelpRequiringPassphrase());
if (nNodeMode != NT_FULL)
throw std::runtime_error("Must be in full mode.");
CBlockIndex *pindex = pindexGenesisBlock;
// check from 257000, once anon transactions started
while (pindex->nHeight < (fTestNet ? 68000 : 257000) && pindex->pnext)
pindex = pindex->pnext;
Object result;
if (pindex)
{
LOCK2(cs_main, pwalletMain->cs_wallet);
if (!pwalletMain->EraseAllAnonData())
throw std::runtime_error("EraseAllAnonData() failed.");
pwalletMain->MarkDirty();
pwalletMain->ScanForWalletTransactions(pindex, true);
pwalletMain->ReacceptWalletTransactions();
pwalletMain->CacheAnonStats();
result.push_back(Pair("result", "reloadanondata complete."));
} else
{
result.push_back(Pair("result", "reloadanondata failed - !pindex."));
};
return result;
}
static bool compareTxnTime(const CWalletTx* pa, const CWalletTx* pb)
{
return pa->nTime < pb->nTime;
}
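// Emits a spreadsheet-style report: one "headings" array followed by one data
// row per transaction input and per transaction output, ordered by time.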
Value txnreport(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 2)
throw std::runtime_error(
"txnreport [collate_amounts] [show_key_images]\n"
"List transactions at output level.\n");
bool fCollateAmounts = false;
bool fShowKeyImage = false;
// TODO: trust CWalletTx::vfSpent?
if (params.size() > 0)
{
std::string value = params[0].get_str();
if (IsStringBoolPositive(value))
fCollateAmounts = true;
};
if (params.size() > 1)
{
std::string value = params[1].get_str();
if (IsStringBoolPositive(value))
fShowKeyImage = true;
};
int64_t nWalletIn = 0; // total inputs from owned addresses
int64_t nWalletOut = 0; // total outputs from owned addresses
Object result;
{
LOCK2(cs_main, pwalletMain->cs_wallet);
std::list<CWalletTx*> listOrdered;
for (std::map<uint256, CWalletTx>::iterator it = pwalletMain->mapWallet.begin(); it != pwalletMain->mapWallet.end(); ++it)
{
if (it->second.GetDepthInMainChain() > 0) // exclude txns not in the chain
listOrdered.push_back(&it->second);
};
listOrdered.sort(compareTxnTime);
std::list<CWalletTx*>::iterator it;
Array headings;
headings.push_back("When");
headings.push_back("Txn Hash");
headings.push_back("In/Output Type");
headings.push_back("Txn Type");
headings.push_back("Address");
headings.push_back("Ring Size");
if (fShowKeyImage)
headings.push_back("Key Image");
headings.push_back("Owned");
headings.push_back("Spent");
headings.push_back("Value In");
headings.push_back("Value Out");
if (fCollateAmounts)
{
headings.push_back("Wallet In");
headings.push_back("Wallet Out");
};
result.push_back(Pair("headings", headings));
if (pwalletMain->IsLocked())
{
result.push_back(Pair("warning", "Wallet is locked - owned inputs may not be detected correctly."));
};
Array lines;
CTxDB txdb("r");
CWalletDB walletdb(pwalletMain->strWalletFile, "r");
char cbuf[256];
for (it = listOrdered.begin(); it != listOrdered.end(); ++it)
{
CWalletTx* pwtx = (*it);
Array entryTxn;
entryTxn.push_back(getTimeString(pwtx->nTime, cbuf, sizeof(cbuf)));
entryTxn.push_back(pwtx->GetHash().GetHex());
bool fCoinBase = pwtx->IsCoinBase();
bool fCoinStake = pwtx->IsCoinStake();
for (uint32_t i = 0; i < pwtx->vin.size(); ++i)
{
const CTxIn& txin = pwtx->vin[i];
int64_t nInputValue = 0;
Array entry = entryTxn;
std::string sAddr = "";
std::string sKeyImage = "";
bool fOwnCoin = false;
int nRingSize = 0;
if (pwtx->nVersion == ANON_TXN_VERSION
&& txin.IsAnonInput())
{
entry.push_back("eclipse in");
entry.push_back("");
std::vector<uint8_t> vchImage;
txin.ExtractKeyImage(vchImage);
nRingSize = txin.ExtractRingSize();
sKeyImage = HexStr(vchImage);
CKeyImageSpent ski;
bool fInMemPool;
if (GetKeyImage(&txdb, vchImage, ski, fInMemPool))
nInputValue = ski.nValue;
COwnedAnonOutput oao;
if (walletdb.ReadOwnedAnonOutput(vchImage, oao))
{
fOwnCoin = true;
} else
if (pwalletMain->IsCrypted())
{
// - tokens received with locked wallet won't have oao until wallet unlocked
// No way to tell if locked input is owned
// need vchImage
// TODO, closest would be to tell if it's possible for the input to be owned
sKeyImage = "locked?";
};
} else
{
if (txin.prevout.IsNull()) // coinbase
continue;
entry.push_back("sdc in");
entry.push_back(fCoinBase ? "coinbase" : fCoinStake ? "coinstake" : "");
if (pwalletMain->IsMine(txin))
fOwnCoin = true;
CTransaction prevTx;
if (txdb.ReadDiskTx(txin.prevout.hash, prevTx))
{
if (txin.prevout.n < prevTx.vout.size())
{
const CTxOut &vout = prevTx.vout[txin.prevout.n];
nInputValue = vout.nValue;
CTxDestination address;
if (ExtractDestination(vout.scriptPubKey, address))
sAddr = CBitcoinAddress(address).ToString();
} else
{
nInputValue = 0;
};
};
};
if (fOwnCoin)
nWalletIn += nInputValue;
entry.push_back(sAddr);
entry.push_back(nRingSize == 0 ? "" : strprintf("%d", nRingSize));
if (fShowKeyImage)
entry.push_back(sKeyImage);
entry.push_back(fOwnCoin);
entry.push_back(""); // spent
entry.push_back(strprintf("%f", (double)nInputValue / (double)COIN));
entry.push_back(""); // out
if (fCollateAmounts)
{
entry.push_back(strprintf("%f", (double)nWalletIn / (double)COIN));
entry.push_back(strprintf("%f", (double)nWalletOut / (double)COIN));
};
lines.push_back(entry);
};
for (uint32_t i = 0; i < pwtx->vout.size(); i++)
{
const CTxOut& txout = pwtx->vout[i];
if (txout.nValue < 1) // metadata output, narration or stealth
continue;
Array entry = entryTxn;
std::string sAddr = "";
std::string sKeyImage = "";
bool fOwnCoin = false;
bool fSpent = false;
if (pwtx->nVersion == ANON_TXN_VERSION
&& txout.IsAnonOutput())
{
entry.push_back("eclipse out");
entry.push_back("");
CPubKey pkCoin = txout.ExtractAnonPk();
std::vector<uint8_t> vchImage;
COwnedAnonOutput oao;
if (walletdb.ReadOwnedAnonOutputLink(pkCoin, vchImage)
&& walletdb.ReadOwnedAnonOutput(vchImage, oao))
{
sKeyImage = HexStr(vchImage);
fOwnCoin = true;
} else
if (pwalletMain->IsCrypted())
{
// - tokens received with locked wallet won't have oao until wallet unlocked
CKeyID ckCoinId = pkCoin.GetID();
CLockedAnonOutput lockedAo;
if (walletdb.ReadLockedAnonOutput(ckCoinId, lockedAo))
fOwnCoin = true;
sKeyImage = "locked?";
};
} else
{
entry.push_back("sdc out");
entry.push_back(fCoinBase ? "coinbase" : fCoinStake ? "coinstake" : "");
CTxDestination address;
if (ExtractDestination(txout.scriptPubKey, address))
sAddr = CBitcoinAddress(address).ToString();
if (pwalletMain->IsMine(txout))
fOwnCoin = true;
};
if (fOwnCoin)
{
nWalletOut += txout.nValue;
fSpent = pwtx->IsSpent(i);
};
entry.push_back(sAddr);
entry.push_back(""); // ring size (only for inputs)
if (fShowKeyImage)
entry.push_back(sKeyImage);
entry.push_back(fOwnCoin);
entry.push_back(fSpent);
entry.push_back(""); // in
entry.push_back(ValueFromAmount(txout.nValue));
if (fCollateAmounts)
{
entry.push_back(strprintf("%f", (double)nWalletIn / (double)COIN));
entry.push_back(strprintf("%f", (double)nWalletOut / (double)COIN));
};
lines.push_back(entry);
};
};
result.push_back(Pair("data", lines));
}
result.push_back(Pair("result", "txnreport complete."));
return result;
}<|fim▁end|>
|
throw std::runtime_error("Failed: Wallet must be unlocked.");
|
<|file_name|>chat.py<|end_file_name|><|fim▁begin|>#
# Copyright (c) 2011, Regents of the University of California
# BSD license, See the COPYING file for more information
# Written by: Derek Kulinski <[email protected]>
#
import curses, curses.wrapper, curses.textpad, threading, time, sys

from ChatNet import ChatNet, ChatServer

class ChatGUI(object):
    def __init__(self, prefix):
        self.prefix = prefix

        self.stdscr = None
        self.max_size = None
        self.chat_sc_border = None
        self.chat_sc = None
        self.input_sc_border = None
        self.input_sc = None
        self.textbox = None

    def window_setup(self):
        self.max_size = self.stdscr.getmaxyx()
        max_y, max_x = self.max_size

        # Input
        self.input_sc_border = curses.newwin(3, max_x, max_y - 3, 0)
        self.input_sc_border.border()
        self.input_sc_border.noutrefresh()
        self.input_sc = curses.newwin(1, max_x - 2, max_y - 2, 1)
        self.textbox = curses.textpad.Textbox(self.input_sc)

        # Output
        self.chat_sc_border = curses.newwin(max_y - 3, max_x)
        self.chat_sc_border.border()
        self.chat_sc_border.noutrefresh()
        self.chat_sc = curses.newwin(max_y - 5, max_x - 2, 1, 1)
        self.chat_sc.scrollok(True)
        self.chat_sc.noutrefresh()

    def write(self, text):
        self.chat_sc.addstr(text + "\n")
        self.chat_sc.noutrefresh()

    def callback(self, nick, text):
        self.write("<%s> %s" % (nick, text))
        curses.doupdate()

    def input_thread(self):
        server = ChatServer(self.prefix)
        thread = threading.Thread(target=server.listen)
        thread.start()

        while True:
            text = self.textbox.edit()
            self.input_sc.erase()
            if text == "":
                continue
            #self.write(text)
            server.send_message(text)

    def curses_code(self, stdscr):
        self.stdscr = stdscr

        self.window_setup()
        curses.doupdate()

        chatnet = ChatNet(self.prefix, self.callback)
        thread = threading.Thread(target=self.input_thread)
        thread.start()
        while True:
            chatnet.pullData()
            time.sleep(1)

def usage():
    #print(("Usage: %s <URI>" % sys.argv[0]), file=sys.stderr)
    sys.stderr.write("Usage: %s <URI>\n" % sys.argv[0])
    sys.exit(1)

if __name__ == '__main__':
    if len(sys.argv) != 2:
        usage()<|fim▁hole|><|fim▁end|>
|
    gui = ChatGUI(sys.argv[1])
    curses.wrapper(gui.curses_code)
|
<|file_name|>rev_jac_sparsity.hpp<|end_file_name|><|fim▁begin|># ifndef CPPAD_CORE_REV_JAC_SPARSITY_HPP
# define CPPAD_CORE_REV_JAC_SPARSITY_HPP
/* --------------------------------------------------------------------------
CppAD: C++ Algorithmic Differentiation: Copyright (C) 2003-17 Bradley M. Bell
CppAD is distributed under multiple licenses. This distribution is under
the terms of the
GNU General Public License Version 3.
A copy of this license is included in the COPYING file of this distribution.
Please visit http://www.coin-or.org/CppAD/ for information on other licenses.
-------------------------------------------------------------------------- */
/*
$begin rev_jac_sparsity$$
$spell
Jacobian
jac
bool
const
rc
cpp
$$
$section Reverse Mode Jacobian Sparsity Patterns$$
$head Syntax$$
$icode%f%.rev_jac_sparsity(
%pattern_in%, %transpose%, %dependency%, %internal_bool%, %pattern_out%
)%$$
$head Purpose$$
We use $latex F : \B{R}^n \rightarrow \B{R}^m$$ to denote the
$cref/AD function/glossary/AD Function/$$ corresponding to
the operation sequence stored in $icode f$$.
Fix $latex R \in \B{R}^{\ell \times m}$$ and define the function
$latex \[
J(x) = R * F^{(1)} ( x )
\] $$
Given the $cref/sparsity pattern/glossary/Sparsity Pattern/$$ for $latex R$$,
$code rev_jac_sparsity$$ computes a sparsity pattern for $latex J(x)$$.
$head x$$
Note that the sparsity pattern $latex J(x)$$ corresponds to the
operation sequence stored in $icode f$$ and does not depend on
the argument $icode x$$.
(The operation sequence may contain
$cref CondExp$$ and $cref VecAD$$ operations.)
$head SizeVector$$
The type $icode SizeVector$$ is a $cref SimpleVector$$ class with
$cref/elements of type/SimpleVector/Elements of Specified Type/$$
$code size_t$$.
$head f$$
The object $icode f$$ has prototype
$codei%
ADFun<%Base%> %f%
%$$
$head pattern_in$$
The argument $icode pattern_in$$ has prototype
$codei%
const sparse_rc<%SizeVector%>& %pattern_in%
%$$
see $cref sparse_rc$$.
If $icode transpose$$ it is false (true),
$icode pattern_in$$ is a sparsity pattern for $latex R$$ ($latex R^\R{T}$$).
$head transpose$$
This argument has prototype
$codei%
bool %transpose%
%$$
See $cref/pattern_in/rev_jac_sparsity/pattern_in/$$ above and
$cref/pattern_out/rev_jac_sparsity/pattern_out/$$ below.
$head dependency$$
This argument has prototype
$codei%
bool %dependency%
%$$
see $cref/pattern_out/rev_jac_sparsity/pattern_out/$$ below.
$head internal_bool$$
If this is true, calculations are done with sets represented by a vector
of boolean values. Otherwise, a vector of sets of integers is used.
$head pattern_out$$
This argument has prototype
$codei%
sparse_rc<%SizeVector%>& %pattern_out%
%$$
This input value of $icode pattern_out$$ does not matter.
If $icode transpose$$ it is false (true),
upon return $icode pattern_out$$ is a sparsity pattern for
$latex J(x)$$ ($latex J(x)^\R{T}$$).
If $icode dependency$$ is true, $icode pattern_out$$ is a
$cref/dependency pattern/dependency.cpp/Dependency Pattern/$$
instead of sparsity pattern.
$head Sparsity for Entire Jacobian$$
Suppose that
$latex R$$ is the $latex m \times m$$ identity matrix.
In this case, $icode pattern_out$$ is a sparsity pattern for
$latex F^{(1)} ( x )$$ ( $latex F^{(1)} (x)^\R{T}$$ )
if $icode transpose$$ is false (true).
$head Example$$
$children%
example/sparse/rev_jac_sparsity.cpp
%$$
The file
$cref rev_jac_sparsity.cpp$$
contains an example and test of this operation.
It returns true if it succeeds and false otherwise.
$end
-----------------------------------------------------------------------------
*/
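/* A minimal usage sketch (illustrative only, not part of the original
header; it mirrors the rev_jac_sparsity.cpp example referenced above, and
the identity choice for R follows the "Sparsity for Entire Jacobian" note):

    // given a recorded ADFun<double> f with m dependent variables:
    size_t m = f.Range();
    sparse_rc< std::vector<size_t> > pattern_in(m, m, m);
    for(size_t k = 0; k < m; ++k)
        pattern_in.set(k, k, k);              // R = m x m identity
    sparse_rc< std::vector<size_t> > pattern_out;
    f.rev_jac_sparsity(
        pattern_in, false, false, true, pattern_out
    );
    // pattern_out now holds the row/column indices of the possibly
    // non-zero entries of the entire Jacobian F^{(1)}(x)
*/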
# include <cppad/core/ad_fun.hpp>
# include <cppad/local/sparse_internal.hpp>
namespace CppAD { // BEGIN_CPPAD_NAMESPACE
/*!
Reverse Jacobian sparsity patterns.
\tparam Base
is the base type for this recording.
\tparam SizeVector
is the simple vector with elements of type size_t that is used for
row, column index sparsity patterns.
\param pattern_in
is the sparsity pattern for R or R^T, depending on transpose.
\param transpose
Is the input and returned sparsity pattern transposed.
\param dependency
Are the derivatives with respect to left and right of the expression below
considered to be non-zero:
\code
CondExpRel(left, right, if_true, if_false)
\endcode
This is used by the optimizer to obtain the correct dependency relations.
\param internal_bool
If this is true, calculations are done with sets represented by a vector
of boolean values. Otherwise, a vector of standard sets is used.
\param pattern_out
The value of transpose is false (true),
the return value is a sparsity pattern for J(x) ( J(x)^T ) where
\f[
J(x) = R * F^{(1)} (x)
\f]
Here F is the function corresponding to the operation sequence
and x is any argument value.
*/
template <class Base>
template <class SizeVector>
void ADFun<Base>::rev_jac_sparsity(
const sparse_rc<SizeVector>& pattern_in ,
bool transpose ,
bool dependency ,
bool internal_bool ,
sparse_rc<SizeVector>& pattern_out )
{ // number or rows, columns, and non-zeros in pattern_in
size_t nr_in = pattern_in.nr();
size_t nc_in = pattern_in.nc();
//
size_t ell = nr_in;
size_t m = nc_in;
if( transpose )
std::swap(ell, m);
//
CPPAD_ASSERT_KNOWN(
m == Range() ,
"rev_jac_sparsity: number columns in R "
"is not equal number of dependent variables."
);
// number of independent variables
size_t n = Domain();
//
bool zero_empty = true;
bool input_empty = true;
if( internal_bool )
{ // allocate memory for bool sparsity calculation
// (sparsity pattern is empty after a resize)
local::sparse_pack internal_jac;
internal_jac.resize(num_var_tape_, ell);
//
// set sparsity pattern for dependent variables
local::set_internal_sparsity(
zero_empty ,<|fim▁hole|> dep_taddr_ ,
internal_jac ,
pattern_in
);
// compute sparsity for other variables
local::RevJacSweep(
dependency,
n,
num_var_tape_,
&play_,
internal_jac
);
// get sparstiy pattern for independent variables
local::get_internal_sparsity(
! transpose, ind_taddr_, internal_jac, pattern_out
);
}
else
{ // allocate memory for set sparsity calculation
// (sparsity pattern is empty after a resize)
local::sparse_list internal_jac;
internal_jac.resize(num_var_tape_, ell);
//
// set sparsity pattern for dependent variables
local::set_internal_sparsity(
zero_empty ,
input_empty ,
! transpose ,
dep_taddr_ ,
internal_jac ,
pattern_in
);
// compute sparsity for other variables
local::RevJacSweep(
dependency,
n,
num_var_tape_,
&play_,
internal_jac
);
// get sparstiy pattern for independent variables
local::get_internal_sparsity(
! transpose, ind_taddr_, internal_jac, pattern_out
);
}
return;
}
} // END_CPPAD_NAMESPACE
# endif<|fim▁end|>
|
input_empty ,
! transpose ,
|
<|file_name|>todo.model.ts<|end_file_name|><|fim▁begin|>'use strict';<|fim▁hole|> done: boolean;
text: string;
}<|fim▁end|>
|
export interface ITodo {
|
<|file_name|>strconv.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[allow(missing_doc)];
use clone::Clone;
use container::Container;
use std::cmp::{Ord, Eq};
use ops::{Add, Sub, Mul, Div, Rem, Neg};
use option::{None, Option, Some};
use char;
use str::{StrSlice};
use str;
use vec::{CopyableVector, ImmutableVector, MutableVector};
use vec::OwnedVector;
use num::{NumCast, Zero, One, cast, pow_with_uint, Integer};
use num::{Round, Float, FPNaN, FPInfinite};
pub enum ExponentFormat {
ExpNone,
ExpDec,
ExpBin
}
pub enum SignificantDigits {
DigAll,
DigMax(uint),
DigExact(uint)
}
pub enum SignFormat {
SignNone,
SignNeg,
SignAll
}
pub trait NumStrConv {
fn nan() -> Option<Self>;
fn inf() -> Option<Self>;
fn neg_inf() -> Option<Self>;
fn neg_zero() -> Option<Self>;
fn round_to_zero(&self) -> Self;
fn fractional_part(&self) -> Self;
}
macro_rules! impl_NumStrConv_Floating (($t:ty) => (
impl NumStrConv for $t {
#[inline]
fn nan() -> Option<$t> { Some( 0.0 / 0.0) }
#[inline]
fn inf() -> Option<$t> { Some( 1.0 / 0.0) }
#[inline]
fn neg_inf() -> Option<$t> { Some(-1.0 / 0.0) }
#[inline]
fn neg_zero() -> Option<$t> { Some(-0.0 ) }
#[inline]
fn round_to_zero(&self) -> $t { self.trunc() }
#[inline]
fn fractional_part(&self) -> $t { self.fract() }
}
))
macro_rules! impl_NumStrConv_Integer (($t:ty) => (
impl NumStrConv for $t {
#[inline] fn nan() -> Option<$t> { None }
#[inline] fn inf() -> Option<$t> { None }
#[inline] fn neg_inf() -> Option<$t> { None }
#[inline] fn neg_zero() -> Option<$t> { None }
#[inline] fn round_to_zero(&self) -> $t { *self }
#[inline] fn fractional_part(&self) -> $t { 0 }
}
))
// FIXME: #4955
// Replace by two generic impls for traits 'Integral' and 'Floating'
impl_NumStrConv_Floating!(float)
impl_NumStrConv_Floating!(f32)
impl_NumStrConv_Floating!(f64)
impl_NumStrConv_Integer!(int)
impl_NumStrConv_Integer!(i8)
impl_NumStrConv_Integer!(i16)
impl_NumStrConv_Integer!(i32)
impl_NumStrConv_Integer!(i64)
impl_NumStrConv_Integer!(uint)
impl_NumStrConv_Integer!(u8)
impl_NumStrConv_Integer!(u16)
impl_NumStrConv_Integer!(u32)
impl_NumStrConv_Integer!(u64)
// Special value strings as [u8] consts.
static INF_BUF: [u8, ..3] = ['i' as u8, 'n' as u8, 'f' as u8];
static POS_INF_BUF: [u8, ..4] = ['+' as u8, 'i' as u8, 'n' as u8,
'f' as u8];
static NEG_INF_BUF: [u8, ..4] = ['-' as u8, 'i' as u8, 'n' as u8,
'f' as u8];
static NAN_BUF: [u8, ..3] = ['N' as u8, 'a' as u8, 'N' as u8];
/**
* Converts an integral number to its string representation as a byte vector.
* This is meant to be a common base implementation for all integral string
* conversion functions like `to_str()` or `to_str_radix()`.
*
* # Arguments
* - `num` - The number to convert. Accepts any number that
* implements the numeric traits.
* - `radix` - Base to use. Accepts only the values 2-36.
* - `sign` - How to emit the sign. Options are:
* - `SignNone`: No sign at all. Basically emits `abs(num)`.
* - `SignNeg`: Only `-` on negative values.
* - `SignAll`: Both `+` on positive, and `-` on negative numbers.
* - `f` - a callback which will be invoked for each ascii character
* which composes the string representation of this integer
*
* # Return value
* Nothing is returned directly; each ascii character of the representation
* is passed to the callback `f` instead.
*
* # Failure
* - Fails if `radix` < 2 or `radix` > 36.
*/
pub fn int_to_str_bytes_common<T:NumCast+Zero+Eq+Ord+Integer+
Div<T,T>+Neg<T>+Rem<T,T>+Mul<T,T>>(
num: T, radix: uint, sign: SignFormat, f: &fn(u8)) {
assert!(2 <= radix && radix <= 36);
let _0: T = Zero::zero();
let neg = num < _0;
let radix_gen: T = cast(radix);
let mut deccum = num;
// This is just for integral types, the largest of which is a u64. The
// smallest base that we can have is 2, so the most number of digits we're
// ever going to have is 64
let mut buf = [0u8, ..64];
let mut cur = 0;
// Loop at least once to make sure at least a `0` gets emitted.
loop {
// Calculate the absolute value of each digit instead of only
// doing it once for the whole number because a
// representable negative number doesn't necessarily have a
// representable additive inverse of the same type
// (see two's complement). But we assume that for the
// numbers [-35 .. 0] we always have [0 .. 35].
let current_digit_signed = deccum % radix_gen;
let current_digit = if current_digit_signed < _0 {
-current_digit_signed
} else {
current_digit_signed
};
buf[cur] = match current_digit.to_u8() {
i @ 0..9 => '0' as u8 + i,
i => 'a' as u8 + (i - 10),
};
cur += 1;
deccum = deccum / radix_gen;
// No more digits to calculate for the non-fractional part -> break
if deccum == _0 { break; }
}
// Decide what sign to put in front
match sign {
SignNeg | SignAll if neg => { f('-' as u8); }
SignAll => { f('+' as u8); }
_ => ()
}
// We built the number in reverse order, so un-reverse it here
while cur > 0 {
cur -= 1;
f(buf[cur]);
}
}
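// Illustrative only (not in the original module): the callback form above
// can be used to collect the digits into a byte vector, e.g.
//
//     let mut bytes: ~[u8] = ~[];
//     int_to_str_bytes_common(-42, 10, SignNeg, |b| bytes.push(b));
//     // bytes now holds the ascii values of '-', '4', '2'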
/**
* Converts a number to its string representation as a byte vector.
* This is meant to be a common base implementation for all numeric string
* conversion functions like `to_str()` or `to_str_radix()`.
*
* # Arguments
* - `num` - The number to convert. Accepts any number that
* implements the numeric traits.
* - `radix` - Base to use. Accepts only the values 2-36.
* - `negative_zero` - Whether to treat the special value `-0` as
* `-0` or as `+0`.
* - `sign` - How to emit the sign. Options are:
* - `SignNone`: No sign at all. Basically emits `abs(num)`.
* - `SignNeg`: Only `-` on negative values.
* - `SignAll`: Both `+` on positive, and `-` on negative numbers.
* - `digits` - The amount of digits to use for emitting the
* fractional part, if any. Options are:
* - `DigAll`: All calculatable digits. Beware of bignums or
* fractions!
* - `DigMax(uint)`: Maximum N digits, truncating any trailing zeros.
* - `DigExact(uint)`: Exactly N digits.
*
* # Return value
* A tuple containing the byte vector, and a boolean flag indicating
* whether it represents a special value like `inf`, `-inf`, `NaN` or not.
* It returns a tuple because there can be ambiguity between a special value
* and a number representation at higher bases.
*
* # Failure
* - Fails if `radix` < 2 or `radix` > 36.
*/
pub fn float_to_str_bytes_common<T:NumCast+Zero+One+Eq+Ord+Float+Round+
Div<T,T>+Neg<T>+Rem<T,T>+Mul<T,T>>(
num: T, radix: uint, negative_zero: bool,
sign: SignFormat, digits: SignificantDigits) -> (~[u8], bool) {
assert!(2 <= radix && radix <= 36);
let _0: T = Zero::zero();
let _1: T = One::one();
match num.classify() {
FPNaN => { return ("NaN".as_bytes().to_owned(), true); }
FPInfinite if num > _0 => {
return match sign {
SignAll => ("+inf".as_bytes().to_owned(), true),
_ => ("inf".as_bytes().to_owned(), true)
};
}
FPInfinite if num < _0 => {
return match sign {
SignNone => ("inf".as_bytes().to_owned(), true),
_ => ("-inf".as_bytes().to_owned(), true),
};
}
_ => {}
}
let neg = num < _0 || (negative_zero && _1 / num == Float::neg_infinity());
let mut buf: ~[u8] = ~[];
let radix_gen: T = cast(radix as int);
// First emit the non-fractional part, looping at least once to make
// sure at least a `0` gets emitted.
let mut deccum = num.trunc();
loop {
// Calculate the absolute value of each digit instead of only
// doing it once for the whole number because a
// representable negative number doesn't necessarily have a
// representable additive inverse of the same type
// (see two's complement). But we assume that for the
// numbers [-35 .. 0] we always have [0 .. 35].
let current_digit = (deccum % radix_gen).abs();
// Decrease the deccumulator one digit at a time
deccum = deccum / radix_gen;
deccum = deccum.trunc();
buf.push(char::from_digit(current_digit.to_int() as uint, radix)
.unwrap() as u8);
// No more digits to calculate for the non-fractional part -> break
if deccum == _0 { break; }
}
// If limited digits, calculate one digit more for rounding.
let (limit_digits, digit_count, exact) = match digits {
DigAll => (false, 0u, false),
DigMax(count) => (true, count+1, false),
DigExact(count) => (true, count+1, true)
};
// Decide what sign to put in front
match sign {
SignNeg | SignAll if neg => {
buf.push('-' as u8);
}
SignAll => {
buf.push('+' as u8);
}
_ => ()
}
buf.reverse();
// Remember start of the fractional digits.
// Points one beyond end of buf if none get generated,
// or at the '.' otherwise.
let start_fractional_digits = buf.len();
// Now emit the fractional part, if any
deccum = num.fract();
if deccum != _0 || (limit_digits && exact && digit_count > 0) {
buf.push('.' as u8);
let mut dig = 0u;
// calculate new digits while
// - there is no limit and there are digits left
// - or there is a limit, it's not reached yet and
// - it's exact
// - or it's a maximum, and there are still digits left
while (!limit_digits && deccum != _0)
|| (limit_digits && dig < digit_count && (
exact
|| (!exact && deccum != _0)
)
) {
// Shift first fractional digit into the integer part
deccum = deccum * radix_gen;
// Calculate the absolute value of each digit.
// See note in first loop.
let current_digit = deccum.trunc().abs();
buf.push(char::from_digit(
current_digit.to_int() as uint, radix).unwrap() as u8);
// Decrease the deccumulator one fractional digit at a time
deccum = deccum.fract();
dig += 1u;
}
// If digits are limited, and that limit has been reached,
// cut off the one extra digit, and depending on its value
// round the remaining ones.
if limit_digits && dig == digit_count {
let ascii2value = |chr: u8| {
char::to_digit(chr as char, radix).unwrap() as uint
};
let value2ascii = |val: uint| {
char::from_digit(val, radix).unwrap() as u8
};
let extra_digit = ascii2value(buf.pop());
if extra_digit >= radix / 2 { // -> need to round
let mut i: int = buf.len() as int - 1;
loop {
// If reached left end of number, have to
// insert additional digit:
if i < 0
|| buf[i] == '-' as u8
|| buf[i] == '+' as u8 {
buf.insert((i + 1) as uint, value2ascii(1));
break;
}
// Skip the '.'
if buf[i] == '.' as u8 { i -= 1; loop; }
// Either increment the digit,
// or set to 0 if max and carry the 1.
let current_digit = ascii2value(buf[i]);
if current_digit < (radix - 1) {
buf[i] = value2ascii(current_digit+1);
break;
} else {
buf[i] = value2ascii(0);
i -= 1;
}
}
}
}
}
// if number of digits is not exact, remove all trailing '0's up to
// and including the '.'
if !exact {
let buf_max_i = buf.len() - 1;
// index to truncate from
let mut i = buf_max_i;
// discover trailing zeros of fractional part
while i > start_fractional_digits && buf[i] == '0' as u8 {
i -= 1;
}
// Only attempt to truncate digits if buf has fractional digits
if i >= start_fractional_digits {
// If buf ends with '.', cut that too.
if buf[i] == '.' as u8 { i -= 1 }
// only resize buf if we actually remove digits
if i < buf_max_i {
buf = buf.slice(0, i + 1).to_owned();
}
}
} // If exact and trailing '.', just cut that
else {
let max_i = buf.len() - 1;
if buf[max_i] == '.' as u8 {
buf = buf.slice(0, max_i).to_owned();
}
}
(buf, false)
}
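// Illustrative only (not in the original module): rounding pi to at most
// three fractional digits in base 10, signing negative values only:
//
//     let (bytes, special) = float_to_str_bytes_common(
//         3.14159, 10, false, SignNeg, DigMax(3));
//     // bytes spells "3.142" and special == false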
/**
* Converts a number to its string representation. This is a wrapper for
* `to_str_bytes_common()`, for details see there.
*/
#[inline]
pub fn float_to_str_common<T:NumCast+Zero+One+Eq+Ord+NumStrConv+Float+Round+
Div<T,T>+Neg<T>+Rem<T,T>+Mul<T,T>>(
num: T, radix: uint, negative_zero: bool,
sign: SignFormat, digits: SignificantDigits) -> (~str, bool) {
let (bytes, special) = float_to_str_bytes_common(num, radix,
negative_zero, sign, digits);
(str::from_utf8(bytes), special)
}
// Some constants for from_str_bytes_common's input validation,
// they define minimum radix values for which the character is a valid digit.
static DIGIT_P_RADIX: uint = ('p' as uint) - ('a' as uint) + 11u;
static DIGIT_I_RADIX: uint = ('i' as uint) - ('a' as uint) + 11u;
static DIGIT_E_RADIX: uint = ('e' as uint) - ('a' as uint) + 11u;
/**
* Parses a byte slice as a number. This is meant to
* be a common base implementation for all numeric string conversion
* functions like `from_str()` or `from_str_radix()`.
*
* # Arguments
* - `buf` - The byte slice to parse.
* - `radix` - Which base to parse the number as. Accepts 2-36.
* - `negative` - Whether to accept negative numbers.
* - `fractional` - Whether to accept numbers with fractional parts.
* - `special` - Whether to accept special values like `inf`
* and `NaN`. Can conflict with `radix`, see Failure.
* - `exponent` - Which exponent format to accept. Options are:
* - `ExpNone`: No Exponent, accepts just plain numbers like `42` or
* `-8.2`.
* - `ExpDec`: Accepts numbers with a decimal exponent like `42e5` or
* `8.2E-2`. The exponent string itself is always base 10.
* Can conflict with `radix`, see Failure.
* - `ExpBin`: Accepts numbers with a binary exponent like `42P-8` or
* `FFp128`. The exponent string itself is always base 10.
* Can conflict with `radix`, see Failure.
* - `empty_zero` - Whether to accept an empty `buf` as a 0 or not.
* - `ignore_underscores` - Whether all underscores within the string should
* be ignored.
*
* # Return value
* Returns `Some(n)` if `buf` parses to a number n without overflowing, and
* `None` otherwise, depending on the constraints set by the remaining
* arguments.
*
* # Failure
* - Fails if `radix` < 2 or `radix` > 36.
* - Fails if `radix` > 14 and `exponent` is `ExpDec` due to conflict
* between digit and exponent sign `'e'`.
* - Fails if `radix` > 25 and `exponent` is `ExpBin` due to conflict
* between digit and exponent sign `'p'`.
* - Fails if `radix` > 18 and `special == true` due to conflict
* between digit and lowest first character in `inf` and `NaN`, the `'i'`.
*/
pub fn from_str_bytes_common<T:NumCast+Zero+One+Eq+Ord+Div<T,T>+
Mul<T,T>+Sub<T,T>+Neg<T>+Add<T,T>+
NumStrConv+Clone>(
buf: &[u8], radix: uint, negative: bool, fractional: bool,
special: bool, exponent: ExponentFormat, empty_zero: bool,
ignore_underscores: bool
) -> Option<T> {
match exponent {
ExpDec if radix >= DIGIT_E_RADIX // decimal exponent 'e'
=> fail!("from_str_bytes_common: radix %? incompatible with \
use of 'e' as decimal exponent", radix),
ExpBin if radix >= DIGIT_P_RADIX // binary exponent 'p'
=> fail!("from_str_bytes_common: radix %? incompatible with \
use of 'p' as binary exponent", radix),
_ if special && radix >= DIGIT_I_RADIX // first digit of 'inf'
=> fail!("from_str_bytes_common: radix %? incompatible with \
special values 'inf' and 'NaN'", radix),
_ if (radix as int) < 2
=> fail!("from_str_bytes_common: radix %? to low, \
must lie in the range [2, 36]", radix),
_ if (radix as int) > 36
=> fail!("from_str_bytes_common: radix %? to high, \
must lie in the range [2, 36]", radix),
_ => ()
}
let _0: T = Zero::zero();
let _1: T = One::one();
let radix_gen: T = cast(radix as int);
let len = buf.len();
if len == 0 {
if empty_zero {
return Some(_0);
} else {
return None;
}
}
if special {
if buf == INF_BUF || buf == POS_INF_BUF {
return NumStrConv::inf();
} else if buf == NEG_INF_BUF {
if negative {
return NumStrConv::neg_inf();
} else {
return None;
}
} else if buf == NAN_BUF {
return NumStrConv::nan();
}
}
let (start, accum_positive) = match buf[0] as char {
'-' if !negative => return None,
'-' => (1u, false),
'+' => (1u, true),
_ => (0u, true)
};
// Initialize accumulator with signed zero for floating point parsing to
// work
let mut accum = if accum_positive { _0.clone() } else { -_1 * _0};
let mut last_accum = accum.clone(); // Necessary to detect overflow
let mut i = start;
let mut exp_found = false;
// Parse integer part of number
while i < len {
let c = buf[i] as char;
match char::to_digit(c, radix) {
Some(digit) => {
// shift accum one digit left
accum = accum * radix_gen.clone();
// add/subtract current digit depending on sign
if accum_positive {
accum = accum + cast(digit as int);
} else {
accum = accum - cast(digit as int);
}
// Detect overflow by comparing to last value, except
// if we've not seen any non-zero digits.
if last_accum != _0 {
if accum_positive && accum <= last_accum { return NumStrConv::inf(); }
if !accum_positive && accum >= last_accum { return NumStrConv::neg_inf(); }
// Detect overflow by reversing the shift-and-add process
if accum_positive &&
(last_accum != ((accum - cast(digit as int))/radix_gen.clone())) {
return NumStrConv::inf();
}
if !accum_positive &&
(last_accum != ((accum + cast(digit as int))/radix_gen.clone())) {
return NumStrConv::neg_inf();
}
}
last_accum = accum.clone();
}
None => match c {
'_' if ignore_underscores => {}<|fim▁hole|> }
'.' if fractional => {
i += 1u; // skip the '.'
break; // start of fractional part
}
_ => return None // invalid number
}
}
i += 1u;
}
// Parse fractional part of number
// Skip if already reached start of exponent
if !exp_found {
let mut power = _1.clone();
while i < len {
let c = buf[i] as char;
match char::to_digit(c, radix) {
Some(digit) => {
// Decrease power one order of magnitude
power = power / radix_gen;
let digit_t: T = cast(digit);
// add/subtract current digit depending on sign
if accum_positive {
accum = accum + digit_t * power;
} else {
accum = accum - digit_t * power;
}
// Detect overflow by comparing to last value
if accum_positive && accum < last_accum { return NumStrConv::inf(); }
if !accum_positive && accum > last_accum { return NumStrConv::neg_inf(); }
last_accum = accum.clone();
}
None => match c {
'_' if ignore_underscores => {}
'e' | 'E' | 'p' | 'P' => {
exp_found = true;
break; // start of exponent
}
_ => return None // invalid number
}
}
i += 1u;
}
}
// Special case: buf is not empty, but contains no digits in front
// of the exponent sign -> the number itself is an empty string
if i == start {
if empty_zero {
return Some(_0);
} else {
return None;
}
}
let mut multiplier = _1.clone();
if exp_found {
let c = buf[i] as char;
let base = match (c, exponent) {
// c is never _ so don't need to handle specially
('e', ExpDec) | ('E', ExpDec) => 10u,
('p', ExpBin) | ('P', ExpBin) => 2u,
_ => return None // char doesn't fit given exponent format
};
// parse remaining bytes as decimal integer,
// skipping the exponent char
let exp: Option<int> = from_str_bytes_common(
buf.slice(i+1, len), 10, true, false, false, ExpNone, false,
ignore_underscores);
match exp {
Some(exp_pow) => {
multiplier = if exp_pow < 0 {
_1 / pow_with_uint::<T>(base, (-exp_pow.to_int()) as uint)
} else {
pow_with_uint::<T>(base, exp_pow.to_int() as uint)
}
}
None => return None // invalid exponent -> invalid number
}
}
Some(accum * multiplier)
}
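// Illustrative only (not in the original module): parsing a signed decimal
// with a fractional part and a decimal exponent:
//
//     let v: Option<f64> = from_str_bytes_common(
//         "-1.5e2".as_bytes(), 10, true, true, false, ExpDec, false, false);
//     // v == Some(-150.0)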
/**
* Parses a string as a number. This is a wrapper for
* `from_str_bytes_common()`, for details see there.
*/
#[inline]
pub fn from_str_common<T:NumCast+Zero+One+Eq+Ord+Div<T,T>+Mul<T,T>+
Sub<T,T>+Neg<T>+Add<T,T>+NumStrConv+Clone>(
buf: &str, radix: uint, negative: bool, fractional: bool,
special: bool, exponent: ExponentFormat, empty_zero: bool,
ignore_underscores: bool
) -> Option<T> {
from_str_bytes_common(buf.as_bytes(), radix, negative,
fractional, special, exponent, empty_zero,
ignore_underscores)
}
#[cfg(test)]
mod test {
use super::*;
use option::*;
#[test]
fn from_str_ignore_underscores() {
let s : Option<u8> = from_str_common("__1__", 2, false, false, false,
ExpNone, false, true);
assert_eq!(s, Some(1u8));
let n : Option<u8> = from_str_common("__1__", 2, false, false, false,
ExpNone, false, false);
assert_eq!(n, None);
let f : Option<f32> = from_str_common("_1_._5_e_1_", 10, false, true, false,
ExpDec, false, true);
assert_eq!(f, Some(1.5e1f32));
}
#[test]
fn from_str_issue5770() {
// try to parse 0b1_1111_1111 = 511 as a u8. Caused problems
// since 255*2+1 == 255 (mod 256) so the overflow wasn't
// detected.
let n : Option<u8> = from_str_common("111111111", 2, false, false, false,
ExpNone, false, false);
assert_eq!(n, None);
}
#[test]
fn from_str_issue7588() {
let u : Option<u8> = from_str_common("1000", 10, false, false, false,
ExpNone, false, false);
assert_eq!(u, None);
let s : Option<i16> = from_str_common("80000", 10, false, false, false,
ExpNone, false, false);
assert_eq!(s, None);
let f : Option<f32> = from_str_common(
"10000000000000000000000000000000000000000", 10, false, false, false,
ExpNone, false, false);
assert_eq!(f, NumStrConv::inf())
let fe : Option<f32> = from_str_common("1e40", 10, false, false, false,
ExpDec, false, false);
assert_eq!(fe, NumStrConv::inf())
}
}
#[cfg(test)]
mod bench {
use extra::test::BenchHarness;
use rand::{XorShiftRng, Rng};
use float;
use to_str::ToStr;
#[bench]
fn uint_to_str_rand(bh: &mut BenchHarness) {
let mut rng = XorShiftRng::new();
do bh.iter {
rng.gen::<uint>().to_str();
}
}
#[bench]
fn float_to_str_rand(bh: &mut BenchHarness) {
let mut rng = XorShiftRng::new();
do bh.iter {
float::to_str(rng.gen());
}
}
}<|fim▁end|>
|
'e' | 'E' | 'p' | 'P' => {
exp_found = true;
break; // start of exponent
|
<|file_name|>jvm.py<|end_file_name|><|fim▁begin|>"""
pygments.lexers.jvm
~~~~~~~~~~~~~~~~~~~
Pygments lexers for JVM languages.
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \
this, combined, default, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation
from pygments.util import shebang_matches
from pygments import unistring as uni
__all__ = ['JavaLexer', 'ScalaLexer', 'GosuLexer', 'GosuTemplateLexer',
'GroovyLexer', 'IokeLexer', 'ClojureLexer', 'ClojureScriptLexer',
'KotlinLexer', 'XtendLexer', 'AspectJLexer', 'CeylonLexer',
'PigLexer', 'GoloLexer', 'JasminLexer', 'SarlLexer']
class JavaLexer(RegexLexer):
"""
For `Java <https://www.oracle.com/technetwork/java/>`_ source code.
"""
name = 'Java'
aliases = ['java']
filenames = ['*.java']
mimetypes = ['text/x-java']
flags = re.MULTILINE | re.DOTALL | re.UNICODE
tokens = {
'root': [
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
# keywords: go before method names to avoid lexing "throw new XYZ"
# as a method signature
(r'(assert|break|case|catch|continue|default|do|else|finally|for|'
r'if|goto|instanceof|new|return|switch|this|throw|try|while)\b',
Keyword),
# method names
(r'((?:(?:[^\W\d]|\$)[\w.\[\]$<>]*\s+)+?)' # return arguments
r'((?:[^\W\d]|\$)[\w$]*)' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Punctuation)),
(r'@[^\W\d][\w.]*', Name.Decorator),
(r'(abstract|const|enum|extends|final|implements|native|private|'
r'protected|public|static|strictfp|super|synchronized|throws|'
r'transient|volatile)\b', Keyword.Declaration),
(r'(boolean|byte|char|double|float|int|long|short|void)\b',
Keyword.Type),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'(true|false|null)\b', Keyword.Constant),
(r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
'class'),
(r'(var)(\s+)', bygroups(Keyword.Declaration, Text),
'var'),
(r'(import(?:\s+static)?)(\s+)', bygroups(Keyword.Namespace, Text),
'import'),
(r'"', String, 'string'),
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
(r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Punctuation,
Name.Attribute)),
(r'^(\s*)(default)(:)', bygroups(Text, Keyword, Punctuation)),
(r'^(\s*)((?:[^\W\d]|\$)[\w$]*)(:)', bygroups(Text, Name.Label,
Punctuation)),
(r'([^\W\d]|\$)[\w$]*', Name),
(r'([0-9][0-9_]*\.([0-9][0-9_]*)?|'
r'\.[0-9][0-9_]*)'
r'([eE][+\-]?[0-9][0-9_]*)?[fFdD]?|'
r'[0-9][eE][+\-]?[0-9][0-9_]*[fFdD]?|'
r'[0-9]([eE][+\-]?[0-9][0-9_]*)?[fFdD]|'
r'0[xX]([0-9a-fA-F][0-9a-fA-F_]*\.?|'
r'([0-9a-fA-F][0-9a-fA-F_]*)?\.[0-9a-fA-F][0-9a-fA-F_]*)'
r'[pP][+\-]?[0-9][0-9_]*[fFdD]?', Number.Float),
(r'0[xX][0-9a-fA-F][0-9a-fA-F_]*[lL]?', Number.Hex),
(r'0[bB][01][01_]*[lL]?', Number.Bin),
(r'0[0-7_]+[lL]?', Number.Oct),
(r'0|[1-9][0-9_]*[lL]?', Number.Integer),
(r'[~^*!%&\[\]<>|+=/?-]', Operator),
(r'[{}();:.,]', Punctuation),
(r'\n', Text)
],
'class': [
(r'([^\W\d]|\$)[\w$]*', Name.Class, '#pop')
],
'var': [
(r'([^\W\d]|\$)[\w$]*', Name, '#pop')
],
'import': [
(r'[\w.]+\*?', Name.Namespace, '#pop')
],
'string': [
(r'[^\\"]+', String),
(r'\\\\', String), # Escaped backslash
(r'\\"', String), # Escaped quote
(r'\\', String), # Bare backslash
(r'"', String, '#pop'), # Closing quote
],
}
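# Illustrative only (not part of the original module): the lexers in this
# file plug into the standard Pygments pipeline, e.g.
#
#     from pygments import highlight
#     from pygments.formatters import TerminalFormatter
#     print(highlight('int x = 42;', JavaLexer(), TerminalFormatter()))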
class AspectJLexer(JavaLexer):
"""
For `AspectJ <http://www.eclipse.org/aspectj/>`_ source code.
.. versionadded:: 1.6
"""
name = 'AspectJ'
aliases = ['aspectj']
filenames = ['*.aj']
mimetypes = ['text/x-aspectj']
aj_keywords = {
'aspect', 'pointcut', 'privileged', 'call', 'execution',
'initialization', 'preinitialization', 'handler', 'get', 'set',
'staticinitialization', 'target', 'args', 'within', 'withincode',
'cflow', 'cflowbelow', 'annotation', 'before', 'after', 'around',
'proceed', 'throwing', 'returning', 'adviceexecution', 'declare',
'parents', 'warning', 'error', 'soft', 'precedence', 'thisJoinPoint',
'thisJoinPointStaticPart', 'thisEnclosingJoinPointStaticPart',
'issingleton', 'perthis', 'pertarget', 'percflow', 'percflowbelow',
'pertypewithin', 'lock', 'unlock', 'thisAspectInstance'
}
aj_inter_type = {'parents:', 'warning:', 'error:', 'soft:', 'precedence:'}
aj_inter_type_annotation = {'@type', '@method', '@constructor', '@field'}
def get_tokens_unprocessed(self, text):
for index, token, value in JavaLexer.get_tokens_unprocessed(self, text):
if token is Name and value in self.aj_keywords:
yield index, Keyword, value
elif token is Name.Label and value in self.aj_inter_type:
yield index, Keyword, value[:-1]
yield index, Operator, value[-1]
elif token is Name.Decorator and value in self.aj_inter_type_annotation:
yield index, Keyword, value
else:
yield index, token, value
class ScalaLexer(RegexLexer):
"""
For `Scala <http://www.scala-lang.org>`_ source code.
"""
name = 'Scala'
aliases = ['scala']
filenames = ['*.scala']
mimetypes = ['text/x-scala']
flags = re.MULTILINE | re.DOTALL
opchar = '[!#%&*\\-\\/:?@^' + uni.combine('Sm', 'So') + ']'
letter = '[_\\$' + uni.combine('Ll', 'Lu', 'Lo', 'Nl', 'Lt') + ']'
upperLetter = '[' + uni.combine('Lu', 'Lt') + ']'
letterOrDigit = '(?:%s|[0-9])' % letter
letterOrDigitNoDollarSign = '(?:%s|[0-9])' % letter.replace('\\$', '')
alphaId = '%s+' % letter
simpleInterpolatedVariable = '%s%s*' % (letter, letterOrDigitNoDollarSign)
idrest = '%s%s*(?:(?<=_)%s+)?' % (letter, letterOrDigit, opchar)
idUpper = '%s%s*(?:(?<=_)%s+)?' % (upperLetter, letterOrDigit, opchar)
plainid = '(?:%s|%s+)' % (idrest, opchar)
backQuotedId = r'`[^`]+`'
anyId = r'(?:%s|%s)' % (plainid, backQuotedId)
notStartOfComment = r'(?!//|/\*)'
endOfLineMaybeWithComment = r'(?=\s*(//|$))'
keywords = (
'new', 'return', 'throw', 'classOf', 'isInstanceOf', 'asInstanceOf',
'else', 'if', 'then', 'do', 'while', 'for', 'yield', 'match', 'case',
'catch', 'finally', 'try'
)
operators = (
'<%', '=:=', '<:<', '<%<', '>:', '<:', '=', '==', '!=', '<=', '>=',
'<>', '<', '>', '<-', '←', '->', '→', '=>', '⇒', '?', '@', '|', '-',
'+', '*', '%', '~', '\\'
)
storage_modifiers = (
'private', 'protected', 'synchronized', '@volatile', 'abstract',
'final', 'lazy', 'sealed', 'implicit', 'override', '@transient',
'@native'
)
tokens = {
'root': [
include('whitespace'),
include('comments'),
include('script-header'),
include('imports'),
include('exports'),
include('storage-modifiers'),
include('annotations'),
include('using'),
include('declarations'),
include('inheritance'),
include('extension'),
include('end'),
include('constants'),
include('strings'),
include('symbols'),
include('singleton-type'),
include('inline'),
include('quoted'),
include('keywords'),
include('operators'),
include('punctuation'),
include('names'),
],
# Includes:
'whitespace': [
(r'\s+', Text),
],
'comments': [
(r'//.*?\n', Comment.Single),
(r'/\*', Comment.Multiline, 'comment'),
],
'script-header': [
(r'^#!([^\n]*)$', Comment.Hashbang),
],
'imports': [
(r'\b(import)(\s+)', bygroups(Keyword, Text), 'import-path'),
],
'exports': [
(r'\b(export)(\s+)(given)(\s+)',
bygroups(Keyword, Text, Keyword, Text), 'export-path'),
(r'\b(export)(\s+)', bygroups(Keyword, Text), 'export-path'),
],
'storage-modifiers': [
(words(storage_modifiers, prefix=r'\b', suffix=r'\b'), Keyword),
# Only highlight soft modifiers if they are eventually followed by
# the correct keyword. Note that soft modifiers can be followed by a
# sequence of regular modifiers; [a-z\s]* skips those, and we just
# check that the soft modifier is applied to a supported statement.
(r'\b(transparent|opaque|infix|open|inline)\b(?=[a-z\s]*\b'
r'(def|val|var|given|type|class|trait|object|enum)\b)', Keyword),
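# e.g. in "transparent inline def f = 1" both soft modifiers match,
# because a supported keyword ("def") eventually follows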
],
'annotations': [
(r'@%s' % idrest, Name.Decorator),
],
'using': [
# using is a soft keyword, can only be used in the first position of
# a parameter or argument list.
(r'(\()(\s*)(using)(\s)', bygroups(Punctuation, Text, Keyword, Text)),
],
'declarations': [
(r'\b(def)\b(\s*)%s(%s)?' % (notStartOfComment, anyId),
bygroups(Keyword, Text, Name.Function)),
(r'\b(trait)\b(\s*)%s(%s)?' % (notStartOfComment, anyId),
bygroups(Keyword, Text, Name.Class)),
(r'\b(?:(case)(\s+))?(class|object|enum)\b(\s*)%s(%s)?' %
(notStartOfComment, anyId),
bygroups(Keyword, Text, Keyword, Text, Name.Class)),
(r'(?<!\.)\b(type)\b(\s*)%s(%s)?' % (notStartOfComment, anyId),
bygroups(Keyword, Text, Name.Class)),
(r'\b(val|var)\b', Keyword.Declaration),
(r'\b(package)(\s+)(object)\b(\s*)%s(%s)?' %
(notStartOfComment, anyId),
bygroups(Keyword, Text, Keyword, Text, Name.Namespace)),
(r'\b(package)(\s+)', bygroups(Keyword, Text), 'package'),
(r'\b(given)\b(\s*)(%s)' % idUpper,
bygroups(Keyword, Text, Name.Class)),
(r'\b(given)\b(\s*)(%s)?' % anyId,
bygroups(Keyword, Text, Name)),
],
'inheritance': [
(r'\b(extends|with|derives)\b(\s*)'
r'(%s|%s|(?=\([^\)]+=>)|(?=%s)|(?="))?' %
(idUpper, backQuotedId, plainid),
bygroups(Keyword, Text, Name.Class)),
],
'extension': [
(r'\b(extension)(\s+)(?=[\[\(])', bygroups(Keyword, Text)),
],
'end': [
# end is a soft keyword, should only be highlighted in certain cases
(r'\b(end)(\s+)(if|while|for|match|new|extension|val|var)\b',
bygroups(Keyword, Text, Keyword)),
(r'\b(end)(\s+)(%s)%s' % (idUpper, endOfLineMaybeWithComment),
bygroups(Keyword, Text, Name.Class)),
(r'\b(end)(\s+)(%s|%s)?%s' %
(backQuotedId, plainid, endOfLineMaybeWithComment),
bygroups(Keyword, Text, Name.Namespace)),
],
'punctuation': [
(r'[{}()\[\];,.]', Punctuation),
(r'(?<!:):(?!:)', Punctuation),
],
'keywords': [
(words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
],
'operators': [
(r'(%s{2,})(\s+)' % opchar, bygroups(Operator, Text)),
(r'/(?![/*])', Operator),
(words(operators), Operator),
(r'(?<!%s)(!|&&|\|\|)(?!%s)' % (opchar, opchar), Operator),
],
'constants': [
(r'\b(this|super)\b', Name.Builtin.Pseudo),
(r'(true|false|null)\b', Keyword.Constant),
(r'0[xX][0-9a-fA-F_]*', Number.Hex),
(r'([0-9][0-9_]*\.[0-9][0-9_]*|\.[0-9][0-9_]*)'
r'([eE][+-]?[0-9][0-9_]*)?[fFdD]?', Number.Float),
(r'[0-9]+([eE][+-]?[0-9]+)?[fFdD]', Number.Float),
(r'[0-9]+([eE][+-]?[0-9]+)[fFdD]?', Number.Float),
(r'[0-9]+[lL]', Number.Integer.Long),
(r'[0-9]+', Number.Integer),
(r'""".*?"""(?!")', String),
(r'"(\\\\|\\"|[^"])*"', String),
(r"(')(\\.)(')", bygroups(String.Char, String.Escape, String.Char)),
(r"'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
],
"strings": [
(r'[fs]"""', String, 'interpolated-string-triple'),
(r'[fs]"', String, 'interpolated-string'),
(r'raw"(\\\\|\\"|[^"])*"', String),
],
'symbols': [
(r"('%s)(?!')" % plainid, String.Symbol),
],
'singleton-type': [
(r'(\.)(type)\b', bygroups(Punctuation, Keyword)),
],
'inline': [
# inline is a soft modifier, only highlighted if followed by if,
# match or parameters.
(r'\b(inline)(?=\s+(%s|%s)\s*:)' % (plainid, backQuotedId),
Keyword),
(r'\b(inline)\b(?=(?:.(?!\b(?:val|def|given)\b))*\b(if|match)\b)',
Keyword),
],
'quoted': [
# '{...} or ${...}
(r"['$]\{(?!')", Punctuation),
# '[...]
(r"'\[(?!')", Punctuation),
],
'names': [
(idUpper, Name.Class),
(anyId, Name),
],
# States
'comment': [
(r'[^/*]+', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline),
],
'import-path': [
(r'(?<=[\n;:])', Text, '#pop'),
include('comments'),
(r'\b(given)\b', Keyword),
include('qualified-name'),
(r'\{', Punctuation, 'import-path-curly-brace'),
],
'import-path-curly-brace': [
include('whitespace'),
include('comments'),
(r'\b(given)\b', Keyword),
(r'=>', Operator),
(r'\}', Punctuation, '#pop'),
(r',', Punctuation),
(r'[\[\]]', Punctuation),
include('qualified-name'),
],
'export-path': [
(r'(?<=[\n;:])', Text, '#pop'),
include('comments'),
include('qualified-name'),
(r'\{', Punctuation, 'export-path-curly-brace'),
],
'export-path-curly-brace': [
include('whitespace'),
include('comments'),
(r'=>', Operator),
(r'\}', Punctuation, '#pop'),
(r',', Punctuation),
include('qualified-name'),
],
'package': [
(r'(?<=[\n;])', Text, '#pop'),
(r':', Punctuation, '#pop'),
include('comments'),
include('qualified-name'),
],
'interpolated-string-triple': [
(r'"""(?!")', String, '#pop'),
(r'"', String),
include('interpolated-string-common'),
],
'interpolated-string': [
(r'"', String, '#pop'),
include('interpolated-string-common'),
],
'interpolated-string-brace': [
(r'\}', String.Interpol, '#pop'),
(r'\{', Punctuation, 'interpolated-string-nested-brace'),
include('root'),
],
'interpolated-string-nested-brace': [
(r'\{', Punctuation, '#push'),
(r'\}', Punctuation, '#pop'),
include('root'),
],
# Helpers
'qualified-name': [
(idUpper, Name.Class),
(r'(%s)(\.)' % anyId, bygroups(Name.Namespace, Punctuation)),
(r'\.', Punctuation),
(anyId, Name),
(r'[^\S\n]+', Text),
],
'interpolated-string-common': [
(r'[^"$\\]+', String),
(r'\$\$', String.Escape),
(r'(\$)(%s)' % simpleInterpolatedVariable,
bygroups(String.Interpol, Name)),
(r'\$\{', String.Interpol, 'interpolated-string-brace'),
(r'\\.', String),
],
}
class GosuLexer(RegexLexer):
"""
For Gosu source code.
.. versionadded:: 1.5
"""
name = 'Gosu'
aliases = ['gosu']
filenames = ['*.gs', '*.gsx', '*.gsp', '*.vark']
mimetypes = ['text/x-gosu']
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
# method names
(r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # modifiers etc.
r'([a-zA-Z_]\w*)' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'@[a-zA-Z_][\w.]*', Name.Decorator),
(r'(in|as|typeof|statictypeof|typeis|typeas|if|else|foreach|for|'
r'index|while|do|continue|break|return|try|catch|finally|this|'
r'throw|new|switch|case|default|eval|super|outer|classpath|'
r'using)\b', Keyword),
(r'(var|delegate|construct|function|private|internal|protected|'
r'public|abstract|override|final|static|extends|transient|'
r'implements|represents|readonly)\b', Keyword.Declaration),
(r'(property\s+)(get|set)?', Keyword.Declaration),
(r'(boolean|byte|char|double|float|int|long|short|void|block)\b',
Keyword.Type),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
(r'(true|false|null|NaN|Infinity)\b', Keyword.Constant),
(r'(class|interface|enhancement|enum)(\s+)([a-zA-Z_]\w*)',
bygroups(Keyword.Declaration, Text, Name.Class)),
(r'(uses)(\s+)([\w.]+\*?)',
bygroups(Keyword.Namespace, Text, Name.Namespace)),
(r'"', String, 'string'),
(r'(\??[.#])([a-zA-Z_]\w*)',
bygroups(Operator, Name.Attribute)),
(r'(:)([a-zA-Z_]\w*)',
bygroups(Operator, Name.Attribute)),
(r'[a-zA-Z_$]\w*', Name),
(r'and|or|not|[\\~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'[0-9]+', Number.Integer),
(r'\n', Text)
],
'templateText': [
(r'(\\<)|(\\\$)', String),
(r'(<%@\s+)(extends|params)',
bygroups(Operator, Name.Decorator), 'stringTemplate'),
(r'<%!--.*?--%>', Comment.Multiline),
(r'(<%)|(<%=)', Operator, 'stringTemplate'),
(r'\$\{', Operator, 'stringTemplateShorthand'),
(r'.', String)
],
'string': [
(r'"', String, '#pop'),
include('templateText')
],
'stringTemplate': [
(r'"', String, 'string'),
(r'%>', Operator, '#pop'),
include('root')
],
'stringTemplateShorthand': [
(r'"', String, 'string'),
(r'\{', Operator, 'stringTemplateShorthand'),
(r'\}', Operator, '#pop'),
include('root')
],
}
class GosuTemplateLexer(Lexer):
"""
For Gosu templates.
.. versionadded:: 1.5
"""
name = 'Gosu Template'
aliases = ['gst']
filenames = ['*.gst']
mimetypes = ['text/x-gosu-template']
def get_tokens_unprocessed(self, text):
lexer = GosuLexer()
stack = ['templateText']
yield from lexer.get_tokens_unprocessed(text, stack)
class GroovyLexer(RegexLexer):
"""
For `Groovy <http://groovy.codehaus.org/>`_ source code.
.. versionadded:: 1.5
"""
name = 'Groovy'
aliases = ['groovy']
filenames = ['*.groovy','*.gradle']
mimetypes = ['text/x-groovy']
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
# Groovy allows a file to start with a shebang
(r'#!(.*?)$', Comment.Preproc, 'base'),
default('base'),
],
'base': [
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
# keywords: go before method names to avoid lexing "throw new XYZ"
# as a method signature
(r'(assert|break|case|catch|continue|default|do|else|finally|for|'
r'if|goto|instanceof|new|return|switch|this|throw|try|while|in|as)\b',
Keyword),
# method names
(r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
r'('
r'[a-zA-Z_]\w*' # method name
r'|"(?:\\\\|\\[^\\]|[^"\\])*"' # or double-quoted method name
r"|'(?:\\\\|\\[^\\]|[^'\\])*'" # or single-quoted method name
r')'
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'@[a-zA-Z_][\w.]*', Name.Decorator),
(r'(abstract|const|enum|extends|final|implements|native|private|'
r'protected|public|static|strictfp|super|synchronized|throws|'
r'transient|volatile)\b', Keyword.Declaration),
(r'(def|boolean|byte|char|double|float|int|long|short|void)\b',
Keyword.Type),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
(r'(true|false|null)\b', Keyword.Constant),
(r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
'class'),
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'""".*?"""', String.Double),
(r"'''.*?'''", String.Single),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'\$/((?!/\$).)*/\$', String),
(r'/(\\\\|\\[^\\]|[^/\\])*/', String),
(r"'\\.'|'[^\\]'|'\\u[0-9a-fA-F]{4}'", String.Char),
(r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)),
(r'[a-zA-Z_]\w*:', Name.Label),
(r'[a-zA-Z_$]\w*', Name),
(r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+L?', Number.Integer),
(r'\n', Text)
],
'class': [
(r'[a-zA-Z_]\w*', Name.Class, '#pop')
],
'import': [
(r'[\w.]+\*?', Name.Namespace, '#pop')
],
}
def analyse_text(text):
return shebang_matches(text, r'groovy')
class IokeLexer(RegexLexer):
"""
For `Ioke <http://ioke.org/>`_ (a strongly typed, dynamic,
prototype-based programming language) source.
.. versionadded:: 1.4
"""
name = 'Ioke'
filenames = ['*.ik']
aliases = ['ioke', 'ik']
mimetypes = ['text/x-iokesrc']
tokens = {
'interpolatableText': [
(r'(\\b|\\e|\\t|\\n|\\f|\\r|\\"|\\\\|\\#|\\\Z|\\u[0-9a-fA-F]{1,4}'
r'|\\[0-3]?[0-7]?[0-7])', String.Escape),
(r'#\{', Punctuation, 'textInterpolationRoot')
],
'text': [
(r'(?<!\\)"', String, '#pop'),
include('interpolatableText'),
(r'[^"]', String)
],
'documentation': [
(r'(?<!\\)"', String.Doc, '#pop'),
include('interpolatableText'),
(r'[^"]', String.Doc)
],
'textInterpolationRoot': [
(r'\}', Punctuation, '#pop'),
include('root')
],
'slashRegexp': [
(r'(?<!\\)/[im-psux]*', String.Regex, '#pop'),
include('interpolatableText'),
(r'\\/', String.Regex),
(r'[^/]', String.Regex)
],
'squareRegexp': [
(r'(?<!\\)][im-psux]*', String.Regex, '#pop'),
include('interpolatableText'),
(r'\\]', String.Regex),
(r'[^\]]', String.Regex)
],
'squareText': [
(r'(?<!\\)]', String, '#pop'),
include('interpolatableText'),
(r'[^\]]', String)
],
'root': [
(r'\n', Text),
(r'\s+', Text),
# Comments
(r';(.*?)\n', Comment),
(r'\A#!(.*?)\n', Comment),
# Regexps
(r'#/', String.Regex, 'slashRegexp'),
(r'#r\[', String.Regex, 'squareRegexp'),
# Symbols
(r':[\w!:?]+', String.Symbol),
(r'[\w!:?]+:(?![\w!?])', String.Other),
(r':"(\\\\|\\[^\\]|[^"\\])*"', String.Symbol),
# Documentation
(r'((?<=fn\()|(?<=fnx\()|(?<=method\()|(?<=macro\()|(?<=lecro\()'
r'|(?<=syntax\()|(?<=dmacro\()|(?<=dlecro\()|(?<=dlecrox\()'
r'|(?<=dsyntax\())\s*"', String.Doc, 'documentation'),
# Text
(r'"', String, 'text'),
(r'#\[', String, 'squareText'),
# Mimic
(r'\w[\w!:?]+(?=\s*=.*mimic\s)', Name.Entity),
# Assignment
(r'[a-zA-Z_][\w!:?]*(?=[\s]*[+*/-]?=[^=].*($|\.))',
Name.Variable),
# keywords
(r'(break|cond|continue|do|ensure|for|for:dict|for:set|if|let|'
r'loop|p:for|p:for:dict|p:for:set|return|unless|until|while|'
r'with)(?![\w!:?])', Keyword.Reserved),
# Origin
(r'(eval|mimic|print|println)(?![\w!:?])', Keyword),
# Base
(r'(cell\?|cellNames|cellOwner\?|cellOwner|cells|cell|'
r'documentation|hash|identity|mimic|removeCell\!|undefineCell\!)'
r'(?![\w!:?])', Keyword),
# Ground
(r'(stackTraceAsText)(?![\w!:?])', Keyword),
# DefaultBehaviour Literals
(r'(dict|list|message|set)(?![\w!:?])', Keyword.Reserved),
# DefaultBehaviour Case
(r'(case|case:and|case:else|case:nand|case:nor|case:not|case:or|'
r'case:otherwise|case:xor)(?![\w!:?])', Keyword.Reserved),
# DefaultBehaviour Reflection
(r'(asText|become\!|derive|freeze\!|frozen\?|in\?|is\?|kind\?|'
r'mimic\!|mimics|mimics\?|prependMimic\!|removeAllMimics\!|'
r'removeMimic\!|same\?|send|thaw\!|uniqueHexId)'
r'(?![\w!:?])', Keyword),
# DefaultBehaviour Aspects
(r'(after|around|before)(?![\w!:?])', Keyword.Reserved),
# DefaultBehaviour
(r'(kind|cellDescriptionDict|cellSummary|genSym|inspect|notice)'
r'(?![\w!:?])', Keyword),
(r'(use|destructuring)', Keyword.Reserved),
# DefaultBehavior BaseBehavior
(r'(cell\?|cellOwner\?|cellOwner|cellNames|cells|cell|'
r'documentation|identity|removeCell!|undefineCell)'
r'(?![\w!:?])', Keyword),
# DefaultBehavior Internal
(r'(internal:compositeRegexp|internal:concatenateText|'
r'internal:createDecimal|internal:createNumber|'
r'internal:createRegexp|internal:createText)'
r'(?![\w!:?])', Keyword.Reserved),
# DefaultBehaviour Conditions
(r'(availableRestarts|bind|error\!|findRestart|handle|'
r'invokeRestart|rescue|restart|signal\!|warn\!)'
r'(?![\w!:?])', Keyword.Reserved),
# constants
(r'(nil|false|true)(?![\w!:?])', Name.Constant),
# names
(r'(Arity|Base|Call|Condition|DateTime|Aspects|Pointcut|'
r'Assignment|BaseBehavior|Boolean|Case|AndCombiner|Else|'
r'NAndCombiner|NOrCombiner|NotCombiner|OrCombiner|XOrCombiner|'
r'Conditions|Definitions|FlowControl|Internal|Literals|'
r'Reflection|DefaultMacro|DefaultMethod|DefaultSyntax|Dict|'
r'FileSystem|Ground|Handler|Hook|IO|IokeGround|Struct|'
r'LexicalBlock|LexicalMacro|List|Message|Method|Mixins|'
r'NativeMethod|Number|Origin|Pair|Range|Reflector|Regexp Match|'
r'Regexp|Rescue|Restart|Runtime|Sequence|Set|Symbol|'
r'System|Text|Tuple)(?![\w!:?])', Name.Builtin),
# functions
('(generateMatchMethod|aliasMethod|\u03bb|\u028E|fnx|fn|method|'
'dmacro|dlecro|syntax|macro|dlecrox|lecrox|lecro|syntax)'
'(?![\\w!:?])', Name.Function),
# Numbers
(r'-?0[xX][0-9a-fA-F]+', Number.Hex),
(r'-?(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
(r'-?\d+', Number.Integer),
(r'#\(', Punctuation),
# Operators
(r'(&&>>|\|\|>>|\*\*>>|:::|::|\.\.\.|===|\*\*>|\*\*=|&&>|&&=|'
r'\|\|>|\|\|=|\->>|\+>>|!>>|<>>>|<>>|&>>|%>>|#>>|@>>|/>>|\*>>|'
r'\?>>|\|>>|\^>>|~>>|\$>>|=>>|<<=|>>=|<=>|<\->|=~|!~|=>|\+\+|'
r'\-\-|<=|>=|==|!=|&&|\.\.|\+=|\-=|\*=|\/=|%=|&=|\^=|\|=|<\-|'
r'\+>|!>|<>|&>|%>|#>|\@>|\/>|\*>|\?>|\|>|\^>|~>|\$>|<\->|\->|'
r'<<|>>|\*\*|\?\||\?&|\|\||>|<|\*|\/|%|\+|\-|&|\^|\||=|\$|!|~|'
r'\?|#|\u2260|\u2218|\u2208|\u2209)', Operator),
(r'(and|nand|or|xor|nor|return|import)(?![\w!?])',
Operator),
# Punctuation
(r'(\`\`|\`|\'\'|\'|\.|\,|@@|@|\[|\]|\(|\)|\{|\})', Punctuation),
# kinds
(r'[A-Z][\w!:?]*', Name.Class),
# default cellnames
(r'[a-z_][\w!:?]*', Name)
]
}
class ClojureLexer(RegexLexer):
"""
Lexer for `Clojure <http://clojure.org/>`_ source code.
.. versionadded:: 0.11
"""
name = 'Clojure'
aliases = ['clojure', 'clj']
filenames = ['*.clj']
mimetypes = ['text/x-clojure', 'application/x-clojure']
special_forms = (
'.', 'def', 'do', 'fn', 'if', 'let', 'new', 'quote', 'var', 'loop'
)
# It's safe to consider 'ns' a declaration thing because it defines a new
# namespace.
declarations = (
'def-', 'defn', 'defn-', 'defmacro', 'defmulti', 'defmethod',
'defstruct', 'defonce', 'declare', 'definline', 'definterface',
'defprotocol', 'defrecord', 'deftype', 'defproject', 'ns'
)
builtins = (
'*', '+', '-', '->', '/', '<', '<=', '=', '==', '>', '>=', '..',
'accessor', 'agent', 'agent-errors', 'aget', 'alength', 'all-ns',
'alter', 'and', 'append-child', 'apply', 'array-map', 'aset',
'aset-boolean', 'aset-byte', 'aset-char', 'aset-double', 'aset-float',
'aset-int', 'aset-long', 'aset-short', 'assert', 'assoc', 'await',
'await-for', 'bean', 'binding', 'bit-and', 'bit-not', 'bit-or',
'bit-shift-left', 'bit-shift-right', 'bit-xor', 'boolean', 'branch?',
'butlast', 'byte', 'cast', 'char', 'children', 'class',
'clear-agent-errors', 'comment', 'commute', 'comp', 'comparator',
'complement', 'concat', 'conj', 'cons', 'constantly', 'cond', 'if-not',
'construct-proxy', 'contains?', 'count', 'create-ns', 'create-struct',
'cycle', 'dec', 'deref', 'difference', 'disj', 'dissoc', 'distinct',
'doall', 'doc', 'dorun', 'doseq', 'dosync', 'dotimes', 'doto',
'double', 'down', 'drop', 'drop-while', 'edit', 'end?', 'ensure',
'eval', 'every?', 'false?', 'ffirst', 'file-seq', 'filter', 'find',
'find-doc', 'find-ns', 'find-var', 'first', 'float', 'flush', 'for',
'fnseq', 'frest', 'gensym', 'get-proxy-class', 'get',
'hash-map', 'hash-set', 'identical?', 'identity', 'if-let', 'import',
'in-ns', 'inc', 'index', 'insert-child', 'insert-left', 'insert-right',
'inspect-table', 'inspect-tree', 'instance?', 'int', 'interleave',
'intersection', 'into', 'into-array', 'iterate', 'join', 'key', 'keys',
'keyword', 'keyword?', 'last', 'lazy-cat', 'lazy-cons', 'left',
'lefts', 'line-seq', 'list*', 'list', 'load', 'load-file',
'locking', 'long', 'loop', 'macroexpand', 'macroexpand-1',
'make-array', 'make-node', 'map', 'map-invert', 'map?', 'mapcat',
'max', 'max-key', 'memfn', 'merge', 'merge-with', 'meta', 'min',
'min-key', 'name', 'namespace', 'neg?', 'new', 'newline', 'next',
'nil?', 'node', 'not', 'not-any?', 'not-every?', 'not=', 'ns-imports',
'ns-interns', 'ns-map', 'ns-name', 'ns-publics', 'ns-refers',
'ns-resolve', 'ns-unmap', 'nth', 'nthrest', 'or', 'parse', 'partial',
'path', 'peek', 'pop', 'pos?', 'pr', 'pr-str', 'print', 'print-str',
'println', 'println-str', 'prn', 'prn-str', 'project', 'proxy',
'proxy-mappings', 'quot', 'rand', 'rand-int', 'range', 're-find',
're-groups', 're-matcher', 're-matches', 're-pattern', 're-seq',
'read', 'read-line', 'reduce', 'ref', 'ref-set', 'refer', 'rem',
'remove', 'remove-method', 'remove-ns', 'rename', 'rename-keys',
'repeat', 'replace', 'replicate', 'resolve', 'rest', 'resultset-seq',
'reverse', 'rfirst', 'right', 'rights', 'root', 'rrest', 'rseq',
'second', 'select', 'select-keys', 'send', 'send-off', 'seq',
'seq-zip', 'seq?', 'set', 'short', 'slurp', 'some', 'sort',
'sort-by', 'sorted-map', 'sorted-map-by', 'sorted-set',
'special-symbol?', 'split-at', 'split-with', 'str', 'string?',
'struct', 'struct-map', 'subs', 'subvec', 'symbol', 'symbol?',
'sync', 'take', 'take-nth', 'take-while', 'test', 'time', 'to-array',
'to-array-2d', 'tree-seq', 'true?', 'union', 'up', 'update-proxy',
'val', 'vals', 'var-get', 'var-set', 'var?', 'vector', 'vector-zip',
'vector?', 'when', 'when-first', 'when-let', 'when-not',
'with-local-vars', 'with-meta', 'with-open', 'with-out-str',
'xml-seq', 'xml-zip', 'zero?', 'zipmap', 'zipper')
# valid names for identifiers
    # well, names just can't consist entirely of numbers
# but this should be good enough for now
# TODO / should divide keywords/symbols into namespace/rest
# but that's hard, so just pretend / is part of the name
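    # e.g. this treats '*ns*', 'nil?', 'map->vec' and 'clojure.core/map'
    # each as a single name token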
valid_name = r'(?!#)[\w!$%*+<=>?/.#|-]+'
tokens = {
'root': [
# the comments - always starting with semicolon
# and going to the end of the line
(r';.*$', Comment.Single),
# whitespaces - usually not relevant
(r'[,\s]+', Text),
# numbers
(r'-?\d+\.\d+', Number.Float),
(r'-?\d+', Number.Integer),
(r'0x-?[abcdef\d]+', Number.Hex),
# strings, symbols and characters
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
(r"'" + valid_name, String.Symbol),
(r"\\(.|[a-z]+)", String.Char),
# keywords
(r'::?#?' + valid_name, String.Symbol),
# special operators
(r'~@|[`\'#^~&@]', Operator),
# highlight the special forms
(words(special_forms, suffix=' '), Keyword),
# Technically, only the special forms are 'keywords'. The problem
# is that only treating them as keywords means that things like
# 'defn' and 'ns' need to be highlighted as builtins. This is ugly
# and weird for most styles. So, as a compromise we're going to
# highlight them as Keyword.Declarations.
(words(declarations, suffix=' '), Keyword.Declaration),
# highlight the builtins
(words(builtins, suffix=' '), Name.Builtin),
# the remaining functions
(r'(?<=\()' + valid_name, Name.Function),<|fim▁hole|> (valid_name, Name.Variable),
# Clojure accepts vector notation
(r'(\[|\])', Punctuation),
# Clojure accepts map notation
(r'(\{|\})', Punctuation),
# the famous parentheses!
(r'(\(|\))', Punctuation),
],
}
class ClojureScriptLexer(ClojureLexer):
"""
Lexer for `ClojureScript <http://clojure.org/clojurescript>`_
source code.
.. versionadded:: 2.0
"""
name = 'ClojureScript'
aliases = ['clojurescript', 'cljs']
filenames = ['*.cljs']
mimetypes = ['text/x-clojurescript', 'application/x-clojurescript']
class TeaLangLexer(RegexLexer):
"""
For `Tea <http://teatrove.org/>`_ source code. Only used within a
TeaTemplateLexer.
.. versionadded:: 1.5
"""
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
# method names
(r'^(\s*(?:[a-zA-Z_][\w\.\[\]]*\s+)+?)' # return arguments
r'([a-zA-Z_]\w*)' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'@[a-zA-Z_][\w\.]*', Name.Decorator),
(r'(and|break|else|foreach|if|in|not|or|reverse)\b',
Keyword),
(r'(as|call|define)\b', Keyword.Declaration),
(r'(true|false|null)\b', Keyword.Constant),
(r'(template)(\s+)', bygroups(Keyword.Declaration, Text), 'template'),
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'(\.)([a-zA-Z_]\w*)', bygroups(Operator, Name.Attribute)),
(r'[a-zA-Z_]\w*:', Name.Label),
(r'[a-zA-Z_\$]\w*', Name),
(r'(isa|[.]{3}|[.]{2}|[=#!<>+-/%&;,.\*\\\(\)\[\]\{\}])', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+L?', Number.Integer),
(r'\n', Text)
],
'template': [
(r'[a-zA-Z_]\w*', Name.Class, '#pop')
],
'import': [
(r'[\w.]+\*?', Name.Namespace, '#pop')
],
}
class CeylonLexer(RegexLexer):
"""
For `Ceylon <http://ceylon-lang.org/>`_ source code.
.. versionadded:: 1.6
"""
name = 'Ceylon'
aliases = ['ceylon']
filenames = ['*.ceylon']
mimetypes = ['text/x-ceylon']
flags = re.MULTILINE | re.DOTALL
#: optional Comment or Whitespace
_ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+'
tokens = {
'root': [
# method names
(r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
r'([a-zA-Z_]\w*)' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*', Comment.Multiline, 'comment'),
(r'(shared|abstract|formal|default|actual|variable|deprecated|small|'
r'late|literal|doc|by|see|throws|optional|license|tagged|final|native|'
r'annotation|sealed)\b', Name.Decorator),
(r'(break|case|catch|continue|else|finally|for|in|'
r'if|return|switch|this|throw|try|while|is|exists|dynamic|'
r'nonempty|then|outer|assert|let)\b', Keyword),
(r'(abstracts|extends|satisfies|'
r'super|given|of|out|assign)\b', Keyword.Declaration),
(r'(function|value|void|new)\b',
Keyword.Type),
(r'(assembly|module|package)(\s+)', bygroups(Keyword.Namespace, Text)),
(r'(true|false|null)\b', Keyword.Constant),
(r'(class|interface|object|alias)(\s+)',
bygroups(Keyword.Declaration, Text), 'class'),
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String),
(r"'\\.'|'[^\\]'|'\\\{#[0-9a-fA-F]{4}\}'", String.Char),
(r'(\.)([a-z_]\w*)',
bygroups(Operator, Name.Attribute)),
(r'[a-zA-Z_]\w*:', Name.Label),
(r'[a-zA-Z_]\w*', Name),
(r'[~^*!%&\[\](){}<>|+=:;,./?-]', Operator),
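            # numeric literals, including Ceylon's '_' digit grouping and
            # magnitude suffixes (e.g. 1_000_000, 1.5k, #FFFF_FFFF, $1010_1010)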
(r'\d{1,3}(_\d{3})+\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
(r'\d{1,3}(_\d{3})+\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?',
Number.Float),
(r'[0-9][0-9]*\.\d{1,3}(_\d{3})+[kMGTPmunpf]?', Number.Float),
(r'[0-9][0-9]*\.[0-9]+([eE][+-]?[0-9]+)?[kMGTPmunpf]?',
Number.Float),
(r'#([0-9a-fA-F]{4})(_[0-9a-fA-F]{4})+', Number.Hex),
(r'#[0-9a-fA-F]+', Number.Hex),
(r'\$([01]{4})(_[01]{4})+', Number.Bin),
(r'\$[01]+', Number.Bin),
(r'\d{1,3}(_\d{3})+[kMGTP]?', Number.Integer),
(r'[0-9]+[kMGTP]?', Number.Integer),
(r'\n', Text)
],
'class': [
(r'[A-Za-z_]\w*', Name.Class, '#pop')
],
'import': [
(r'[a-z][\w.]*',
Name.Namespace, '#pop')
],
'comment': [
(r'[^*/]', Comment.Multiline),
(r'/\*', Comment.Multiline, '#push'),
(r'\*/', Comment.Multiline, '#pop'),
(r'[*/]', Comment.Multiline)
],
}
class KotlinLexer(RegexLexer):
"""
For `Kotlin <http://kotlinlang.org/>`_
source code.
.. versionadded:: 1.5
"""
name = 'Kotlin'
aliases = ['kotlin']
filenames = ['*.kt', '*.kts']
mimetypes = ['text/x-kotlin']
flags = re.MULTILINE | re.DOTALL | re.UNICODE
kt_name = ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
'[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc', 'Cf',
'Mn', 'Mc') + ']*')
kt_space_name = ('@?[_' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl') + ']' +
'[' + uni.combine('Lu', 'Ll', 'Lt', 'Lm', 'Nl', 'Nd', 'Pc', 'Cf',
'Mn', 'Mc', 'Zs')
+ r'\'~!%^&*()+=|\[\]:;,.<>/\?-]*')
kt_id = '(' + kt_name + '|`' + kt_space_name + '`)'
modifiers = (r'actual|abstract|annotation|companion|const|crossinline|'
r'data|enum|expect|external|final|infix|inline|inner|'
r'internal|lateinit|noinline|open|operator|override|private|'
r'protected|public|sealed|suspend|tailrec')
tokens = {
'root': [
# Whitespaces
(r'[^\S\n]+', Text),
(r'\s+', Text),
(r'\\\n', Text), # line continuation
(r'\n', Text),
# Comments
(r'//.*?\n', Comment.Single),
(r'^#!/.+?\n', Comment.Single), # shebang for kotlin scripts
(r'/[*].*?[*]/', Comment.Multiline),
# Keywords
(r'as\?', Keyword),
(r'(as|break|by|catch|constructor|continue|do|dynamic|else|finally|'
r'get|for|if|init|[!]*in|[!]*is|out|reified|return|set|super|this|'
r'throw|try|typealias|typeof|vararg|when|where|while)\b', Keyword),
(r'it\b', Name.Builtin),
# Built-in types
(words(('Boolean?', 'Byte?', 'Char?', 'Double?', 'Float?',
'Int?', 'Long?', 'Short?', 'String?', 'Any?', 'Unit?')), Keyword.Type),
(words(('Boolean', 'Byte', 'Char', 'Double', 'Float',
'Int', 'Long', 'Short', 'String', 'Any', 'Unit'), suffix=r'\b'), Keyword.Type),
# Constants
(r'(true|false|null)\b', Keyword.Constant),
# Imports
(r'(package|import)(\s+)(\S+)', bygroups(Keyword, Text, Name.Namespace)),
# Dot access
(r'(\?\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Operator, Name.Attribute)),
(r'(\.)((?:[^\W\d]|\$)[\w$]*)', bygroups(Punctuation, Name.Attribute)),
# Annotations
(r'@[^\W\d][\w.]*', Name.Decorator),
# Labels
(r'[^\W\d][\w.]+@', Name.Decorator),
# Object expression
(r'(object)(\s+)(:)(\s+)', bygroups(Keyword, Text, Punctuation, Text), 'class'),
# Types
(r'((?:(?:' + modifiers + r'|fun)\s+)*)(class|interface|object)(\s+)',
bygroups(using(this, state='modifiers'), Keyword.Declaration, Text), 'class'),
# Variables
(r'(var|val)(\s+)(\()', bygroups(Keyword.Declaration, Text, Punctuation),
'destructuring_assignment'),
(r'((?:(?:' + modifiers + r')\s+)*)(var|val)(\s+)',
bygroups(using(this, state='modifiers'), Keyword.Declaration, Text), 'variable'),
# Functions
(r'((?:(?:' + modifiers + r')\s+)*)(fun)(\s+)',
bygroups(using(this, state='modifiers'), Keyword.Declaration, Text), 'function'),
# Operators
(r'::|!!|\?[:.]', Operator),
(r'[~^*!%&\[\]<>|+=/?-]', Operator),
# Punctuation
(r'[{}();:.,]', Punctuation),
# Strings
(r'"""', String, 'multiline_string'),
(r'"', String, 'string'),
(r"'\\.'|'[^\\]'", String.Char),
# Numbers
(r"[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFL]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
# Identifiers
(r'' + kt_id + r'((\?[^.])?)', Name) # additionally handle nullable types
],
'class': [
(kt_id, Name.Class, '#pop')
],
'variable': [
(kt_id, Name.Variable, '#pop')
],
'destructuring_assignment': [
(r',', Punctuation),
(r'\s+', Text),
(kt_id, Name.Variable),
(r'(:)(\s+)(' + kt_id + ')', bygroups(Punctuation, Text, Name)),
(r'<', Operator, 'generic'),
(r'\)', Punctuation, '#pop')
],
'function': [
(r'<', Operator, 'generic'),
(r'' + kt_id + r'(\.)' + kt_id, bygroups(Name, Punctuation, Name.Function), '#pop'),
(kt_id, Name.Function, '#pop')
],
'generic': [
(r'(>)(\s*)', bygroups(Operator, Text), '#pop'),
(r':', Punctuation),
(r'(reified|out|in)\b', Keyword),
(r',', Punctuation),
(r'\s+', Text),
(kt_id, Name)
],
'modifiers': [
(r'\w+', Keyword.Declaration),
(r'\s+', Text),
default('#pop')
],
'string': [
(r'"', String, '#pop'),
include('string_common')
],
'multiline_string': [
(r'"""', String, '#pop'),
(r'"', String),
include('string_common')
],
'string_common': [
(r'\\\\', String), # escaped backslash
(r'\\"', String), # escaped quote
(r'\\', String), # bare backslash
(r'\$\{', String.Interpol, 'interpolation'),
(r'(\$)(\w+)', bygroups(String.Interpol, Name)),
(r'[^\\"$]+', String)
],
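        # '${' switches into 'interpolation', which lexes a full expression
        # (via 'root') until the matching '}'; simple '$name' templates are
        # handled inline above, e.g. "sum = ${a + b}" or "hello, $user"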
'interpolation': [
(r'"', String),
(r'\$\{', String.Interpol, 'interpolation'),
(r'\{', Punctuation, 'scope'),
(r'\}', String.Interpol, '#pop'),
include('root')
],
'scope': [
(r'\{', Punctuation, 'scope'),
(r'\}', Punctuation, '#pop'),
include('root')
]
}
class XtendLexer(RegexLexer):
"""
For `Xtend <http://xtend-lang.org/>`_ source code.
.. versionadded:: 1.6
"""
name = 'Xtend'
aliases = ['xtend']
filenames = ['*.xtend']
mimetypes = ['text/x-xtend']
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
# method names
(r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
r'([a-zA-Z_$][\w$]*)' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'@[a-zA-Z_][\w.]*', Name.Decorator),
(r'(assert|break|case|catch|continue|default|do|else|finally|for|'
r'if|goto|instanceof|new|return|switch|this|throw|try|while|IF|'
r'ELSE|ELSEIF|ENDIF|FOR|ENDFOR|SEPARATOR|BEFORE|AFTER)\b',
Keyword),
(r'(def|abstract|const|enum|extends|final|implements|native|private|'
r'protected|public|static|strictfp|super|synchronized|throws|'
r'transient|volatile)\b', Keyword.Declaration),
(r'(boolean|byte|char|double|float|int|long|short|void)\b',
Keyword.Type),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
(r'(true|false|null)\b', Keyword.Constant),
(r'(class|interface)(\s+)', bygroups(Keyword.Declaration, Text),
'class'),
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r"(''')", String, 'template'),
(r'(\u00BB)', String, 'template'),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'[a-zA-Z_]\w*:', Name.Label),
(r'[a-zA-Z_$]\w*', Name),
(r'[~^*!%&\[\](){}<>\|+=:;,./?-]', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+L?', Number.Integer),
(r'\n', Text)
],
'class': [
(r'[a-zA-Z_]\w*', Name.Class, '#pop')
],
'import': [
(r'[\w.]+\*?', Name.Namespace, '#pop')
],
'template': [
(r"'''", String, '#pop'),
(r'\u00AB', String, '#pop'),
(r'.', String)
],
}
class PigLexer(RegexLexer):
"""
For `Pig Latin <https://pig.apache.org/>`_ source code.
.. versionadded:: 2.0
"""
name = 'Pig'
aliases = ['pig']
filenames = ['*.pig']
mimetypes = ['text/x-pig']
flags = re.MULTILINE | re.IGNORECASE
tokens = {
'root': [
(r'\s+', Text),
(r'--.*', Comment),
(r'/\*[\w\W]*?\*/', Comment.Multiline),
(r'\\\n', Text),
(r'\\', Text),
(r'\'(?:\\[ntbrf\\\']|\\u[0-9a-f]{4}|[^\'\\\n\r])*\'', String),
include('keywords'),
include('types'),
include('builtins'),
include('punct'),
include('operators'),
(r'[0-9]*\.[0-9]+(e[0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-f]+', Number.Hex),
(r'[0-9]+L?', Number.Integer),
(r'\n', Text),
(r'([a-z_]\w*)(\s*)(\()',
bygroups(Name.Function, Text, Punctuation)),
(r'[()#:]', Text),
(r'[^(:#\'")\s]+', Text),
(r'\S+\s+', Text) # TODO: make tests pass without \s+
],
'keywords': [
(r'(assert|and|any|all|arrange|as|asc|bag|by|cache|CASE|cat|cd|cp|'
r'%declare|%default|define|dense|desc|describe|distinct|du|dump|'
             r'eval|exec|explain|filter|flatten|foreach|full|generate|group|'
r'help|if|illustrate|import|inner|input|into|is|join|kill|left|'
r'limit|load|ls|map|matches|mkdir|mv|not|null|onschema|or|order|'
r'outer|output|parallel|pig|pwd|quit|register|returns|right|rm|'
r'rmf|rollup|run|sample|set|ship|split|stderr|stdin|stdout|store|'
r'stream|through|union|using|void)\b', Keyword)
],
'builtins': [
(r'(AVG|BinStorage|cogroup|CONCAT|copyFromLocal|copyToLocal|COUNT|'
r'cross|DIFF|MAX|MIN|PigDump|PigStorage|SIZE|SUM|TextLoader|'
r'TOKENIZE)\b', Name.Builtin)
],
'types': [
(r'(bytearray|BIGINTEGER|BIGDECIMAL|chararray|datetime|double|float|'
r'int|long|tuple)\b', Keyword.Type)
],
'punct': [
(r'[;(){}\[\]]', Punctuation),
],
'operators': [
(r'[#=,./%+\-?]', Operator),
(r'(eq|gt|lt|gte|lte|neq|matches)\b', Operator),
(r'(==|<=|<|>=|>|!=)', Operator),
],
}
class GoloLexer(RegexLexer):
"""
For `Golo <http://golo-lang.org/>`_ source code.
.. versionadded:: 2.0
"""
name = 'Golo'
filenames = ['*.golo']
aliases = ['golo']
tokens = {
'root': [
(r'[^\S\n]+', Text),
(r'#.*$', Comment),
            (r'(\^|\.\.\.|:|\?:|->|==|!=|=|\+|\*|%|/|<=|<|>=|>|\.)',
Operator),
(r'(?<=[^-])(-)(?=[^-])', Operator),
(r'(?<=[^`])(is|isnt|and|or|not|oftype|in|orIfNull)\b', Operator.Word),
(r'[]{}|(),[]', Punctuation),
(r'(module|import)(\s+)',
bygroups(Keyword.Namespace, Text),
'modname'),
(r'\b([a-zA-Z_][\w$.]*)(::)', bygroups(Name.Namespace, Punctuation)),
(r'\b([a-zA-Z_][\w$]*(?:\.[a-zA-Z_][\w$]*)+)\b', Name.Namespace),
(r'(let|var)(\s+)',
bygroups(Keyword.Declaration, Text),
'varname'),
(r'(struct)(\s+)',
bygroups(Keyword.Declaration, Text),
'structname'),
(r'(function)(\s+)',
bygroups(Keyword.Declaration, Text),
'funcname'),
(r'(null|true|false)\b', Keyword.Constant),
(r'(augment|pimp'
r'|if|else|case|match|return'
             r'|when|then|otherwise'
r'|while|for|foreach'
r'|try|catch|finally|throw'
r'|local'
r'|continue|break)\b', Keyword),
(r'(map|array|list|set|vector|tuple)(\[)',
bygroups(Name.Builtin, Punctuation)),
(r'(print|println|readln|raise|fun'
r'|asInterfaceInstance)\b', Name.Builtin),
(r'(`?[a-zA-Z_][\w$]*)(\()',
bygroups(Name.Function, Punctuation)),
(r'-?[\d_]*\.[\d_]*([eE][+-]?\d[\d_]*)?F?', Number.Float),
(r'0[0-7]+j?', Number.Oct),
(r'0[xX][a-fA-F0-9]+', Number.Hex),
(r'-?\d[\d_]*L', Number.Integer.Long),
(r'-?\d[\d_]*', Number.Integer),
(r'`?[a-zA-Z_][\w$]*', Name),
(r'@[a-zA-Z_][\w$.]*', Name.Decorator),
(r'"""', String, combined('stringescape', 'triplestring')),
(r'"', String, combined('stringescape', 'doublestring')),
(r"'", String, combined('stringescape', 'singlestring')),
(r'----((.|\n)*?)----', String.Doc)
],
'funcname': [
(r'`?[a-zA-Z_][\w$]*', Name.Function, '#pop'),
],
'modname': [
(r'[a-zA-Z_][\w$.]*\*?', Name.Namespace, '#pop')
],
'structname': [
(r'`?[\w.]+\*?', Name.Class, '#pop')
],
'varname': [
(r'`?[a-zA-Z_][\w$]*', Name.Variable, '#pop'),
],
'string': [
(r'[^\\\'"\n]+', String),
(r'[\'"\\]', String)
],
'stringescape': [
(r'\\([\\abfnrtv"\']|\n|N\{.*?\}|u[a-fA-F0-9]{4}|'
r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
],
'triplestring': [
(r'"""', String, '#pop'),
include('string'),
(r'\n', String),
],
'doublestring': [
(r'"', String.Double, '#pop'),
include('string'),
],
'singlestring': [
(r"'", String, '#pop'),
include('string'),
],
'operators': [
(r'[#=,./%+\-?]', Operator),
(r'(eq|gt|lt|gte|lte|neq|matches)\b', Operator),
(r'(==|<=|<|>=|>|!=)', Operator),
],
}
class JasminLexer(RegexLexer):
"""
For `Jasmin <http://jasmin.sourceforge.net/>`_ assembly code.
.. versionadded:: 2.0
"""
name = 'Jasmin'
aliases = ['jasmin', 'jasminxt']
filenames = ['*.j']
_whitespace = r' \n\t\r'
_ws = r'(?:[%s]+)' % _whitespace
_separator = r'%s:=' % _whitespace
_break = r'(?=[%s]|$)' % _separator
_name = r'[^%s]+' % _separator
_unqualified_name = r'(?:[^%s.;\[/]+)' % _separator
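    # _separator is the set of token-breaking characters (whitespace, ':', '='),
    # _break is a lookahead asserting a token ends at a separator or at end of
    # line, and _name matches any run of non-separator characters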
tokens = {
'default': [
(r'\n', Text, '#pop'),
(r"'", String.Single, ('#pop', 'quote')),
(r'"', String.Double, 'string'),
(r'=', Punctuation),
(r':', Punctuation, 'label'),
(_ws, Text),
(r';.*', Comment.Single),
(r'(\$[-+])?0x-?[\da-fA-F]+%s' % _break, Number.Hex),
(r'(\$[-+]|\+)?-?\d+%s' % _break, Number.Integer),
(r'-?(\d+\.\d*|\.\d+)([eE][-+]?\d+)?[fFdD]?'
r'[\x00-\x08\x0b\x0c\x0e-\x1f]*%s' % _break, Number.Float),
(r'\$%s' % _name, Name.Variable),
# Directives
(r'\.annotation%s' % _break, Keyword.Reserved, 'annotation'),
(r'(\.attribute|\.bytecode|\.debug|\.deprecated|\.enclosing|'
r'\.interface|\.line|\.signature|\.source|\.stack|\.var|abstract|'
r'annotation|bridge|class|default|enum|field|final|fpstrict|'
r'interface|native|private|protected|public|signature|static|'
r'synchronized|synthetic|transient|varargs|volatile)%s' % _break,
Keyword.Reserved),
(r'\.catch%s' % _break, Keyword.Reserved, 'caught-exception'),
(r'(\.class|\.implements|\.inner|\.super|inner|invisible|'
r'invisibleparam|outer|visible|visibleparam)%s' % _break,
Keyword.Reserved, 'class/convert-dots'),
(r'\.field%s' % _break, Keyword.Reserved,
('descriptor/convert-dots', 'field')),
(r'(\.end|\.limit|use)%s' % _break, Keyword.Reserved,
'no-verification'),
(r'\.method%s' % _break, Keyword.Reserved, 'method'),
(r'\.set%s' % _break, Keyword.Reserved, 'var'),
(r'\.throws%s' % _break, Keyword.Reserved, 'exception'),
(r'(from|offset|to|using)%s' % _break, Keyword.Reserved, 'label'),
(r'is%s' % _break, Keyword.Reserved,
('descriptor/convert-dots', 'var')),
(r'(locals|stack)%s' % _break, Keyword.Reserved, 'verification'),
(r'method%s' % _break, Keyword.Reserved, 'enclosing-method'),
# Instructions
(words((
'aaload', 'aastore', 'aconst_null', 'aload', 'aload_0', 'aload_1', 'aload_2',
'aload_3', 'aload_w', 'areturn', 'arraylength', 'astore', 'astore_0', 'astore_1',
'astore_2', 'astore_3', 'astore_w', 'athrow', 'baload', 'bastore', 'bipush',
'breakpoint', 'caload', 'castore', 'd2f', 'd2i', 'd2l', 'dadd', 'daload', 'dastore',
'dcmpg', 'dcmpl', 'dconst_0', 'dconst_1', 'ddiv', 'dload', 'dload_0', 'dload_1',
'dload_2', 'dload_3', 'dload_w', 'dmul', 'dneg', 'drem', 'dreturn', 'dstore', 'dstore_0',
'dstore_1', 'dstore_2', 'dstore_3', 'dstore_w', 'dsub', 'dup', 'dup2', 'dup2_x1',
'dup2_x2', 'dup_x1', 'dup_x2', 'f2d', 'f2i', 'f2l', 'fadd', 'faload', 'fastore', 'fcmpg',
'fcmpl', 'fconst_0', 'fconst_1', 'fconst_2', 'fdiv', 'fload', 'fload_0', 'fload_1',
'fload_2', 'fload_3', 'fload_w', 'fmul', 'fneg', 'frem', 'freturn', 'fstore', 'fstore_0',
'fstore_1', 'fstore_2', 'fstore_3', 'fstore_w', 'fsub', 'i2b', 'i2c', 'i2d', 'i2f', 'i2l',
'i2s', 'iadd', 'iaload', 'iand', 'iastore', 'iconst_0', 'iconst_1', 'iconst_2',
'iconst_3', 'iconst_4', 'iconst_5', 'iconst_m1', 'idiv', 'iinc', 'iinc_w', 'iload',
'iload_0', 'iload_1', 'iload_2', 'iload_3', 'iload_w', 'imul', 'ineg', 'int2byte',
'int2char', 'int2short', 'ior', 'irem', 'ireturn', 'ishl', 'ishr', 'istore', 'istore_0',
'istore_1', 'istore_2', 'istore_3', 'istore_w', 'isub', 'iushr', 'ixor', 'l2d', 'l2f',
'l2i', 'ladd', 'laload', 'land', 'lastore', 'lcmp', 'lconst_0', 'lconst_1', 'ldc2_w',
'ldiv', 'lload', 'lload_0', 'lload_1', 'lload_2', 'lload_3', 'lload_w', 'lmul', 'lneg',
'lookupswitch', 'lor', 'lrem', 'lreturn', 'lshl', 'lshr', 'lstore', 'lstore_0',
'lstore_1', 'lstore_2', 'lstore_3', 'lstore_w', 'lsub', 'lushr', 'lxor',
'monitorenter', 'monitorexit', 'nop', 'pop', 'pop2', 'ret', 'ret_w', 'return', 'saload',
'sastore', 'sipush', 'swap'), suffix=_break), Keyword.Reserved),
(r'(anewarray|checkcast|instanceof|ldc|ldc_w|new)%s' % _break,
Keyword.Reserved, 'class/no-dots'),
(r'invoke(dynamic|interface|nonvirtual|special|'
r'static|virtual)%s' % _break, Keyword.Reserved,
'invocation'),
(r'(getfield|putfield)%s' % _break, Keyword.Reserved,
('descriptor/no-dots', 'field')),
(r'(getstatic|putstatic)%s' % _break, Keyword.Reserved,
('descriptor/no-dots', 'static')),
(words((
'goto', 'goto_w', 'if_acmpeq', 'if_acmpne', 'if_icmpeq',
'if_icmpge', 'if_icmpgt', 'if_icmple', 'if_icmplt', 'if_icmpne',
'ifeq', 'ifge', 'ifgt', 'ifle', 'iflt', 'ifne', 'ifnonnull',
'ifnull', 'jsr', 'jsr_w'), suffix=_break),
Keyword.Reserved, 'label'),
(r'(multianewarray|newarray)%s' % _break, Keyword.Reserved,
'descriptor/convert-dots'),
(r'tableswitch%s' % _break, Keyword.Reserved, 'table')
],
'quote': [
(r"'", String.Single, '#pop'),
(r'\\u[\da-fA-F]{4}', String.Escape),
(r"[^'\\]+", String.Single)
],
'string': [
(r'"', String.Double, '#pop'),
(r'\\([nrtfb"\'\\]|u[\da-fA-F]{4}|[0-3]?[0-7]{1,2})',
String.Escape),
(r'[^"\\]+', String.Double)
],
'root': [
(r'\n+', Text),
(r"'", String.Single, 'quote'),
include('default'),
(r'(%s)([ \t\r]*)(:)' % _name,
bygroups(Name.Label, Text, Punctuation)),
(_name, String.Other)
],
'annotation': [
(r'\n', Text, ('#pop', 'annotation-body')),
(r'default%s' % _break, Keyword.Reserved,
('#pop', 'annotation-default')),
include('default')
],
'annotation-body': [
(r'\n+', Text),
(r'\.end%s' % _break, Keyword.Reserved, '#pop'),
include('default'),
(_name, String.Other, ('annotation-items', 'descriptor/no-dots'))
],
'annotation-default': [
(r'\n+', Text),
(r'\.end%s' % _break, Keyword.Reserved, '#pop'),
include('default'),
default(('annotation-items', 'descriptor/no-dots'))
],
'annotation-items': [
(r"'", String.Single, 'quote'),
include('default'),
(_name, String.Other)
],
'caught-exception': [
(r'all%s' % _break, Keyword, '#pop'),
include('exception')
],
'class/convert-dots': [
include('default'),
(r'(L)((?:%s[/.])*)(%s)(;)' % (_unqualified_name, _name),
bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
'#pop'),
(r'((?:%s[/.])*)(%s)' % (_unqualified_name, _name),
bygroups(Name.Namespace, Name.Class), '#pop')
],
'class/no-dots': [
include('default'),
(r'\[+', Punctuation, ('#pop', 'descriptor/no-dots')),
(r'(L)((?:%s/)*)(%s)(;)' % (_unqualified_name, _name),
bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
'#pop'),
(r'((?:%s/)*)(%s)' % (_unqualified_name, _name),
bygroups(Name.Namespace, Name.Class), '#pop')
],
'descriptor/convert-dots': [
include('default'),
(r'\[+', Punctuation),
(r'(L)((?:%s[/.])*)(%s?)(;)' % (_unqualified_name, _name),
bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
'#pop'),
(r'[^%s\[)L]+' % _separator, Keyword.Type, '#pop'),
default('#pop')
],
'descriptor/no-dots': [
include('default'),
(r'\[+', Punctuation),
(r'(L)((?:%s/)*)(%s)(;)' % (_unqualified_name, _name),
bygroups(Keyword.Type, Name.Namespace, Name.Class, Punctuation),
'#pop'),
(r'[^%s\[)L]+' % _separator, Keyword.Type, '#pop'),
default('#pop')
],
'descriptors/convert-dots': [
(r'\)', Punctuation, '#pop'),
default('descriptor/convert-dots')
],
'enclosing-method': [
(_ws, Text),
(r'(?=[^%s]*\()' % _separator, Text, ('#pop', 'invocation')),
default(('#pop', 'class/convert-dots'))
],
'exception': [
include('default'),
(r'((?:%s[/.])*)(%s)' % (_unqualified_name, _name),
bygroups(Name.Namespace, Name.Exception), '#pop')
],
'field': [
(r'static%s' % _break, Keyword.Reserved, ('#pop', 'static')),
include('default'),
(r'((?:%s[/.](?=[^%s]*[/.]))*)(%s[/.])?(%s)' %
(_unqualified_name, _separator, _unqualified_name, _name),
bygroups(Name.Namespace, Name.Class, Name.Variable.Instance),
'#pop')
],
'invocation': [
include('default'),
(r'((?:%s[/.](?=[^%s(]*[/.]))*)(%s[/.])?(%s)(\()' %
(_unqualified_name, _separator, _unqualified_name, _name),
bygroups(Name.Namespace, Name.Class, Name.Function, Punctuation),
('#pop', 'descriptor/convert-dots', 'descriptors/convert-dots',
'descriptor/convert-dots'))
],
'label': [
include('default'),
(_name, Name.Label, '#pop')
],
'method': [
include('default'),
(r'(%s)(\()' % _name, bygroups(Name.Function, Punctuation),
('#pop', 'descriptor/convert-dots', 'descriptors/convert-dots',
'descriptor/convert-dots'))
],
'no-verification': [
(r'(locals|method|stack)%s' % _break, Keyword.Reserved, '#pop'),
include('default')
],
'static': [
include('default'),
(r'((?:%s[/.](?=[^%s]*[/.]))*)(%s[/.])?(%s)' %
(_unqualified_name, _separator, _unqualified_name, _name),
bygroups(Name.Namespace, Name.Class, Name.Variable.Class), '#pop')
],
'table': [
(r'\n+', Text),
(r'default%s' % _break, Keyword.Reserved, '#pop'),
include('default'),
(_name, Name.Label)
],
'var': [
include('default'),
(_name, Name.Variable, '#pop')
],
'verification': [
include('default'),
(r'(Double|Float|Integer|Long|Null|Top|UninitializedThis)%s' %
_break, Keyword, '#pop'),
(r'Object%s' % _break, Keyword, ('#pop', 'class/no-dots')),
(r'Uninitialized%s' % _break, Keyword, ('#pop', 'label'))
]
}
def analyse_text(text):
score = 0
if re.search(r'^\s*\.class\s', text, re.MULTILINE):
score += 0.5
if re.search(r'^\s*[a-z]+_[a-z]+\b', text, re.MULTILINE):
score += 0.3
if re.search(r'^\s*\.(attribute|bytecode|debug|deprecated|enclosing|'
r'inner|interface|limit|set|signature|stack)\b', text,
re.MULTILINE):
score += 0.6
return min(score, 1.0)
class SarlLexer(RegexLexer):
"""
For `SARL <http://www.sarl.io>`_ source code.
.. versionadded:: 2.4
"""
name = 'SARL'
aliases = ['sarl']
filenames = ['*.sarl']
mimetypes = ['text/x-sarl']
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
# method names
(r'^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)' # return arguments
r'([a-zA-Z_$][\w$]*)' # method name
r'(\s*)(\()', # signature start
bygroups(using(this), Name.Function, Text, Operator)),
(r'[^\S\n]+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'@[a-zA-Z_][\w.]*', Name.Decorator),
(r'(as|break|case|catch|default|do|else|extends|extension|finally|'
r'fires|for|if|implements|instanceof|new|on|requires|return|super|'
r'switch|throw|throws|try|typeof|uses|while|with)\b',
Keyword),
(r'(abstract|def|dispatch|final|native|override|private|protected|'
r'public|static|strictfp|synchronized|transient|val|var|volatile)\b',
Keyword.Declaration),
(r'(boolean|byte|char|double|float|int|long|short|void)\b',
Keyword.Type),
(r'(package)(\s+)', bygroups(Keyword.Namespace, Text)),
(r'(false|it|null|occurrence|this|true|void)\b', Keyword.Constant),
(r'(agent|annotation|artifact|behavior|capacity|class|enum|event|'
r'interface|skill|space)(\s+)', bygroups(Keyword.Declaration, Text),
'class'),
(r'(import)(\s+)', bygroups(Keyword.Namespace, Text), 'import'),
(r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
(r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
(r'[a-zA-Z_]\w*:', Name.Label),
(r'[a-zA-Z_$]\w*', Name),
(r'[~^*!%&\[\](){}<>\|+=:;,./?-]', Operator),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+L?', Number.Integer),
(r'\n', Text)
],
'class': [
(r'[a-zA-Z_]\w*', Name.Class, '#pop')
],
'import': [
(r'[\w.]+\*?', Name.Namespace, '#pop')
],
}<|fim▁end|>
|
# find the remaining variables
|
<|file_name|>StandardDeviationExecutionModelRegressionAlgorithm.py<|end_file_name|><|fim▁begin|># QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from clr import AddReference
AddReference("System")
AddReference("QuantConnect.Algorithm")
AddReference("QuantConnect.Common")
from System import *
from QuantConnect import *
from QuantConnect.Orders import *
from QuantConnect.Algorithm import *
from QuantConnect.Algorithm.Framework import *
from QuantConnect.Algorithm.Framework.Selection import *
from Alphas.RsiAlphaModel import RsiAlphaModel
from Portfolio.EqualWeightingPortfolioConstructionModel import EqualWeightingPortfolioConstructionModel
from Execution.StandardDeviationExecutionModel import StandardDeviationExecutionModel
from datetime import timedelta
<|fim▁hole|>### Regression algorithm for the StandardDeviationExecutionModel.
### This algorithm shows how the execution model works to split up orders and submit them
### only when the price is 2 standard deviations from the 60min mean (default model settings).
### </summary>
### <meta name="tag" content="using data" />
### <meta name="tag" content="using quantconnect" />
### <meta name="tag" content="trading and orders" />
class StandardDeviationExecutionModelRegressionAlgorithm(QCAlgorithm):
'''Regression algorithm for the StandardDeviationExecutionModel.
This algorithm shows how the execution model works to split up orders and submit them
only when the price is 2 standard deviations from the 60min mean (default model settings).'''
def Initialize(self):
        ''' Initialise the data and resolution required, as well as the cash and start-end dates for your algorithm. All algorithms must be initialized.'''
# Set requested data resolution
self.UniverseSettings.Resolution = Resolution.Minute
self.SetStartDate(2013,10,7)
self.SetEndDate(2013,10,11)
self.SetCash(1000000)
self.SetUniverseSelection(ManualUniverseSelectionModel([
Symbol.Create('AIG', SecurityType.Equity, Market.USA),
Symbol.Create('BAC', SecurityType.Equity, Market.USA),
Symbol.Create('IBM', SecurityType.Equity, Market.USA),
Symbol.Create('SPY', SecurityType.Equity, Market.USA)
]))
self.SetAlpha(RsiAlphaModel(14, Resolution.Hour))
self.SetPortfolioConstruction(EqualWeightingPortfolioConstructionModel())
self.SetExecution(StandardDeviationExecutionModel())
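        # The default model waits for a 2 standard deviation move over a
        # 60-minute window; a sketch of non-default settings (constructor
        # parameters assumed to be period, deviations and resolution):
        # self.SetExecution(StandardDeviationExecutionModel(120, 3, Resolution.Minute))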
def OnOrderEvent(self, orderEvent):
self.Log(f"{self.Time}: {orderEvent}")<|fim▁end|>
|
### <summary>
|
<|file_name|>unwind-box.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(managed_boxes)]
use std::task;
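// Spawns a task that allocates a managed box and then fails, checking that
// unwinding drops the box instead of leaking it.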
fn f() {
let _a = @0;
fail!();<|fim▁hole|>pub fn main() {
task::spawn(f);
}<|fim▁end|>
|
}
|
<|file_name|>conf.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# pysia documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Get the project root dir, which is the parent dir of this docs directory.
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)
import pysia
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'PySia'
copyright = u"2017, Jeffrey McLarty"
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = pysia.__version__
# The full version, including alpha/beta/rc tags.
release = pysia.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built
# documents.
#keep_warnings = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as
# html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names
# to template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'pysiadoc'
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'pysia.tex',
u'PySia Documentation',
u'Jeffrey McLarty', 'manual'),
]
# The name of an image file (relative to this directory) to place at
# the top of the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'pysia',
u'PySia Documentation',
[u'Jeffrey McLarty'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'pysia',
u'PySia Documentation',
u'Jeffrey McLarty',
'pysia',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.<|fim▁hole|><|fim▁end|>
|
#texinfo_no_detailmenu = False
|
<|file_name|>development.js<|end_file_name|><|fim▁begin|>function normalizePort(val) {
var port = parseInt(val, 10);
if (isNaN(port)) {
// named pipe
return val;
}
if (port >= 0) {
// port number
return port;
}
return false;
}
var port = normalizePort(process.env.PORT || '3000');
<|fim▁hole|>};<|fim▁end|>
|
module.exports = {
port: port,
db: 'mongodb://'+process.env.IP+'/nexus'
|
<|file_name|>size_hint.rs<|end_file_name|><|fim▁begin|>#![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::slice::SliceExt;
use core::slice::Iter;
// fn size_from_ptr<T>(_: *const T) -> usize {
// mem::size_of::<T>()
// }
// macro_rules! slice_offset {
// ($ptr:expr, $by:expr) => {{
// let ptr = $ptr;
// if size_from_ptr(ptr) == 0 {
// ::intrinsics::arith_offset(ptr as *mut i8, $by) as *mut _
// } else {
// ptr.offset($by)
// }
// }};
// }
// macro_rules! slice_ref {
// ($ptr:expr) => {{
// let ptr = $ptr;
// if size_from_ptr(ptr) == 0 {
// // Use a non-null pointer value
// &mut *(1 as *mut _)
// } else {
// transmute(ptr)
// }
// }};
// }
// pub unsafe fn from_raw_parts<'a, T>(p: *const T, len: usize) -> &'a [T] {
// transmute(RawSlice { data: p, len: len })
// }
// macro_rules! make_slice {
// ($start: expr, $end: expr) => {{
// let start = $start;
// let diff = ($end as usize).wrapping_sub(start as usize);
// if size_from_ptr(start) == 0 {
// // use a non-null pointer value
// unsafe { from_raw_parts(1 as *const _, diff) }
// } else {
// let len = diff / size_from_ptr(start);
// unsafe { from_raw_parts(start, len) }
// }
// }}
// }
// impl<T> SliceExt for [T] {
// type Item = T;
//
// #[inline]
// fn split_at(&self, mid: usize) -> (&[T], &[T]) {
// (&self[..mid], &self[mid..])
// }
//
// #[inline]
// fn iter<'a>(&'a self) -> Iter<'a, T> {
// unsafe {
// let p = if mem::size_of::<T>() == 0 {
// 1 as *const _
// } else {
// let p = self.as_ptr();
// assume(!p.is_null());
// p
// };
//
// Iter {
// ptr: p,
// end: slice_offset!(p, self.len() as isize),
// _marker: marker::PhantomData
// }
// }
// }
//
// #[inline]
// fn split<'a, P>(&'a self, pred: P) -> Split<'a, T, P> where P: FnMut(&T) -> bool {
// Split {
// v: self,
// pred: pred,
// finished: false
// }
// }
//
// #[inline]
// fn splitn<'a, P>(&'a self, n: usize, pred: P) -> SplitN<'a, T, P> where
// P: FnMut(&T) -> bool,
// {
// SplitN {
// inner: GenericSplitN {
// iter: self.split(pred),
// count: n,
// invert: false
// }
// }
// }
//
// #[inline]
// fn rsplitn<'a, P>(&'a self, n: usize, pred: P) -> RSplitN<'a, T, P> where
// P: FnMut(&T) -> bool,
// {
// RSplitN {
// inner: GenericSplitN {
// iter: self.split(pred),
// count: n,
// invert: true
// }
// }
// }
//
// #[inline]
// fn windows(&self, size: usize) -> Windows<T> {
// assert!(size != 0);
// Windows { v: self, size: size }
// }
//
// #[inline]
// fn chunks(&self, size: usize) -> Chunks<T> {
// assert!(size != 0);
// Chunks { v: self, size: size }
// }
//
// #[inline]
// fn get(&self, index: usize) -> Option<&T> {
// if index < self.len() { Some(&self[index]) } else { None }
// }
//
// #[inline]
// fn first(&self) -> Option<&T> {
// if self.is_empty() { None } else { Some(&self[0]) }
// }
//
// #[inline]
// fn tail(&self) -> &[T] { &self[1..] }
//
// #[inline]
// fn init(&self) -> &[T] {
// &self[..self.len() - 1]
// }
//
// #[inline]
// fn last(&self) -> Option<&T> {
// if self.is_empty() { None } else { Some(&self[self.len() - 1]) }
// }
//
// #[inline]
// unsafe fn get_unchecked(&self, index: usize) -> &T {
// transmute(self.repr().data.offset(index as isize))
// }
//
// #[inline]
// fn as_ptr(&self) -> *const T {
// self.repr().data
// }
//
// #[unstable(feature = "core")]
// fn binary_search_by<F>(&self, mut f: F) -> Result<usize, usize> where
// F: FnMut(&T) -> Ordering
// {
// let mut base : usize = 0;
// let mut lim : usize = self.len();
//
// while lim != 0 {
// let ix = base + (lim >> 1);
// match f(&self[ix]) {
// Equal => return Ok(ix),
// Less => {
// base = ix + 1;
// lim -= 1;
// }
// Greater => ()
// }
// lim >>= 1;
// }
// Err(base)
// }
//
// #[inline]
// fn len(&self) -> usize { self.repr().len }
//
// #[inline]
// fn get_mut(&mut self, index: usize) -> Option<&mut T> {
// if index < self.len() { Some(&mut self[index]) } else { None }
// }
//
// #[inline]
// fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
// unsafe {
// let self2: &mut [T] = mem::transmute_copy(&self);
//
// (ops::IndexMut::index_mut(self, ops::RangeTo { end: mid } ),
// ops::IndexMut::index_mut(self2, ops::RangeFrom { start: mid } ))
// }
// }
//
// #[inline]
// fn iter_mut<'a>(&'a mut self) -> IterMut<'a, T> {
// unsafe {
// let p = if mem::size_of::<T>() == 0 {
// 1 as *mut _
// } else {
// let p = self.as_mut_ptr();
// assume(!p.is_null());
// p
// };
//
// IterMut {
// ptr: p,
// end: slice_offset!(p, self.len() as isize),
// _marker: marker::PhantomData
// }
// }
// }
//
// #[inline]
// fn last_mut(&mut self) -> Option<&mut T> {
// let len = self.len();
// if len == 0 { return None; }
// Some(&mut self[len - 1])
// }
//
// #[inline]
// fn first_mut(&mut self) -> Option<&mut T> {
// if self.is_empty() { None } else { Some(&mut self[0]) }
// }
//
// #[inline]
// fn tail_mut(&mut self) -> &mut [T] {
// &mut self[1 ..]
// }
//
// #[inline]
// fn init_mut(&mut self) -> &mut [T] {
// let len = self.len();
// &mut self[.. (len - 1)]
// }
//
// #[inline]
// fn split_mut<'a, P>(&'a mut self, pred: P) -> SplitMut<'a, T, P> where P: FnMut(&T) -> bool {
// SplitMut { v: self, pred: pred, finished: false }
// }
//
// #[inline]
// fn splitn_mut<'a, P>(&'a mut self, n: usize, pred: P) -> SplitNMut<'a, T, P> where
// P: FnMut(&T) -> bool
// {
// SplitNMut {
// inner: GenericSplitN {
// iter: self.split_mut(pred),
// count: n,
// invert: false
// }
// }
// }
//
// #[inline]
// fn rsplitn_mut<'a, P>(&'a mut self, n: usize, pred: P) -> RSplitNMut<'a, T, P> where
// P: FnMut(&T) -> bool,
// {
// RSplitNMut {
// inner: GenericSplitN {
// iter: self.split_mut(pred),
// count: n,
// invert: true
// }
// }
// }
//
// #[inline]
// fn chunks_mut(&mut self, chunk_size: usize) -> ChunksMut<T> {
// assert!(chunk_size > 0);
// ChunksMut { v: self, chunk_size: chunk_size }
// }
//
// #[inline]
// fn swap(&mut self, a: usize, b: usize) {
// unsafe {
// // Can't take two mutable loans from one vector, so instead just cast
// // them to their raw pointers to do the swap
// let pa: *mut T = &mut self[a];
// let pb: *mut T = &mut self[b];
// ptr::swap(pa, pb);
// }
// }
//
// fn reverse(&mut self) {
// let mut i: usize = 0;
// let ln = self.len();<|fim▁hole|> // while i < ln / 2 {
// // Unsafe swap to avoid the bounds check in safe swap.
// unsafe {
// let pa: *mut T = self.get_unchecked_mut(i);
// let pb: *mut T = self.get_unchecked_mut(ln - i - 1);
// ptr::swap(pa, pb);
// }
// i += 1;
// }
// }
//
// #[inline]
// unsafe fn get_unchecked_mut(&mut self, index: usize) -> &mut T {
// transmute((self.repr().data as *mut T).offset(index as isize))
// }
//
// #[inline]
// fn as_mut_ptr(&mut self) -> *mut T {
// self.repr().data as *mut T
// }
//
// #[inline]
// fn position_elem(&self, x: &T) -> Option<usize> where T: PartialEq {
// self.iter().position(|y| *x == *y)
// }
//
// #[inline]
// fn rposition_elem(&self, t: &T) -> Option<usize> where T: PartialEq {
// self.iter().rposition(|x| *x == *t)
// }
//
// #[inline]
// fn contains(&self, x: &T) -> bool where T: PartialEq {
// self.iter().any(|elt| *x == *elt)
// }
//
// #[inline]
// fn starts_with(&self, needle: &[T]) -> bool where T: PartialEq {
// let n = needle.len();
// self.len() >= n && needle == &self[..n]
// }
//
// #[inline]
// fn ends_with(&self, needle: &[T]) -> bool where T: PartialEq {
// let (m, n) = (self.len(), needle.len());
// m >= n && needle == &self[m-n..]
// }
//
// #[unstable(feature = "core")]
// fn binary_search(&self, x: &T) -> Result<usize, usize> where T: Ord {
// self.binary_search_by(|p| p.cmp(x))
// }
//
// #[unstable(feature = "core")]
// fn next_permutation(&mut self) -> bool where T: Ord {
// // These cases only have 1 permutation each, so we can't do anything.
// if self.len() < 2 { return false; }
//
// // Step 1: Identify the longest, rightmost weakly decreasing part of the vector
// let mut i = self.len() - 1;
// while i > 0 && self[i-1] >= self[i] {
// i -= 1;
// }
//
// // If that is the entire vector, this is the last-ordered permutation.
// if i == 0 {
// return false;
// }
//
// // Step 2: Find the rightmost element larger than the pivot (i-1)
// let mut j = self.len() - 1;
// while j >= i && self[j] <= self[i-1] {
// j -= 1;
// }
//
// // Step 3: Swap that element with the pivot
// self.swap(j, i-1);
//
// // Step 4: Reverse the (previously) weakly decreasing part
// self[i..].reverse();
//
// true
// }
//
// #[unstable(feature = "core")]
// fn prev_permutation(&mut self) -> bool where T: Ord {
// // These cases only have 1 permutation each, so we can't do anything.
// if self.len() < 2 { return false; }
//
// // Step 1: Identify the longest, rightmost weakly increasing part of the vector
// let mut i = self.len() - 1;
// while i > 0 && self[i-1] <= self[i] {
// i -= 1;
// }
//
// // If that is the entire vector, this is the first-ordered permutation.
// if i == 0 {
// return false;
// }
//
// // Step 2: Reverse the weakly increasing part
// self[i..].reverse();
//
// // Step 3: Find the rightmost element equal to or bigger than the pivot (i-1)
// let mut j = self.len() - 1;
// while j >= i && self[j-1] < self[i-1] {
// j -= 1;
// }
//
// // Step 4: Swap that element with the pivot
// self.swap(i-1, j);
//
// true
// }
//
// #[inline]
// fn clone_from_slice(&mut self, src: &[T]) -> usize where T: Clone {
// let min = cmp::min(self.len(), src.len());
// let dst = &mut self[.. min];
// let src = &src[.. min];
// for i in 0..min {
// dst[i].clone_from(&src[i]);
// }
// min
// }
// }
// pub struct Iter<'a, T: 'a> {
// ptr: *const T,
// end: *const T,
// _marker: marker::PhantomData<&'a T>,
// }
// macro_rules! iterator {
// (struct $name:ident -> $ptr:ty, $elem:ty) => {
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<'a, T> Iterator for $name<'a, T> {
// type Item = $elem;
//
// #[inline]
// fn next(&mut self) -> Option<$elem> {
// // could be implemented with slices, but this avoids bounds checks
// unsafe {
// if mem::size_of::<T>() != 0 {
// assume(!self.ptr.is_null());
// assume(!self.end.is_null());
// }
// if self.ptr == self.end {
// None
// } else {
// let old = self.ptr;
// self.ptr = slice_offset!(self.ptr, 1);
// Some(slice_ref!(old))
// }
// }
// }
//
// #[inline]
// fn size_hint(&self) -> (usize, Option<usize>) {
// let diff = (self.end as usize).wrapping_sub(self.ptr as usize);
// let size = mem::size_of::<T>();
// let exact = diff / (if size == 0 {1} else {size});
// (exact, Some(exact))
// }
//
// #[inline]
// fn count(self) -> usize {
// self.size_hint().0
// }
//
// #[inline]
// fn nth(&mut self, n: usize) -> Option<$elem> {
// // Call helper method. Can't put the definition here because mut versus const.
// self.iter_nth(n)
// }
//
// #[inline]
// fn last(mut self) -> Option<$elem> {
// self.next_back()
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<'a, T> DoubleEndedIterator for $name<'a, T> {
// #[inline]
// fn next_back(&mut self) -> Option<$elem> {
// // could be implemented with slices, but this avoids bounds checks
// unsafe {
// if mem::size_of::<T>() != 0 {
// assume(!self.ptr.is_null());
// assume(!self.end.is_null());
// }
// if self.end == self.ptr {
// None
// } else {
// self.end = slice_offset!(self.end, -1);
// Some(slice_ref!(self.end))
// }
// }
// }
// }
// }
// }
// iterator!{struct Iter -> *const T, &'a T}
type T = i32;
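    // For slice iterators the hint is exact: both bounds equal the number of
    // elements remaining, computed from the pointer difference in the
    // commented-out implementation above.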
#[test]
fn size_hint_test1() {
let slice: &[T] = &[1, 2, 3, 4, 5, 6];
let iter: Iter<T> = slice.iter();
let (lower, upper): (usize, Option<usize>) = iter.size_hint();
assert_eq!(lower, 6);
assert_eq!(upper, Some::<usize>(6));
}
#[test]
fn size_hint_test2() {
let slice: &[T] = &[1, 2, 3, 4, 5, 6];
let mut iter: Iter<T> = slice.iter();
assert_eq!(iter.next(), Some::<&T>(&1));
let (lower, upper): (usize, Option<usize>) = iter.size_hint();
assert_eq!(lower, 5);
assert_eq!(upper, Some::<usize>(5));
}
}<|fim▁end|>
| |
<|file_name|>related_descriptors.py<|end_file_name|><|fim▁begin|>"""
Accessors for related objects.
When a field defines a relation between two models, each model class provides
an attribute to access related instances of the other model class (unless the
reverse accessor has been disabled with related_name='+').
Accessors are implemented as descriptors in order to customize access and
assignment. This module defines the descriptor classes.
Forward accessors follow foreign keys. Reverse accessors trace them back. For
example, with the following models::
class Parent(Model):
pass
class Child(Model):
parent = ForeignKey(Parent, related_name='children')
``child.parent`` is a forward many-to-one relation. ``parent.children`` is a
reverse many-to-one relation.
There are three types of relations (many-to-one, one-to-one, and many-to-many)
and two directions (forward and reverse) for a total of six combinations.
1. Related instance on the forward side of a many-to-one or one-to-one
relation: ``ForwardManyToOneDescriptor``.
Uniqueness of foreign key values is irrelevant to accessing the related
instance, making the many-to-one and one-to-one cases identical as far as
   the descriptor is concerned. The constraint is checked upstream (uniqueness
validation in forms) or downstream (unique indexes in the database).
If you're looking for ``ForwardOneToOneDescriptor``, use
``ForwardManyToOneDescriptor`` instead.
2. Related instance on the reverse side of a one-to-one relation:
``ReverseOneToOneDescriptor``.
One-to-one relations are asymmetrical, despite the apparent symmetry of the
name, because they're implemented in the database with a foreign key from
one table to another. As a consequence ``ReverseOneToOneDescriptor`` is
slightly different from ``ForwardManyToOneDescriptor``.
3. Related objects manager for related instances on the reverse side of a
many-to-one relation: ``ReverseManyToOneDescriptor``.
Unlike the previous two classes, this one provides access to a collection
of objects. It returns a manager rather than an instance.
4. Related objects manager for related instances on the forward or reverse
sides of a many-to-many relation: ``ManyToManyDescriptor``.
Many-to-many relations are symmetrical. The syntax of Django models
requires declaring them on one side but that's an implementation detail.
They could be declared on the other side without any change in behavior.
Therefore the forward and reverse descriptors can be the same.
If you're looking for ``ForwardManyToManyDescriptor`` or
``ReverseManyToManyDescriptor``, use ``ManyToManyDescriptor`` instead.
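For example (a sketch reusing the models above; ``p`` is a ``Parent``)::

    child = Child.objects.get(pk=1)
    child.parent      # ForwardManyToOneDescriptor.__get__
    child.parent = p  # ForwardManyToOneDescriptor.__set__
    p.children.all()  # ReverseManyToOneDescriptor returns a related manager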
"""
from __future__ import unicode_literals
import warnings
from operator import attrgetter
from django.db import connections, router, transaction
from django.db.models import Q, signals
from django.db.models.query import QuerySet
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.functional import cached_property
class ForwardManyToOneDescriptor(object):
"""
Accessor to the related object on the forward side of a many-to-one or
one-to-one relation.
In the example::
class Child(Model):
parent = ForeignKey(Parent, related_name='children')
``child.parent`` is a ``ForwardManyToOneDescriptor`` instance.
"""
def __init__(self, field_with_rel):
self.field = field_with_rel
self.cache_name = self.field.get_cache_name()
@cached_property
def RelatedObjectDoesNotExist(self):
# The exception can't be created at initialization time since the
# related model might not be resolved yet; `rel.model` might still be
# a string model reference.
return type(
str('RelatedObjectDoesNotExist'),
(self.field.remote_field.model.DoesNotExist, AttributeError),
{}
)
def is_cached(self, instance):
return hasattr(instance, self.cache_name)
def get_queryset(self, **hints):
manager = self.field.remote_field.model._default_manager
# If the related manager indicates that it should be used for
# related fields, respect that.
if not getattr(manager, 'use_for_related_fields', False):
manager = self.field.remote_field.model._base_manager
return manager.db_manager(hints=hints).all()
def get_prefetch_queryset(self, instances, queryset=None):
if queryset is None:
queryset = self.get_queryset()
queryset._add_hints(instance=instances[0])
rel_obj_attr = self.field.get_foreign_related_value
instance_attr = self.field.get_local_related_value
instances_dict = {instance_attr(inst): inst for inst in instances}
related_field = self.field.foreign_related_fields[0]
# FIXME: This will need to be revisited when we introduce support for
# composite fields. In the meantime we take this practical approach to
        # solve a regression on 1.6 when the reverse manager is hidden
# (related_name ends with a '+'). Refs #21410.
# The check for len(...) == 1 is a special case that allows the query
# to be join-less and smaller. Refs #21760.
if self.field.remote_field.is_hidden() or len(self.field.foreign_related_fields) == 1:
query = {'%s__in' % related_field.name: set(instance_attr(inst)[0] for inst in instances)}
else:
query = {'%s__in' % self.field.related_query_name(): instances}
queryset = queryset.filter(**query)
# Since we're going to assign directly in the cache,
# we must manage the reverse relation cache manually.
if not self.field.remote_field.multiple:
rel_obj_cache_name = self.field.remote_field.get_cache_name()
for rel_obj in queryset:
instance = instances_dict[rel_obj_attr(rel_obj)]
setattr(rel_obj, rel_obj_cache_name, instance)
return queryset, rel_obj_attr, instance_attr, True, self.cache_name
def __get__(self, instance, cls=None):
"""
Get the related instance through the forward relation.
With the example above, when getting ``child.parent``:
- ``self`` is the descriptor managing the ``parent`` attribute
- ``instance`` is the ``child`` instance
- ``cls`` is the ``Child`` class (we don't need it)
"""
if instance is None:
return self
# The related instance is loaded from the database and then cached in
# the attribute defined in self.cache_name. It can also be pre-cached
# by the reverse accessor (ReverseOneToOneDescriptor).
try:
rel_obj = getattr(instance, self.cache_name)
except AttributeError:
val = self.field.get_local_related_value(instance)
if None in val:
rel_obj = None
else:
qs = self.get_queryset(instance=instance)
qs = qs.filter(self.field.get_reverse_related_filter(instance))
# Assuming the database enforces foreign keys, this won't fail.
rel_obj = qs.get()
# If this is a one-to-one relation, set the reverse accessor
# cache on the related object to the current instance to avoid
# an extra SQL query if it's accessed later on.
if not self.field.remote_field.multiple:
setattr(rel_obj, self.field.remote_field.get_cache_name(), instance)
setattr(instance, self.cache_name, rel_obj)
if rel_obj is None and not self.field.null:
raise self.RelatedObjectDoesNotExist(
"%s has no %s." % (self.field.model.__name__, self.field.name)
)
else:
return rel_obj
def __set__(self, instance, value):
"""
Set the related instance through the forward relation.
With the example above, when setting ``child.parent = parent``:
- ``self`` is the descriptor managing the ``parent`` attribute
- ``instance`` is the ``child`` instance
        - ``value`` is the ``parent`` instance on the right of the equal sign
"""
# An object must be an instance of the related class.
if value is not None and not isinstance(value, self.field.remote_field.model._meta.concrete_model):
raise ValueError(
'Cannot assign "%r": "%s.%s" must be a "%s" instance.' % (
value,
instance._meta.object_name,
self.field.name,
self.field.remote_field.model._meta.object_name,
)
)
elif value is not None:
if instance._state.db is None:
instance._state.db = router.db_for_write(instance.__class__, instance=value)
elif value._state.db is None:
value._state.db = router.db_for_write(value.__class__, instance=instance)
elif value._state.db is not None and instance._state.db is not None:
if not router.allow_relation(value, instance):
raise ValueError('Cannot assign "%r": the current database router prevents this relation.' % value)
# If we're setting the value of a OneToOneField to None, we need to clear
# out the cache on any old related object. Otherwise, deleting the
# previously-related object will also cause this object to be deleted,
# which is wrong.
if value is None:
# Look up the previously-related object, which may still be available
# since we've not yet cleared out the related field.
# Use the cache directly, instead of the accessor; if we haven't
# populated the cache, then we don't care - we're only accessing
# the object to invalidate the accessor cache, so there's no
# need to populate the cache just to expire it again.
related = getattr(instance, self.cache_name, None)
# If we've got an old related object, we need to clear out its
# cache. This cache also might not exist if the related object
# hasn't been accessed yet.
if related is not None:
setattr(related, self.field.remote_field.get_cache_name(), None)
for lh_field, rh_field in self.field.related_fields:
setattr(instance, lh_field.attname, None)
# Set the values of the related field.
else:
for lh_field, rh_field in self.field.related_fields:
setattr(instance, lh_field.attname, getattr(value, rh_field.attname))
# Set the related instance cache used by __get__ to avoid a SQL query
# when accessing the attribute we just set.
setattr(instance, self.cache_name, value)
# If this is a one-to-one relation, set the reverse accessor cache on
# the related object to the current instance to avoid an extra SQL
# query if it's accessed later on.
if value is not None and not self.field.remote_field.multiple:
setattr(value, self.field.remote_field.get_cache_name(), instance)
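# Assignment sketch for the descriptor above (hypothetical Child/Parent
# models):
#
#     child.parent = parent
#     # copies the referenced attributes via self.field.related_fields, then
#     # primes the cache (self.cache_name) so a later read issues no SQL
#     child.parent = None
#     # clears the local FK attributes and any stale reverse cache on the
#     # previously related object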
class ReverseOneToOneDescriptor(object):
"""
Accessor to the related object on the reverse side of a one-to-one
relation.
In the example::
class Restaurant(Model):
place = OneToOneField(Place, related_name='restaurant')
``place.restaurant`` is a ``ReverseOneToOneDescriptor`` instance.
"""
def __init__(self, related):
self.related = related
self.cache_name = related.get_cache_name()
@cached_property
def RelatedObjectDoesNotExist(self):
# The exception isn't created at initialization time for the sake of
# consistency with `ForwardManyToOneDescriptor`.
return type(
str('RelatedObjectDoesNotExist'),
(self.related.related_model.DoesNotExist, AttributeError),
{}
)
def is_cached(self, instance):
return hasattr(instance, self.cache_name)
def get_queryset(self, **hints):
manager = self.related.related_model._default_manager
# If the related manager indicates that it should be used for
# related fields, respect that.
if not getattr(manager, 'use_for_related_fields', False):
manager = self.related.related_model._base_manager
return manager.db_manager(hints=hints).all()
def get_prefetch_queryset(self, instances, queryset=None):
if queryset is None:
queryset = self.get_queryset()
queryset._add_hints(instance=instances[0])
rel_obj_attr = attrgetter(self.related.field.attname)
def instance_attr(obj):
return obj._get_pk_val()
instances_dict = {instance_attr(inst): inst for inst in instances}
query = {'%s__in' % self.related.field.name: instances}
queryset = queryset.filter(**query)
# Since we're going to assign directly in the cache,
# we must manage the reverse relation cache manually.
rel_obj_cache_name = self.related.field.get_cache_name()
for rel_obj in queryset:
instance = instances_dict[rel_obj_attr(rel_obj)]
setattr(rel_obj, rel_obj_cache_name, instance)
return queryset, rel_obj_attr, instance_attr, True, self.cache_name
def __get__(self, instance, cls=None):
"""
Get the related instance through the reverse relation.
With the example above, when getting ``place.restaurant``:
- ``self`` is the descriptor managing the ``restaurant`` attribute
- ``instance`` is the ``place`` instance
        - ``cls`` is the ``Place`` class (we don't need it)
Keep in mind that ``Restaurant`` holds the foreign key to ``Place``.
"""
if instance is None:
return self
# The related instance is loaded from the database and then cached in
# the attribute defined in self.cache_name. It can also be pre-cached
# by the forward accessor (ForwardManyToOneDescriptor).
try:
rel_obj = getattr(instance, self.cache_name)
except AttributeError:
related_pk = instance._get_pk_val()
if related_pk is None:
rel_obj = None
else:
filter_args = self.related.field.get_forward_related_filter(instance)
try:
rel_obj = self.get_queryset(instance=instance).get(**filter_args)
except self.related.related_model.DoesNotExist:
rel_obj = None
else:
# Set the forward accessor cache on the related object to
# the current instance to avoid an extra SQL query if it's
# accessed later on.
setattr(rel_obj, self.related.field.get_cache_name(), instance)
setattr(instance, self.cache_name, rel_obj)
if rel_obj is None:
raise self.RelatedObjectDoesNotExist(
"%s has no %s." % (
instance.__class__.__name__,
self.related.get_accessor_name()
)
)
else:
return rel_obj
def __set__(self, instance, value):
"""
Set the related instance through the reverse relation.
With the example above, when setting ``place.restaurant = restaurant``:
- ``self`` is the descriptor managing the ``restaurant`` attribute
- ``instance`` is the ``place`` instance
        - ``value`` is the ``restaurant`` instance on the right of the equal sign
Keep in mind that ``Restaurant`` holds the foreign key to ``Place``.
"""
# The similarity of the code below to the code in
# ForwardManyToOneDescriptor is annoying, but there's a bunch
# of small differences that would make a common base class convoluted.
if value is None:
# Update the cached related instance (if any) & clear the cache.
try:
rel_obj = getattr(instance, self.cache_name)
except AttributeError:
pass
else:
delattr(instance, self.cache_name)
setattr(rel_obj, self.related.field.name, None)
elif not isinstance(value, self.related.related_model):
# An object must be an instance of the related class.
raise ValueError(
'Cannot assign "%r": "%s.%s" must be a "%s" instance.' % (
value,
instance._meta.object_name,
self.related.get_accessor_name(),
self.related.related_model._meta.object_name,
)
)
else:
if instance._state.db is None:
instance._state.db = router.db_for_write(instance.__class__, instance=value)
elif value._state.db is None:
value._state.db = router.db_for_write(value.__class__, instance=instance)
elif value._state.db is not None and instance._state.db is not None:
if not router.allow_relation(value, instance):
raise ValueError('Cannot assign "%r": the current database router prevents this relation.' % value)
related_pk = tuple(getattr(instance, field.attname) for field in self.related.field.foreign_related_fields)
# Set the value of the related field to the value of the related object's related field
for index, field in enumerate(self.related.field.local_related_fields):
setattr(value, field.attname, related_pk[index])
# Set the related instance cache used by __get__ to avoid a SQL query
# when accessing the attribute we just set.
setattr(instance, self.cache_name, value)
# Set the forward accessor cache on the related object to the current
# instance to avoid an extra SQL query if it's accessed later on.
setattr(value, self.related.field.get_cache_name(), instance)
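# Access sketch for the descriptor above (hypothetical Restaurant/Place
# models):
#
#     place.restaurant  # one SELECT, result cached on `place`
#     place.restaurant  # second access served from the cache, no query
#
# When no Restaurant row points at the Place, RelatedObjectDoesNotExist is
# raised; because it subclasses AttributeError, hasattr(place, 'restaurant')
# still works as an existence check.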
class ReverseManyToOneDescriptor(object):
"""
Accessor to the related objects manager on the reverse side of a
many-to-one relation.
In the example::
class Child(Model):
parent = ForeignKey(Parent, related_name='children')
``parent.children`` is a ``ReverseManyToOneDescriptor`` instance.
Most of the implementation is delegated to a dynamically defined manager
    class built by ``create_reverse_many_to_one_manager()`` defined below.
"""
def __init__(self, rel):
self.rel = rel
self.field = rel.field
@cached_property
def related_manager_cls(self):
return create_reverse_many_to_one_manager(
self.rel.related_model._default_manager.__class__,
self.rel,
)
def __get__(self, instance, cls=None):
"""
Get the related objects through the reverse relation.
With the example above, when getting ``parent.children``:
- ``self`` is the descriptor managing the ``children`` attribute
- ``instance`` is the ``parent`` instance
        - ``cls`` is the ``Parent`` class (we don't need it)
"""
if instance is None:
return self
return self.related_manager_cls(instance)
def __set__(self, instance, value):
"""
Set the related objects through the reverse relation.
With the example above, when setting ``parent.children = children``:
- ``self`` is the descriptor managing the ``children`` attribute
- ``instance`` is the ``parent`` instance
        - ``value`` is the ``children`` sequence on the right of the equal sign
"""
warnings.warn(
'Direct assignment to the reverse side of a related set is '
'deprecated due to the implicit save() that happens. Use %s.set() '
'instead.' % self.rel.get_accessor_name(), RemovedInDjango20Warning, stacklevel=2,
)
manager = self.__get__(instance)
manager.set(value)
def create_reverse_many_to_one_manager(superclass, rel):
"""
Create a manager for the reverse side of a many-to-one relation.
This manager subclasses another manager, generally the default manager of
the related model, and adds behaviors specific to many-to-one relations.
"""
class RelatedManager(superclass):
def __init__(self, instance):
super(RelatedManager, self).__init__()
self.instance = instance
self.model = rel.related_model
self.field = rel.field
self.core_filters = {self.field.name: instance}
def __call__(self, **kwargs):
# We use **kwargs rather than a kwarg argument to enforce the
# `manager='manager_name'` syntax.
manager = getattr(self.model, kwargs.pop('manager'))
manager_class = create_reverse_many_to_one_manager(manager.__class__, rel)
return manager_class(self.instance)
do_not_call_in_templates = True
def _apply_rel_filters(self, queryset):
"""
Filter the queryset for the instance this manager is bound to.
"""
db = self._db or router.db_for_read(self.model, instance=self.instance)
empty_strings_as_null = connections[db].features.interprets_empty_strings_as_nulls
queryset._add_hints(instance=self.instance)
if self._db:
queryset = queryset.using(self._db)
queryset = queryset.filter(**self.core_filters)
for field in self.field.foreign_related_fields:
val = getattr(self.instance, field.attname)
if val is None or (val == '' and empty_strings_as_null):
return queryset.none()
queryset._known_related_objects = {self.field: {self.instance.pk: self.instance}}
return queryset
def get_queryset(self):
try:
return self.instance._prefetched_objects_cache[self.field.related_query_name()]
except (AttributeError, KeyError):
queryset = super(RelatedManager, self).get_queryset()
return self._apply_rel_filters(queryset)
def get_prefetch_queryset(self, instances, queryset=None):
if queryset is None:
queryset = super(RelatedManager, self).get_queryset()
queryset._add_hints(instance=instances[0])
queryset = queryset.using(queryset._db or self._db)
rel_obj_attr = self.field.get_local_related_value
instance_attr = self.field.get_foreign_related_value
instances_dict = {instance_attr(inst): inst for inst in instances}
query = {'%s__in' % self.field.name: instances}
queryset = queryset.filter(**query)
# Since we just bypassed this class' get_queryset(), we must manage
# the reverse relation manually.
for rel_obj in queryset:
instance = instances_dict[rel_obj_attr(rel_obj)]
setattr(rel_obj, self.field.name, instance)
cache_name = self.field.related_query_name()
return queryset, rel_obj_attr, instance_attr, False, cache_name
def add(self, *objs, **kwargs):
bulk = kwargs.pop('bulk', True)
objs = list(objs)
db = router.db_for_write(self.model, instance=self.instance)
def check_and_update_obj(obj):
if not isinstance(obj, self.model):
raise TypeError("'%s' instance expected, got %r" % (
self.model._meta.object_name, obj,
))
setattr(obj, self.field.name, self.instance)
if bulk:
pks = []
for obj in objs:
check_and_update_obj(obj)
if obj._state.adding or obj._state.db != db:
raise ValueError(
"%r instance isn't saved. Use bulk=False or save "
"the object first." % obj
)
pks.append(obj.pk)
self.model._base_manager.using(db).filter(pk__in=pks).update(**{
self.field.name: self.instance,
})
else:
with transaction.atomic(using=db, savepoint=False):
for obj in objs:
check_and_update_obj(obj)
obj.save()
add.alters_data = True
def create(self, **kwargs):
kwargs[self.field.name] = self.instance
db = router.db_for_write(self.model, instance=self.instance)
return super(RelatedManager, self.db_manager(db)).create(**kwargs)
create.alters_data = True
def get_or_create(self, **kwargs):
kwargs[self.field.name] = self.instance
db = router.db_for_write(self.model, instance=self.instance)
return super(RelatedManager, self.db_manager(db)).get_or_create(**kwargs)
get_or_create.alters_data = True
def update_or_create(self, **kwargs):
kwargs[self.field.name] = self.instance
db = router.db_for_write(self.model, instance=self.instance)
return super(RelatedManager, self.db_manager(db)).update_or_create(**kwargs)
update_or_create.alters_data = True
# remove() and clear() are only provided if the ForeignKey can have a value of null.
if rel.field.null:
def remove(self, *objs, **kwargs):
if not objs:
return
bulk = kwargs.pop('bulk', True)
val = self.field.get_foreign_related_value(self.instance)
old_ids = set()
for obj in objs:
# Is obj actually part of this descriptor set?
if self.field.get_local_related_value(obj) == val:
old_ids.add(obj.pk)
else:
raise self.field.remote_field.model.DoesNotExist(
"%r is not related to %r." % (obj, self.instance)
)
self._clear(self.filter(pk__in=old_ids), bulk)
remove.alters_data = True
def clear(self, **kwargs):
bulk = kwargs.pop('bulk', True)
self._clear(self, bulk)
clear.alters_data = True
def _clear(self, queryset, bulk):
db = router.db_for_write(self.model, instance=self.instance)
queryset = queryset.using(db)
if bulk:
# `QuerySet.update()` is intrinsically atomic.
queryset.update(**{self.field.name: None})
else:
with transaction.atomic(using=db, savepoint=False):
for obj in queryset:
setattr(obj, self.field.name, None)
obj.save(update_fields=[self.field.name])
_clear.alters_data = True
def set(self, objs, **kwargs):
# Force evaluation of `objs` in case it's a queryset whose value
# could be affected by `manager.clear()`. Refs #19816.
objs = tuple(objs)
bulk = kwargs.pop('bulk', True)
clear = kwargs.pop('clear', False)
if self.field.null:
db = router.db_for_write(self.model, instance=self.instance)
with transaction.atomic(using=db, savepoint=False):
if clear:
self.clear()
self.add(*objs, bulk=bulk)
else:
old_objs = set(self.using(db).all())
new_objs = []
for obj in objs:
if obj in old_objs:
old_objs.remove(obj)
else:
new_objs.append(obj)
self.remove(*old_objs, bulk=bulk)
self.add(*new_objs, bulk=bulk)
else:
self.add(*objs, bulk=bulk)
set.alters_data = True
return RelatedManager
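# Usage sketch for the manager created above (hypothetical Parent/Child
# models; 'archived' is an illustrative manager name):
#
#     parent.children.add(child)           # bulk UPDATE of the FK by default
#     parent.children.create(name='x')     # INSERT with the FK preset
#     parent.children.set([a, b])          # diff-based add/remove (nullable FK)
#     parent.children(manager='archived')  # alternate manager via __call__
#
# remove() and clear() are only defined when the ForeignKey is nullable,
# since detaching an object otherwise leaves its FK nowhere to point.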
class ManyToManyDescriptor(ReverseManyToOneDescriptor):
"""
Accessor to the related objects manager on the forward and reverse sides of
a many-to-many relation.
In the example::
class Pizza(Model):
toppings = ManyToManyField(Topping, related_name='pizzas')
``pizza.toppings`` and ``topping.pizzas`` are ``ManyToManyDescriptor``
instances.
Most of the implementation is delegated to a dynamically defined manager
class built by ``create_forward_many_to_many_manager()`` defined below.
"""
def __init__(self, rel, reverse=False):
super(ManyToManyDescriptor, self).__init__(rel)
self.reverse = reverse
@property
def through(self):
# through is provided so that you have easy access to the through
# model (Book.authors.through) for inlines, etc. This is done as
# a property to ensure that the fully resolved value is returned.
return self.rel.through
@cached_property
def related_manager_cls(self):
model = self.rel.related_model if self.reverse else self.rel.model
return create_forward_many_to_many_manager(
model._default_manager.__class__,
self.rel,
reverse=self.reverse,
)
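# Direction sketch for the descriptor above (Pizza/Topping example from the
# docstring):
#
#     pizza.toppings          -> manager over Topping rows (reverse=False)
#     topping.pizzas          -> manager over Pizza rows   (reverse=True)
#     Pizza.toppings.through  -> the (usually auto-created) intermediate model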
def create_forward_many_to_many_manager(superclass, rel, reverse):
"""
    Create a manager for either side of a many-to-many relation.
This manager subclasses another manager, generally the default manager of
the related model, and adds behaviors specific to many-to-many relations.
"""
class ManyRelatedManager(superclass):
def __init__(self, instance=None):
super(ManyRelatedManager, self).__init__()
self.instance = instance
if not reverse:
self.model = rel.model
self.query_field_name = rel.field.related_query_name()
self.prefetch_cache_name = rel.field.name
self.source_field_name = rel.field.m2m_field_name()
self.target_field_name = rel.field.m2m_reverse_field_name()
self.symmetrical = rel.symmetrical
else:
self.model = rel.related_model
self.query_field_name = rel.field.name
self.prefetch_cache_name = rel.field.related_query_name()
self.source_field_name = rel.field.m2m_reverse_field_name()
self.target_field_name = rel.field.m2m_field_name()
self.symmetrical = False
self.through = rel.through
self.reverse = reverse
self.source_field = self.through._meta.get_field(self.source_field_name)
self.target_field = self.through._meta.get_field(self.target_field_name)
self.core_filters = {}
for lh_field, rh_field in self.source_field.related_fields:
core_filter_key = '%s__%s' % (self.query_field_name, rh_field.name)
self.core_filters[core_filter_key] = getattr(instance, rh_field.attname)
self.related_val = self.source_field.get_foreign_related_value(instance)
if None in self.related_val:
raise ValueError('"%r" needs to have a value for field "%s" before '
'this many-to-many relationship can be used.' %
(instance, self.source_field_name))
            # Even if this relation is not to pk, we still require a pk value.
            # The wish is that the instance has already been saved to the DB,
            # although having a pk value isn't a guarantee of that.
if instance.pk is None:
raise ValueError("%r instance needs to have a primary key value before "
"a many-to-many relationship can be used." %
instance.__class__.__name__)
def __call__(self, **kwargs):
# We use **kwargs rather than a kwarg argument to enforce the
# `manager='manager_name'` syntax.
manager = getattr(self.model, kwargs.pop('manager'))
manager_class = create_forward_many_to_many_manager(manager.__class__, rel, reverse)
return manager_class(instance=self.instance)
do_not_call_in_templates = True
def _build_remove_filters(self, removed_vals):
filters = Q(**{self.source_field_name: self.related_val})
# No need to add a subquery condition if removed_vals is a QuerySet without
# filters.
removed_vals_filters = (not isinstance(removed_vals, QuerySet) or
removed_vals._has_filters())
if removed_vals_filters:
filters &= Q(**{'%s__in' % self.target_field_name: removed_vals})
if self.symmetrical:
symmetrical_filters = Q(**{self.target_field_name: self.related_val})
if removed_vals_filters:
symmetrical_filters &= Q(
**{'%s__in' % self.source_field_name: removed_vals})
filters |= symmetrical_filters
return filters
def _apply_rel_filters(self, queryset):
"""
Filter the queryset for the instance this manager is bound to.
"""
queryset._add_hints(instance=self.instance)
if self._db:
queryset = queryset.using(self._db)
return queryset._next_is_sticky().filter(**self.core_filters)
def get_queryset(self):
try:
return self.instance._prefetched_objects_cache[self.prefetch_cache_name]
except (AttributeError, KeyError):
queryset = super(ManyRelatedManager, self).get_queryset()
return self._apply_rel_filters(queryset)
def get_prefetch_queryset(self, instances, queryset=None):
if queryset is None:
queryset = super(ManyRelatedManager, self).get_queryset()
queryset._add_hints(instance=instances[0])
queryset = queryset.using(queryset._db or self._db)
query = {'%s__in' % self.query_field_name: instances}
queryset = queryset._next_is_sticky().filter(**query)
# M2M: need to annotate the query in order to get the primary model
# that the secondary model was actually related to. We know that
# there will already be a join on the join table, so we can just add
# the select.
# For non-autocreated 'through' models, can't assume we are
# dealing with PK values.
fk = self.through._meta.get_field(self.source_field_name)
join_table = self.through._meta.db_table
connection = connections[queryset.db]
qn = connection.ops.quote_name
queryset = queryset.extra(select={
'_prefetch_related_val_%s' % f.attname:
'%s.%s' % (qn(join_table), qn(f.column)) for f in fk.local_related_fields})
return (
queryset,
lambda result: tuple(
getattr(result, '_prefetch_related_val_%s' % f.attname)
for f in fk.local_related_fields
),
lambda inst: tuple(
f.get_db_prep_value(getattr(inst, f.attname), connection)
for f in fk.foreign_related_fields
),
False,
self.prefetch_cache_name,
)
def add(self, *objs):
if not rel.through._meta.auto_created:
opts = self.through._meta
raise AttributeError(
"Cannot use add() on a ManyToManyField which specifies an "
"intermediary model. Use %s.%s's Manager instead." %
(opts.app_label, opts.object_name)
)
db = router.db_for_write(self.through, instance=self.instance)
with transaction.atomic(using=db, savepoint=False):
self._add_items(self.source_field_name, self.target_field_name, *objs)
# If this is a symmetrical m2m relation to self, add the mirror entry in the m2m table
if self.symmetrical:
self._add_items(self.target_field_name, self.source_field_name, *objs)
add.alters_data = True
def remove(self, *objs):
if not rel.through._meta.auto_created:
opts = self.through._meta
raise AttributeError(
"Cannot use remove() on a ManyToManyField which specifies "
"an intermediary model. Use %s.%s's Manager instead." %
(opts.app_label, opts.object_name)
)
self._remove_items(self.source_field_name, self.target_field_name, *objs)
remove.alters_data = True
def clear(self):
db = router.db_for_write(self.through, instance=self.instance)
with transaction.atomic(using=db, savepoint=False):
signals.m2m_changed.send(sender=self.through, action="pre_clear",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=None, using=db)
filters = self._build_remove_filters(super(ManyRelatedManager, self).get_queryset().using(db))
self.through._default_manager.using(db).filter(filters).delete()
signals.m2m_changed.send(sender=self.through, action="post_clear",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=None, using=db)
clear.alters_data = True
def set(self, objs, **kwargs):
if not rel.through._meta.auto_created:
opts = self.through._meta
raise AttributeError(
"Cannot set values on a ManyToManyField which specifies an "
"intermediary model. Use %s.%s's Manager instead." %
(opts.app_label, opts.object_name)
)
# Force evaluation of `objs` in case it's a queryset whose value
# could be affected by `manager.clear()`. Refs #19816.
objs = tuple(objs)
clear = kwargs.pop('clear', False)
db = router.db_for_write(self.through, instance=self.instance)
with transaction.atomic(using=db, savepoint=False):
if clear:
self.clear()
self.add(*objs)
else:
old_ids = set(self.using(db).values_list(self.target_field.target_field.attname, flat=True))
new_objs = []
for obj in objs:
fk_val = (self.target_field.get_foreign_related_value(obj)[0]
if isinstance(obj, self.model) else obj)
if fk_val in old_ids:
old_ids.remove(fk_val)
else:
new_objs.append(obj)
self.remove(*old_ids)
self.add(*new_objs)
set.alters_data = True
def create(self, **kwargs):
# This check needs to be done here, since we can't later remove this
# from the method lookup table, as we do with add and remove.
if not self.through._meta.auto_created:
opts = self.through._meta
raise AttributeError(
"Cannot use create() on a ManyToManyField which specifies "
"an intermediary model. Use %s.%s's Manager instead." %
(opts.app_label, opts.object_name)
)
db = router.db_for_write(self.instance.__class__, instance=self.instance)
new_obj = super(ManyRelatedManager, self.db_manager(db)).create(**kwargs)
self.add(new_obj)
return new_obj
create.alters_data = True
def get_or_create(self, **kwargs):
db = router.db_for_write(self.instance.__class__, instance=self.instance)
obj, created = super(ManyRelatedManager, self.db_manager(db)).get_or_create(**kwargs)
# We only need to add() if created because if we got an object back
# from get() then the relationship already exists.
if created:
self.add(obj)
return obj, created
get_or_create.alters_data = True
def update_or_create(self, **kwargs):
db = router.db_for_write(self.instance.__class__, instance=self.instance)
obj, created = super(ManyRelatedManager, self.db_manager(db)).update_or_create(**kwargs)
# We only need to add() if created because if we got an object back
# from get() then the relationship already exists.
if created:
self.add(obj)
return obj, created
update_or_create.alters_data = True
def _add_items(self, source_field_name, target_field_name, *objs):
# source_field_name: the PK fieldname in join table for the source object
# target_field_name: the PK fieldname in join table for the target object
# *objs - objects to add. Either object instances, or primary keys of object instances.
# If there aren't any objects, there is nothing to do.
from django.db.models import Model
if objs:
new_ids = set()
for obj in objs:
if isinstance(obj, self.model):
if not router.allow_relation(obj, self.instance):
raise ValueError(
'Cannot add "%r": instance is on database "%s", value is on database "%s"' %
(obj, self.instance._state.db, obj._state.db)
)
fk_val = self.through._meta.get_field(
target_field_name).get_foreign_related_value(obj)[0]
if fk_val is None:
raise ValueError(
'Cannot add "%r": the value for field "%s" is None' %
(obj, target_field_name)
)
new_ids.add(fk_val)
elif isinstance(obj, Model):
raise TypeError(
"'%s' instance expected, got %r" %
(self.model._meta.object_name, obj)
)
else:
new_ids.add(obj)
db = router.db_for_write(self.through, instance=self.instance)
vals = (self.through._default_manager.using(db)
.values_list(target_field_name, flat=True)
.filter(**{
source_field_name: self.related_val[0],
'%s__in' % target_field_name: new_ids,
}))
new_ids = new_ids - set(vals)
with transaction.atomic(using=db, savepoint=False):
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are inserting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=self.through, action='pre_add',
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=new_ids, using=db)
# Add the ones that aren't there already
self.through._default_manager.using(db).bulk_create([
self.through(**{
'%s_id' % source_field_name: self.related_val[0],
'%s_id' % target_field_name: obj_id,
})
for obj_id in new_ids
])
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are inserting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=self.through, action='post_add',
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=new_ids, using=db)
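        # add() flow sketch for an auto-created through model: the block
        # above filters out pairs that already exist, bulk-creates the
        # missing (source_id, target_id) rows, and wraps pre_add/post_add
        # signals around the insert inside a single atomic block.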
def _remove_items(self, source_field_name, target_field_name, *objs):
# source_field_name: the PK colname in join table for the source object
# target_field_name: the PK colname in join table for the target object
# *objs - objects to remove
if not objs:
return
# Check that all the objects are of the right type
old_ids = set()
for obj in objs:
if isinstance(obj, self.model):<|fim▁hole|> old_ids.add(obj)
db = router.db_for_write(self.through, instance=self.instance)
with transaction.atomic(using=db, savepoint=False):
# Send a signal to the other end if need be.
signals.m2m_changed.send(sender=self.through, action="pre_remove",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=old_ids, using=db)
target_model_qs = super(ManyRelatedManager, self).get_queryset()
if target_model_qs._has_filters():
old_vals = target_model_qs.using(db).filter(**{
'%s__in' % self.target_field.target_field.attname: old_ids})
else:
old_vals = old_ids
filters = self._build_remove_filters(old_vals)
self.through._default_manager.using(db).filter(filters).delete()
signals.m2m_changed.send(sender=self.through, action="post_remove",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=old_ids, using=db)
return ManyRelatedManager<|fim▁end|>
|
fk_val = self.target_field.get_foreign_related_value(obj)[0]
old_ids.add(fk_val)
else:
|
<|file_name|>git.go<|end_file_name|><|fim▁begin|>package vcs
import (
"bytes"
"fmt"
"io"
"log"
"os/exec"
"path/filepath"
"strings"
)
const defaultRef = "master"
func init() {
Register(newGit, "git")
}
type GitDriver struct{}
func newGit(b []byte) (Driver, error) {
return &GitDriver{}, nil
}
func (g *GitDriver) HeadRev(dir string) (string, error) {
cmd := exec.Command(
"git",
"rev-parse",
"HEAD")
cmd.Dir = dir
r, err := cmd.StdoutPipe()
if err != nil {
return "", err
}
defer r.Close()
if err := cmd.Start(); err != nil {
return "", err
}
var buf bytes.Buffer
if _, err := io.Copy(&buf, r); err != nil {
return "", err<|fim▁hole|> }
return strings.TrimSpace(buf.String()), cmd.Wait()
}
func run(desc, dir, cmd string, args ...string) error {
c := exec.Command(cmd, args...)
c.Dir = dir
if out, err := c.CombinedOutput(); err != nil {
log.Printf(
"Failed to %s %s, see output below\n%sContinuing...",
desc,
dir,
out)
return err
}
return nil
}
func (g *GitDriver) Pull(dir string) (string, error) {
if err := run("git fetch", dir,
"git",
"fetch",
"--prune",
"--no-tags",
"--depth", "1",
"origin",
fmt.Sprintf("+%s:remotes/origin/%s", defaultRef, defaultRef)); err != nil {
return "", err
}
if err := run("git reset", dir,
"git",
"reset",
"--hard",
fmt.Sprintf("origin/%s", defaultRef)); err != nil {
return "", err
}
return g.HeadRev(dir)
}
func (g *GitDriver) Clone(dir, url string) (string, error) {
par, rep := filepath.Split(dir)
cmd := exec.Command(
"git",
"clone",
"--depth", "1",
url,
rep)
cmd.Dir = par
out, err := cmd.CombinedOutput()
if err != nil {
log.Printf("Failed to clone %s, see output below\n%sContinuing...", url, out)
return "", err
}
return g.HeadRev(dir)
}
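// Usage sketch (hypothetical paths and URL; newGit currently ignores its
// config bytes):
//
//	drv, _ := newGit(nil)
//	rev, err := drv.Clone("/data/repos/hound", "https://example.com/hound.git")
//	// later, refresh the shallow copy:
//	rev, err = drv.Pull("/data/repos/hound") // fetch --depth 1 + hard reset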
func (g *GitDriver) SpecialFiles() []string {
return []string{
".git",
}
}<|fim▁end|>
| |
<|file_name|>ReplyActivity.java<|end_file_name|><|fim▁begin|>package bolianeducation.bolianchild.view;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.support.annotation.Nullable;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.TextUtils;
import android.view.View;
import android.widget.EditText;
import android.widget.TextView;
import com.andview.refreshview.XRefreshView;
import com.google.gson.reflect.TypeToken;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import bolianeducation.bolianchild.R;
import bolianeducation.bolianchild.adapter.ReplyAdapter;
import bolianeducation.bolianchild.api.BLService;
import bolianeducation.bolianchild.camera.CameraActivity;
import bolianeducation.bolianchild.camera.PopupWindowHelper;
import bolianeducation.bolianchild.custom.ProgressDialog;
import bolianeducation.bolianchild.manager.ActivityManage;
import bolianeducation.bolianchild.manager.InfoManager;
import bolianeducation.bolianchild.modle.CommentResponse;
import bolianeducation.bolianchild.modle.PageEntity;
import bolianeducation.bolianchild.modle.ReplyEntity;
import bolianeducation.bolianchild.modle.ResponseEntity;
import bolianeducation.bolianchild.utils.GsonUtils;
import bolianeducation.bolianchild.utils.LogManager;
import bolianeducation.bolianchild.utils.ToastUtil;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.OnClick;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
import static bolianeducation.bolianchild.KeyList.IKEY_REPLY_ENTITY;
import static bolianeducation.bolianchild.KeyList.IKEY_TITLE;
/**
* Created by admin on 2017/8/10.
 * Shows all replies to a single comment.
*/
public class ReplyActivity extends CameraActivity {
@BindView(R.id.recycler_view)
RecyclerView recyclerView;
@BindView(R.id.xrefreshview)
XRefreshView xRefreshView;
@BindView(R.id.tv_right)
TextView tvRight;
@BindView(R.id.et_content)
EditText etContent;
    List<ReplyEntity> replyList = new ArrayList<>();
int pageNo = 1;
int pageSize = 15;
ReplyAdapter adapter;
CommentResponse.Comment taskReply;
String title;
private ProgressDialog mDialog;
PopupWindowHelper popupWindowHelper;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_reply);
ButterKnife.bind(this);
if (getIntent() != null) {
taskReply = (CommentResponse.Comment) getIntent().getSerializableExtra(IKEY_REPLY_ENTITY);
title = getIntent().getStringExtra(IKEY_TITLE);
}
init();
}
@Override
public void onPicture(File pictureFile, String picturePath, Bitmap pictureBitmap) {
if (picturePath == null) return;
requestUploadImg(pictureFile);
}
    // Define the Handler object
private Handler handler = new Handler() {
@Override
        // Runs whenever a message is posted to this Handler
public void handleMessage(Message msg) {
super.handleMessage(msg);
            // Update the UI
requestAddReply((List) msg.obj);
}
};
ArrayList<String> imgIds = new ArrayList<>();
/**
     * Upload an image
*/
private void requestUploadImg(final File file) {
mDialog.show();
imgIds.clear();
new Thread() {
@Override
public void run() {
                // The work to run off the UI thread
Call<ResponseEntity> uploadImg = BLService.getHeaderService().uploadImg(BLService.createFilePart("imgFile", file));
try {
ResponseEntity body = uploadImg.execute().body();
if (body != null) {
imgIds.add(body.getEntityId());
}
LogManager.e("tag", GsonUtils.toJson(body));
} catch (IOException e) {
e.printStackTrace();
}
                // When done, post a message back to the handler
Message message = new Message();
message.obj = imgIds;
handler.sendMessage(message);
}
}.start();
}
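    // Flow sketch for the upload above: the background thread posts the file,
    // collects the returned entity id into imgIds, and hands the list back to
    // the UI thread through `handler`, whose handleMessage() ends up calling
    // requestAddReply(imgIds).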
private void init() {
        // Carried over from the previous screen
setTitle(title);
setTvBackVisible(true);
tvRight.setVisibility(View.VISIBLE);
tvRight.setText(R.string.report);
mDialog = ProgressDialog.createDialog(context, "正在上传图片");
initPopupWindow();
xRefreshView.setAutoRefresh(true);
xRefreshView.setPullLoadEnable(true);
xRefreshView.setAutoLoadMore(false);
xRefreshView.setPinnedTime(500);
xRefreshView.setXRefreshViewListener(new XRefreshView.SimpleXRefreshListener() {
@Override
public void onRefresh(boolean isPullDown) {
pageNo = 1;
requestAllReplyList();
}
@Override
public void onLoadMore(boolean isSilence) {
// if (pageNo >= dataBeanX.getTotalPages()) {
// return;
// }
pageNo++;
requestAllReplyList();
}
});
recyclerView.setLayoutManager(new LinearLayoutManager(this));
        replyList.add(new ReplyEntity()); // placeholder for the header item
adapter = new ReplyAdapter(context, taskReply, replyList);
recyclerView.setAdapter(adapter);
}
/**
     * Initialize the PopupWindow shown after the avatar is tapped
*/
public void initPopupWindow() {
popupWindowHelper = new PopupWindowHelper(context);
popupWindowHelper.initPopup();
        // Take a photo
popupWindowHelper.setOnTakePhotoListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
openCamera();
}
});
        // Pick from the photo album
popupWindowHelper.setTakeAlbumListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
openAlbum();
}
});
}
PageEntity pageEntity;
    // Request all replies to a given comment
private void requestAllReplyList() {
Call<ResponseEntity> commentList = BLService.getHeaderService().getAllReplyById(taskReply.getId(), pageNo, pageSize);
commentList.enqueue(new Callback<ResponseEntity>() {
@Override
public void onResponse(Call<ResponseEntity> call, Response<ResponseEntity> response) {
ResponseEntity body = response.body();
LogManager.e("tag", GsonUtils.toJson(body));
if (body == null) return;
pageEntity = GsonUtils.fromJson(GsonUtils.toJson(body.getData()), new TypeToken<PageEntity<ReplyEntity>>() {
}.getType());
updateUI();
}
@Override
public void onFailure(Call<ResponseEntity> call, Throwable t) {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (pageNo > 1) {
pageNo--;
}
onLoadComplete();
}
});
}
});
}
    // Update the UI
private void updateUI() {
runOnUiThread(new Runnable() {
@Override
public void run() {
                // Configure the refresh state
// if (pageNo >= dataBeanX.getTotalPages()) {
// xRefreshView.setPullLoadEnable(true);
// } else {
// xRefreshView.setPullLoadEnable(false);
// }
if (pageNo == 1) {
replyList.clear();
                    replyList.add(new ReplyEntity()); // placeholder for the header item
}
onLoadComplete();
if (pageEntity.getData() == null) return;
replyList.addAll(pageEntity.getData());
adapter.notifyDataSetChanged();
}
});
}
    // Loading finished
protected void onLoadComplete() {
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
// ToastUtil.showToast(CommunityWelfareActivity.this, "刷新首页数据了啊。。。");
xRefreshView.stopRefresh();
xRefreshView.stopLoadMore();
}
}, 500);
}
String content;
@OnClick({R.id.iv_send, R.id.tv_send, R.id.tv_right})
void onClick(View v) {
switch (v.getId()) {
case R.id.iv_send:
if (popupWindowHelper.isShowing()) {
popupWindowHelper.dismiss();
} else {
popupWindowHelper.show(v);
}
break;
case R.id.tv_send:
content = etContent.getText().toString();
if (TextUtils.isEmpty(content)) {<|fim▁hole|> return;
}
requestAddReply(null);
break;
case R.id.tv_right:
                // Report
ActivityManage.startReportActivity(this, taskReply.getId());
break;
}
}
/**
     * Request to add a reply
*/
private void requestAddReply(List<String> imgs) {
final ReplyEntity replyEntity = new ReplyEntity();
replyEntity.setTaskActionId(taskReply.getId());
replyEntity.setReplyTime(System.currentTimeMillis());
replyEntity.setContent(content);
replyEntity.setUserId(InfoManager.getUserId());
replyEntity.setAttachmentList(imgs);
Call<ResponseEntity> addReply = BLService.getHeaderService().addReply(replyEntity);
addReply.enqueue(new Callback<ResponseEntity>() {
@Override
public void onResponse(Call<ResponseEntity> call, Response<ResponseEntity> response) {
mDialog.dismiss();
ResponseEntity body = response.body();
if (body == null) return;
ToastUtil.showToast(ReplyActivity.this, body.getErrorMessage());
pageNo = 1;
requestAllReplyList();//刷新数据
etContent.setText("");
hideKeyBoard();
}
@Override
public void onFailure(Call<ResponseEntity> call, Throwable t) {
mDialog.dismiss();
ToastUtil.showToast(context, t.getMessage());
}
});
}
@Override
public void onDestroy() {
super.onDestroy();
if (mDialog != null) {
mDialog.dismiss();
}
if (popupWindowHelper != null) {
popupWindowHelper.dismiss();
}
}
}<|fim▁end|>
|
ToastUtil.showToast(this, "请输入回复内容");
|
<|file_name|>zfssaiscsi.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014, 2015, Oracle and/or its affiliates. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
ZFS Storage Appliance Cinder Volume Driver
"""
import ast
import math
from oslo_config import cfg
from oslo_log import log
from oslo_serialization import base64
from oslo_utils import units
import six
from cinder import exception
from cinder import utils
from cinder.i18n import _, _LE, _LI, _LW
from cinder.image import image_utils
from cinder.volume import driver
from cinder.volume.drivers.san import san
from cinder.volume.drivers.zfssa import zfssarest
from cinder.volume import volume_types
import taskflow.engines
from taskflow.patterns import linear_flow as lf
from taskflow import task
CONF = cfg.CONF
LOG = log.getLogger(__name__)
ZFSSA_OPTS = [
cfg.StrOpt('zfssa_pool',
help='Storage pool name.'),
cfg.StrOpt('zfssa_project',
help='Project name.'),
cfg.StrOpt('zfssa_lun_volblocksize', default='8k',
choices=['512', '1k', '2k', '4k', '8k', '16k', '32k', '64k',
'128k'],
help='Block size.'),
cfg.BoolOpt('zfssa_lun_sparse', default=False,
help='Flag to enable sparse (thin-provisioned): True, False.'),
cfg.StrOpt('zfssa_lun_compression', default='off',
choices=['off', 'lzjb', 'gzip-2', 'gzip', 'gzip-9'],
help='Data compression.'),
cfg.StrOpt('zfssa_lun_logbias', default='latency',
choices=['latency', 'throughput'],
help='Synchronous write bias.'),
cfg.StrOpt('zfssa_initiator_group', default='',
help='iSCSI initiator group.'),
cfg.StrOpt('zfssa_initiator', default='',
help='iSCSI initiator IQNs. (comma separated)'),
cfg.StrOpt('zfssa_initiator_user', default='',
help='iSCSI initiator CHAP user (name).'),
cfg.StrOpt('zfssa_initiator_password', default='',
help='Secret of the iSCSI initiator CHAP user.', secret=True),
cfg.StrOpt('zfssa_initiator_config', default='',
help='iSCSI initiators configuration.'),
cfg.StrOpt('zfssa_target_group', default='tgt-grp',
help='iSCSI target group name.'),
cfg.StrOpt('zfssa_target_user', default='',
help='iSCSI target CHAP user (name).'),
cfg.StrOpt('zfssa_target_password', default='', secret=True,
help='Secret of the iSCSI target CHAP user.'),
cfg.StrOpt('zfssa_target_portal',
help='iSCSI target portal (Data-IP:Port, w.x.y.z:3260).'),
cfg.StrOpt('zfssa_target_interfaces',
help='Network interfaces of iSCSI targets. (comma separated)'),
cfg.IntOpt('zfssa_rest_timeout',
help='REST connection timeout. (seconds)'),
cfg.StrOpt('zfssa_replication_ip', default='',
help='IP address used for replication data. (maybe the same as '
'data ip)'),
cfg.BoolOpt('zfssa_enable_local_cache', default=True,
help='Flag to enable local caching: True, False.'),
cfg.StrOpt('zfssa_cache_project', default='os-cinder-cache',
help='Name of ZFSSA project where cache volumes are stored.')
]
CONF.register_opts(ZFSSA_OPTS)
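# Example cinder.conf backend section for this driver (illustrative values;
# option names come from ZFSSA_OPTS above plus the san_* options appended in
# the driver's __init__):
#
#     [zfssa-iscsi]
#     volume_driver = cinder.volume.drivers.zfssa.zfssaiscsi.ZFSSAISCSIDriver
#     san_ip = 10.0.0.10
#     san_login = admin
#     san_password = secret
#     zfssa_pool = pool-0
#     zfssa_project = cinder
#     zfssa_target_portal = 10.0.0.11:3260
#     zfssa_target_interfaces = e1000g0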
ZFSSA_LUN_SPECS = {
'zfssa:volblocksize',
'zfssa:sparse',
'zfssa:compression',
'zfssa:logbias',
}
def factory_zfssa():
return zfssarest.ZFSSAApi()
class ZFSSAISCSIDriver(driver.ISCSIDriver):
"""ZFSSA Cinder iSCSI volume driver.
Version history:
1.0.1:
Backend enabled volume migration.
Local cache feature.
"""
VERSION = '1.0.1'
protocol = 'iSCSI'
def __init__(self, *args, **kwargs):
super(ZFSSAISCSIDriver, self).__init__(*args, **kwargs)
self.configuration.append_config_values(ZFSSA_OPTS)
self.configuration.append_config_values(san.san_opts)
self.zfssa = None
self.tgt_zfssa = None
self._stats = None
self.tgtiqn = None
def _get_target_alias(self):
"""return target alias."""
return self.configuration.zfssa_target_group
def do_setup(self, context):
"""Setup - create multiple elements.
Project, initiators, initiatorgroup, target and targetgroup.
"""
lcfg = self.configuration
LOG.info(_LI('Connecting to host: %s.'), lcfg.san_ip)
self.zfssa = factory_zfssa()
self.tgt_zfssa = factory_zfssa()
self.zfssa.set_host(lcfg.san_ip, timeout=lcfg.zfssa_rest_timeout)
auth_str = '%s:%s' % (lcfg.san_login, lcfg.san_password)
auth_str = base64.encode_as_text(auth_str)[:-1]
self.zfssa.login(auth_str)
self.zfssa.create_project(lcfg.zfssa_pool, lcfg.zfssa_project,
compression=lcfg.zfssa_lun_compression,
logbias=lcfg.zfssa_lun_logbias)
if lcfg.zfssa_enable_local_cache:
self.zfssa.create_project(lcfg.zfssa_pool,
lcfg.zfssa_cache_project,
compression=lcfg.zfssa_lun_compression,
logbias=lcfg.zfssa_lun_logbias)
schemas = [
{'property': 'image_id',
'description': 'OpenStack image ID',
'type': 'String'},
{'property': 'updated_at',
'description': 'Most recent updated time of image',
'type': 'String'}]
self.zfssa.create_schemas(schemas)
if (lcfg.zfssa_initiator_config != ''):
initiator_config = ast.literal_eval(lcfg.zfssa_initiator_config)
for initiator_group in initiator_config:
zfssa_initiator_group = initiator_group
for zfssa_initiator in initiator_config[zfssa_initiator_group]:
self.zfssa.create_initiator(zfssa_initiator['iqn'],
zfssa_initiator_group + '-' +
zfssa_initiator['iqn'],
chapuser=
zfssa_initiator['user'],
chapsecret=
zfssa_initiator['password'])
if (zfssa_initiator_group != 'default'):
self.zfssa.add_to_initiatorgroup(
zfssa_initiator['iqn'],
zfssa_initiator_group)
else:
LOG.warning(_LW('zfssa_initiator_config not found. '
'Using deprecated configuration options.'))
if (lcfg.zfssa_initiator != '' and
(lcfg.zfssa_initiator_group == '' or
lcfg.zfssa_initiator_group == 'default')):
LOG.warning(_LW('zfssa_initiator: %(ini)s'
                            " won't be used on "
'zfssa_initiator_group= %(inigrp)s.'),
{'ini': lcfg.zfssa_initiator,
'inigrp': lcfg.zfssa_initiator_group})
# Setup initiator and initiator group
if (lcfg.zfssa_initiator != '' and
lcfg.zfssa_initiator_group != '' and
lcfg.zfssa_initiator_group != 'default'):
for initiator in lcfg.zfssa_initiator.split(','):
self.zfssa.create_initiator(
initiator, lcfg.zfssa_initiator_group + '-' +
initiator, chapuser=lcfg.zfssa_initiator_user,
chapsecret=lcfg.zfssa_initiator_password)
self.zfssa.add_to_initiatorgroup(
initiator, lcfg.zfssa_initiator_group)
# Parse interfaces
interfaces = []
for interface in lcfg.zfssa_target_interfaces.split(','):
if interface == '':
continue
interfaces.append(interface)
# Setup target and target group
iqn = self.zfssa.create_target(
self._get_target_alias(),
interfaces,
tchapuser=lcfg.zfssa_target_user,
tchapsecret=lcfg.zfssa_target_password)
self.zfssa.add_to_targetgroup(iqn, lcfg.zfssa_target_group)
def check_for_setup_error(self):
"""Check that driver can login.
Check also pool, project, initiators, initiatorgroup, target and
targetgroup.
"""
lcfg = self.configuration
self.zfssa.verify_pool(lcfg.zfssa_pool)
self.zfssa.verify_project(lcfg.zfssa_pool, lcfg.zfssa_project)
if (lcfg.zfssa_initiator_config != ''):
initiator_config = ast.literal_eval(lcfg.zfssa_initiator_config)
for initiator_group in initiator_config:
zfssa_initiator_group = initiator_group
for zfssa_initiator in initiator_config[zfssa_initiator_group]:
self.zfssa.verify_initiator(zfssa_initiator['iqn'])
else:
if (lcfg.zfssa_initiator != '' and
lcfg.zfssa_initiator_group != '' and
lcfg.zfssa_initiator_group != 'default'):
for initiator in lcfg.zfssa_initiator.split(','):
self.zfssa.verify_initiator(initiator)
self.zfssa.verify_target(self._get_target_alias())
def _get_provider_info(self, volume, lun=None):
"""Return provider information."""
lcfg = self.configuration
project = lcfg.zfssa_project
if ((lcfg.zfssa_enable_local_cache is True) and
(volume['name'].startswith('os-cache-vol-'))):
project = lcfg.zfssa_cache_project
if lun is None:
lun = self.zfssa.get_lun(lcfg.zfssa_pool,
project,
volume['name'])
if isinstance(lun['number'], list):
lun['number'] = lun['number'][0]
if self.tgtiqn is None:
self.tgtiqn = self.zfssa.get_target(self._get_target_alias())
loc = "%s %s %s" % (lcfg.zfssa_target_portal, self.tgtiqn,
lun['number'])
LOG.debug('_get_provider_info: provider_location: %s', loc)
provider = {'provider_location': loc}
if lcfg.zfssa_target_user != '' and lcfg.zfssa_target_password != '':
provider['provider_auth'] = ('CHAP %s %s' %
(lcfg.zfssa_target_user,
lcfg.zfssa_target_password))
return provider
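    # Shape of the returned dict (illustrative values):
    #
    #     {'provider_location': '10.0.0.11:3260 iqn.1986-03.com.sun:02:... 0',
    #      'provider_auth': 'CHAP tgtuser tgtsecret'}  # only when CHAP is set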
def create_volume(self, volume):
"""Create a volume on ZFSSA."""
LOG.debug('zfssa.create_volume: volume=' + volume['name'])
lcfg = self.configuration
volsize = str(volume['size']) + 'g'
specs = self._get_voltype_specs(volume)
self.zfssa.create_lun(lcfg.zfssa_pool,
lcfg.zfssa_project,
volume['name'],
volsize,
lcfg.zfssa_target_group,
specs)
def delete_volume(self, volume):
"""Deletes a volume with the given volume['name']."""
LOG.debug('zfssa.delete_volume: name=%s', volume['name'])
lcfg = self.configuration
try:
lun2del = self.zfssa.get_lun(lcfg.zfssa_pool,
lcfg.zfssa_project,
volume['name'])
except exception.VolumeBackendAPIException as ex:
# NOTE(jdg): This will log an error and continue
# if for some reason the volume no longer exists
# on the backend
if 'Error Getting Volume' in ex.message:
LOG.error(_LE("Volume ID %s was not found on "
"the zfssa device while attempting "
"delete_volume operation."), volume['id'])
return
# Delete clone temp snapshot. see create_cloned_volume()
if 'origin' in lun2del and 'id' in volume:
if lun2del['nodestroy']:
self.zfssa.set_lun_props(lcfg.zfssa_pool,
lcfg.zfssa_project,
volume['name'],
nodestroy=False)
tmpsnap = 'tmp-snapshot-%s' % volume['id']
if lun2del['origin']['snapshot'] == tmpsnap:
self.zfssa.delete_snapshot(lcfg.zfssa_pool,
lcfg.zfssa_project,
lun2del['origin']['share'],
lun2del['origin']['snapshot'])
return
self.zfssa.delete_lun(pool=lcfg.zfssa_pool,
project=lcfg.zfssa_project,
lun=volume['name'])
if ('origin' in lun2del and
lun2del['origin']['project'] == lcfg.zfssa_cache_project):
self._check_origin(lun2del, volume['name'])
def create_snapshot(self, snapshot):
"""Creates a snapshot of a volume.
Snapshot name: snapshot['name']
Volume name: snapshot['volume_name']
"""
LOG.debug('zfssa.create_snapshot: snapshot=%s', snapshot['name'])
lcfg = self.configuration
self.zfssa.create_snapshot(lcfg.zfssa_pool,
lcfg.zfssa_project,
snapshot['volume_name'],
snapshot['name'])
def delete_snapshot(self, snapshot):
"""Deletes a snapshot."""
LOG.debug('zfssa.delete_snapshot: snapshot=%s', snapshot['name'])
lcfg = self.configuration
numclones = self.zfssa.num_clones(lcfg.zfssa_pool,
lcfg.zfssa_project,
snapshot['volume_name'],
snapshot['name'])
if numclones > 0:
LOG.error(_LE('Snapshot %s: has clones'), snapshot['name'])
raise exception.SnapshotIsBusy(snapshot_name=snapshot['name'])
self.zfssa.delete_snapshot(lcfg.zfssa_pool,
lcfg.zfssa_project,
snapshot['volume_name'],
snapshot['name'])
def create_volume_from_snapshot(self, volume, snapshot):
"""Creates a volume from a snapshot - clone a snapshot."""
LOG.debug('zfssa.create_volume_from_snapshot: volume=%s',
volume['name'])
LOG.debug('zfssa.create_volume_from_snapshot: snapshot=%s',
snapshot['name'])<|fim▁hole|> 'Snapshot: %(snapshot)s')
% {'clone': volume['name'],
'size': volume['size'],
'snapshot': snapshot['name']})
LOG.error(exception_msg)
raise exception.InvalidInput(reason=exception_msg)
lcfg = self.configuration
self.zfssa.clone_snapshot(lcfg.zfssa_pool,
lcfg.zfssa_project,
snapshot['volume_name'],
snapshot['name'],
lcfg.zfssa_project,
volume['name'])
def _update_volume_status(self):
"""Retrieve status info from volume group."""
LOG.debug("Updating volume status")
self._stats = None
data = {}
backend_name = self.configuration.safe_get('volume_backend_name')
data["volume_backend_name"] = backend_name or self.__class__.__name__
data["vendor_name"] = 'Oracle'
data["driver_version"] = self.VERSION
data["storage_protocol"] = self.protocol
lcfg = self.configuration
(avail, total) = self.zfssa.get_project_stats(lcfg.zfssa_pool,
lcfg.zfssa_project)
if avail is None or total is None:
return
host = lcfg.san_ip
pool = lcfg.zfssa_pool
project = lcfg.zfssa_project
auth_str = '%s:%s' % (lcfg.san_login, lcfg.san_password)
auth_str = base64.encode_as_text(auth_str)[:-1]
zfssa_tgt_group = lcfg.zfssa_target_group
repl_ip = lcfg.zfssa_replication_ip
data['location_info'] = "%s:%s:%s:%s:%s:%s" % (host, auth_str, pool,
project,
zfssa_tgt_group,
repl_ip)
data['total_capacity_gb'] = int(total) / units.Gi
data['free_capacity_gb'] = int(avail) / units.Gi
data['reserved_percentage'] = 0
data['QoS_support'] = False
self._stats = data
def get_volume_stats(self, refresh=False):
"""Get volume status.
If 'refresh' is True, run update the stats first.
"""
if refresh:
self._update_volume_status()
return self._stats
def create_export(self, context, volume, connector):
pass
def remove_export(self, context, volume):
pass
def ensure_export(self, context, volume):
pass
def extend_volume(self, volume, new_size):
"""Driver entry point to extent volume size."""
LOG.debug('extend_volume: volume name: %s', volume['name'])
lcfg = self.configuration
self.zfssa.set_lun_props(lcfg.zfssa_pool,
lcfg.zfssa_project,
volume['name'],
volsize=new_size * units.Gi)
def create_cloned_volume(self, volume, src_vref):
"""Create a clone of the specified volume."""
zfssa_snapshot = {'volume_name': src_vref['name'],
'name': 'tmp-snapshot-%s' % volume['id']}
self.create_snapshot(zfssa_snapshot)
try:
self.create_volume_from_snapshot(volume, zfssa_snapshot)
except exception.VolumeBackendAPIException:
LOG.error(_LE('Clone Volume:'
'%(volume)s failed from source volume:'
'%(src_vref)s'),
{'volume': volume['name'],
'src_vref': src_vref['name']})
# Cleanup snapshot
self.delete_snapshot(zfssa_snapshot)
@utils.synchronized('zfssaiscsi', external=True)
def clone_image(self, context, volume,
image_location, image_meta,
image_service):
"""Create a volume efficiently from an existing image.
Verify the image ID being used:
(1) If there is no existing cache volume, create one and transfer
image data to it. Take a snapshot.
        (2) If a cache volume already exists, verify if it is either altered
        or updated. If so, try to remove it; raise an exception if removal fails.
Create a new cache volume as in (1).
Clone a volume from the cache volume and returns it to Cinder.
A file lock is placed on this method to prevent:
(a) a race condition when a cache volume has been verified, but then
gets deleted before it is cloned.
(b) failure of subsequent clone_image requests if the first request is
still pending.
"""
LOG.debug('Cloning image %(image)s to volume %(volume)s',
{'image': image_meta['id'], 'volume': volume['name']})
lcfg = self.configuration
cachevol_size = 0
if not lcfg.zfssa_enable_local_cache:
return None, False
with image_utils.TemporaryImages.fetch(image_service,
context,
image_meta['id']) as tmp_image:
info = image_utils.qemu_img_info(tmp_image)
cachevol_size = int(math.ceil(float(info.virtual_size) / units.Gi))
if cachevol_size > volume['size']:
exception_msg = (_LE('Image size %(img_size)dGB is larger '
'than volume size %(vol_size)dGB.'),
{'img_size': cachevol_size,
'vol_size': volume['size']})
LOG.error(exception_msg)
return None, False
specs = self._get_voltype_specs(volume)
cachevol_props = {'size': cachevol_size}
try:
cache_vol, cache_snap = self._verify_cache_volume(context,
image_meta,
image_service,
specs,
cachevol_props)
# A cache volume and a snapshot should be ready by now
# Create a clone from the cache volume
self.zfssa.clone_snapshot(lcfg.zfssa_pool,
lcfg.zfssa_cache_project,
cache_vol,
cache_snap,
lcfg.zfssa_project,
volume['name'])
if cachevol_size < volume['size']:
self.extend_volume(volume, volume['size'])
except exception.VolumeBackendAPIException as exc:
            LOG.error(_LE('Cannot clone image %(image)s to '
                          'volume %(volume)s. Error: %(error)s.'),
                      {'volume': volume['name'],
                       'image': image_meta['id'],
                       'error': exc.message})
return None, False
return None, True
def _verify_cache_volume(self, context, img_meta,
img_service, specs, cachevol_props):
"""Verify if we have a cache volume that we want.
If we don't, create one.
If we do, check if it's been updated:
* If so, delete it and recreate a new volume
* If not, we are good.
If it's out of date, delete it and create a new one.
After the function returns, there should be a cache volume available,
ready for cloning.
"""
lcfg = self.configuration
cachevol_name = 'os-cache-vol-%s' % img_meta['id']
cachesnap_name = 'image-%s' % img_meta['id']
cachevol_meta = {
'cache_name': cachevol_name,
'snap_name': cachesnap_name,
}
cachevol_props.update(cachevol_meta)
cache_vol, cache_snap = None, None
updated_at = six.text_type(img_meta['updated_at'].isoformat())
LOG.debug('Verifying cache volume %s:', cachevol_name)
try:
cache_vol = self.zfssa.get_lun(lcfg.zfssa_pool,
lcfg.zfssa_cache_project,
cachevol_name)
if (not cache_vol.get('updated_at', None) or
not cache_vol.get('image_id', None)):
exc_msg = (_('Cache volume %s does not have required '
'properties') % cachevol_name)
LOG.error(exc_msg)
raise exception.VolumeBackendAPIException(data=exc_msg)
cache_snap = self.zfssa.get_lun_snapshot(lcfg.zfssa_pool,
lcfg.zfssa_cache_project,
cachevol_name,
cachesnap_name)
except exception.VolumeNotFound:
# There is no existing cache volume, create one:
return self._create_cache_volume(context,
img_meta,
img_service,
specs,
cachevol_props)
except exception.SnapshotNotFound:
            exception_msg = (_('Cache volume %(cache_vol)s '
                               'does not have snapshot %(cache_snap)s.') %
                             {'cache_vol': cachevol_name,
                              'cache_snap': cachesnap_name})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
# A cache volume does exist, check if it's updated:
if ((cache_vol['updated_at'] != updated_at) or
(cache_vol['image_id'] != img_meta['id'])):
# The cache volume is updated, but has clones:
if cache_snap['numclones'] > 0:
                exception_msg = (_('Cannot delete '
                                   'cache volume: %(cachevol_name)s. '
                                   'It was updated at %(updated_at)s '
                                   'and currently has %(numclones)s '
                                   'volume instances.') %
                                 {'cachevol_name': cachevol_name,
                                  'updated_at': updated_at,
                                  'numclones': cache_snap['numclones']})
LOG.error(exception_msg)
raise exception.VolumeBackendAPIException(data=exception_msg)
# The cache volume is updated, but has no clone, so we delete it
# and re-create a new one:
self.zfssa.delete_lun(lcfg.zfssa_pool,
lcfg.zfssa_cache_project,
cachevol_name)
return self._create_cache_volume(context,
img_meta,
img_service,
specs,
cachevol_props)
return cachevol_name, cachesnap_name
def _create_cache_volume(self, context, img_meta,
img_service, specs, cachevol_props):
"""Create a cache volume from an image.
Returns names of the cache volume and its snapshot.
"""
lcfg = self.configuration
cachevol_size = int(cachevol_props['size'])
lunsize = "%sg" % six.text_type(cachevol_size)
lun_props = {
'custom:image_id': img_meta['id'],
'custom:updated_at': (
six.text_type(img_meta['updated_at'].isoformat())),
}
lun_props.update(specs)
cache_vol = {
'name': cachevol_props['cache_name'],
'id': img_meta['id'],
'size': cachevol_size,
}
LOG.debug('Creating cache volume %s.', cache_vol['name'])
try:
self.zfssa.create_lun(lcfg.zfssa_pool,
lcfg.zfssa_cache_project,
cache_vol['name'],
lunsize,
lcfg.zfssa_target_group,
lun_props)
super(ZFSSAISCSIDriver, self).copy_image_to_volume(context,
cache_vol,
img_service,
img_meta['id'])
self.zfssa.create_snapshot(lcfg.zfssa_pool,
lcfg.zfssa_cache_project,
cache_vol['name'],
cachevol_props['snap_name'])
except Exception as exc:
            exc_msg = (_('Failed to create cache volume %(volume)s. '
                         'Error: %(err)s') %
                       {'volume': cache_vol['name'],
                        'err': six.text_type(exc)})
            LOG.error(exc_msg)
self.zfssa.delete_lun(lcfg.zfssa_pool,
lcfg.zfssa_cache_project,
cache_vol['name'])
raise exception.VolumeBackendAPIException(data=exc_msg)
return cachevol_props['cache_name'], cachevol_props['snap_name']
def local_path(self, volume):
"""Not implemented."""
pass
def backup_volume(self, context, backup, backup_service):
"""Not implemented."""
pass
def restore_backup(self, context, backup, volume, backup_service):
"""Not implemented."""
pass
def _verify_clone_size(self, snapshot, size):
"""Check whether the clone size is the same as the parent volume."""
lcfg = self.configuration
lun = self.zfssa.get_lun(lcfg.zfssa_pool,
lcfg.zfssa_project,
snapshot['volume_name'])
return lun['size'] == size
def initialize_connection(self, volume, connector):
lcfg = self.configuration
init_groups = self.zfssa.get_initiator_initiatorgroup(
connector['initiator'])
if ((lcfg.zfssa_enable_local_cache is True) and
(volume['name'].startswith('os-cache-vol-'))):
project = lcfg.zfssa_cache_project
else:
project = lcfg.zfssa_project
for initiator_group in init_groups:
self.zfssa.set_lun_initiatorgroup(lcfg.zfssa_pool,
project,
volume['name'],
initiator_group)
iscsi_properties = {}
provider = self._get_provider_info(volume)
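        # provider_location is assumed to be space-separated:
        # "<target_portal> <target_iqn> <lun>", e.g. (hypothetical values)
        # "10.0.0.2:3260 iqn.1986-03.com.sun:02:abcd 0".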
(target_portal, iqn, lun) = provider['provider_location'].split()
iscsi_properties['target_discovered'] = False
iscsi_properties['target_portal'] = target_portal
iscsi_properties['target_iqn'] = iqn
iscsi_properties['target_lun'] = lun
iscsi_properties['volume_id'] = volume['id']
if 'provider_auth' in provider:
(auth_method, auth_username, auth_password) = provider[
'provider_auth'].split()
iscsi_properties['auth_method'] = auth_method
iscsi_properties['auth_username'] = auth_username
iscsi_properties['auth_password'] = auth_password
return {
'driver_volume_type': 'iscsi',
'data': iscsi_properties
}
def terminate_connection(self, volume, connector, **kwargs):
"""Driver entry point to terminate a connection for a volume."""
LOG.debug('terminate_connection: volume name: %s.', volume['name'])
lcfg = self.configuration
project = lcfg.zfssa_project
if ((lcfg.zfssa_enable_local_cache is True) and
(volume['name'].startswith('os-cache-vol-'))):
project = lcfg.zfssa_cache_project
self.zfssa.set_lun_initiatorgroup(lcfg.zfssa_pool,
project,
volume['name'],
'')
def _get_voltype_specs(self, volume):
"""Get specs suitable for volume creation."""
vtype = volume.get('volume_type_id', None)
extra_specs = None
if vtype:
extra_specs = volume_types.get_volume_type_extra_specs(vtype)
return self._get_specs(extra_specs)
def _get_specs(self, xspecs):
"""Return a dict with extra specs and/or config values."""
result = {}
for spc in ZFSSA_LUN_SPECS:
val = None
prop = spc.split(':')[1]
cfg = 'zfssa_lun_' + prop
if xspecs:
val = xspecs.pop(spc, None)
if val is None:
val = self.configuration.safe_get(cfg)
if val is not None and val != '':
result.update({prop: val})
return result
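    # Illustrative mapping (assuming ZFSSA_LUN_SPECS contains an entry such
    # as 'zfssa:logbias'): an extra spec 'zfssa:logbias' wins if present;
    # otherwise the config option 'zfssa_lun_logbias' is consulted.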
def migrate_volume(self, ctxt, volume, host):
LOG.debug('Attempting ZFSSA enabled volume migration. volume: %(id)s, '
'host: %(host)s, status=%(status)s.',
{'id': volume['id'],
'host': host,
'status': volume['status']})
lcfg = self.configuration
default_ret = (False, None)
if volume['status'] != "available":
LOG.debug('Only available volumes can be migrated using backend '
'assisted migration. Defaulting to generic migration.')
return default_ret
if (host['capabilities']['vendor_name'] != 'Oracle' or
host['capabilities']['storage_protocol'] != self.protocol):
LOG.debug('Source and destination drivers need to be Oracle iSCSI '
'to use backend assisted migration. Defaulting to '
'generic migration.')
return default_ret
if 'location_info' not in host['capabilities']:
LOG.debug('Could not find location_info in capabilities reported '
'by the destination driver. Defaulting to generic '
'migration.')
return default_ret
loc_info = host['capabilities']['location_info']
try:
(tgt_host, auth_str, tgt_pool, tgt_project, tgt_tgtgroup,
tgt_repl_ip) = loc_info.split(':')
except ValueError:
LOG.error(_LE("Location info needed for backend enabled volume "
"migration not in correct format: %s. Continuing "
"with generic volume migration."), loc_info)
return default_ret
if tgt_repl_ip == '':
msg = _LE("zfssa_replication_ip not set in cinder.conf. "
"zfssa_replication_ip is needed for backend enabled "
"volume migration. Continuing with generic volume "
"migration.")
LOG.error(msg)
return default_ret
src_pool = lcfg.zfssa_pool
src_project = lcfg.zfssa_project
try:
LOG.info(_LI('Connecting to target host: %s for backend enabled '
'migration.'), tgt_host)
self.tgt_zfssa.set_host(tgt_host)
self.tgt_zfssa.login(auth_str)
# Verify that the replication service is online
try:
self.zfssa.verify_service('replication')
self.tgt_zfssa.verify_service('replication')
except exception.VolumeBackendAPIException:
return default_ret
# ensure that a target group by the same name exists on the target
# system also, if not, use default migration.
lun = self.zfssa.get_lun(src_pool, src_project, volume['name'])
if lun['targetgroup'] != tgt_tgtgroup:
return default_ret
tgt_asn = self.tgt_zfssa.get_asn()
src_asn = self.zfssa.get_asn()
# verify on the source system that the destination has been
# registered as a replication target
tgts = self.zfssa.get_replication_targets()
targets = []
for target in tgts['targets']:
if target['asn'] == tgt_asn:
targets.append(target)
if targets == []:
LOG.debug('Target host: %(host)s for volume migration '
'not configured as a replication target '
'for volume: %(vol)s.',
{'host': tgt_repl_ip,
'vol': volume['name']})
return default_ret
# Multiple ips from the same appliance may be configured
# as different targets
for target in targets:
if target['address'] == tgt_repl_ip + ':216':
break
if target['address'] != tgt_repl_ip + ':216':
LOG.debug('Target with replication ip: %s not configured on '
'the source appliance for backend enabled volume '
'migration. Proceeding with default migration.',
tgt_repl_ip)
return default_ret
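            # Build a linear taskflow: each task below implements execute()
            # and, where needed, revert(), so a failure part-way through
            # rolls back the earlier steps.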
flow = lf.Flow('zfssa_volume_migration').add(
MigrateVolumeInit(),
MigrateVolumeCreateAction(provides='action_id'),
MigrateVolumeSendReplUpdate(),
MigrateVolumeSeverRepl(),
MigrateVolumeMoveVol(),
MigrateVolumeCleanUp()
)
taskflow.engines.run(flow,
store={'driver': self,
'tgt_zfssa': self.tgt_zfssa,
'tgt_pool': tgt_pool,
'tgt_project': tgt_project,
'volume': volume, 'tgt_asn': tgt_asn,
'src_zfssa': self.zfssa,
'src_asn': src_asn,
'src_pool': src_pool,
'src_project': src_project,
'target': target})
            return (True, None)
except Exception:
LOG.error(_LE("Error migrating volume: %s"), volume['name'])
raise
def update_migrated_volume(self, ctxt, volume, new_volume,
original_volume_status):
"""Return model update for migrated volume.
:param volume: The original volume that was migrated to this backend
:param new_volume: The migration volume object that was created on
this backend as part of the migration process
:param original_volume_status: The status of the original volume
:return model_update to update DB with any needed changes
"""
lcfg = self.configuration
original_name = CONF.volume_name_template % volume['id']
current_name = CONF.volume_name_template % new_volume['id']
LOG.debug('Renaming migrated volume: %(cur)s to %(org)s',
{'cur': current_name,
'org': original_name})
self.zfssa.set_lun_props(lcfg.zfssa_pool, lcfg.zfssa_project,
current_name, name=original_name)
return {'_name_id': None}
@utils.synchronized('zfssaiscsi', external=True)
def _check_origin(self, lun, volname):
"""Verify the cache volume of a bootable volume.
        If the cache no longer has clones, it will be deleted.

        There is a small lag between the time a clone is deleted and the
        number of clones being updated accordingly. There is also a race
        condition when multiple volumes (clones of a cache volume) are
        deleted at once, leading to the number of clones being reported
        incorrectly. The file lock is here to avoid such issues.
"""
lcfg = self.configuration
cache = lun['origin']
numclones = -1
if (cache['snapshot'].startswith('image-') and
cache['share'].startswith('os-cache-vol')):
try:
numclones = self.zfssa.num_clones(lcfg.zfssa_pool,
lcfg.zfssa_cache_project,
cache['share'],
cache['snapshot'])
except Exception:
LOG.debug('Cache volume is already deleted.')
return
LOG.debug('Checking cache volume %(name)s, numclones = %(clones)d',
{'name': cache['share'], 'clones': numclones})
# Sometimes numclones still hold old values even when all clones
# have been deleted. So we handle this situation separately here:
if numclones == 1:
try:
self.zfssa.get_lun(lcfg.zfssa_pool,
lcfg.zfssa_project,
volname)
# The volume does exist, so return
return
except exception.VolumeNotFound:
# The volume is already deleted
numclones = 0
if numclones == 0:
try:
self.zfssa.delete_lun(lcfg.zfssa_pool,
lcfg.zfssa_cache_project,
cache['share'])
except exception.VolumeBackendAPIException:
LOG.warning(_LW("Volume %s exists but can't be deleted"),
cache['share'])
class MigrateVolumeInit(task.Task):
def execute(self, src_zfssa, volume, src_pool, src_project):
LOG.debug('Setting inherit flag on source backend to False.')
src_zfssa.edit_inherit_replication_flag(src_pool, src_project,
volume['name'], set=False)
def revert(self, src_zfssa, volume, src_pool, src_project, **kwargs):
LOG.debug('Rollback: Setting inherit flag on source appliance to '
'True.')
src_zfssa.edit_inherit_replication_flag(src_pool, src_project,
volume['name'], set=True)
class MigrateVolumeCreateAction(task.Task):
def execute(self, src_zfssa, volume, src_pool, src_project, target,
tgt_pool):
LOG.debug('Creating replication action on source appliance.')
action_id = src_zfssa.create_replication_action(src_pool,
src_project,
target['label'],
tgt_pool,
volume['name'])
self._action_id = action_id
return action_id
def revert(self, src_zfssa, **kwargs):
if hasattr(self, '_action_id'):
LOG.debug('Rollback: deleting replication action on source '
'appliance.')
src_zfssa.delete_replication_action(self._action_id)
class MigrateVolumeSendReplUpdate(task.Task):
def execute(self, src_zfssa, action_id):
LOG.debug('Sending replication update from source appliance.')
src_zfssa.send_repl_update(action_id)
LOG.debug('Deleting replication action on source appliance.')
src_zfssa.delete_replication_action(action_id)
self._action_deleted = True
class MigrateVolumeSeverRepl(task.Task):
def execute(self, tgt_zfssa, src_asn, action_id, driver):
source = tgt_zfssa.get_replication_source(src_asn)
if not source:
            err = (_('Source with host ip/name: %s not found on the '
                     'target appliance for backend enabled volume '
                     'migration, proceeding with default migration.') %
                   driver.configuration.san_ip)
LOG.error(err)
raise exception.VolumeBackendAPIException(data=err)
LOG.debug('Severing replication package on destination appliance.')
tgt_zfssa.sever_replication(action_id, source['name'],
project=action_id)
class MigrateVolumeMoveVol(task.Task):
def execute(self, tgt_zfssa, tgt_pool, tgt_project, action_id, volume):
LOG.debug('Moving LUN to destination project on destination '
'appliance.')
tgt_zfssa.move_volume(tgt_pool, action_id, volume['name'], tgt_project)
LOG.debug('Deleting temporary project on destination appliance.')
tgt_zfssa.delete_project(tgt_pool, action_id)
self._project_deleted = True
def revert(self, tgt_zfssa, tgt_pool, tgt_project, action_id, volume,
**kwargs):
if not hasattr(self, '_project_deleted'):
LOG.debug('Rollback: deleting temporary project on destination '
'appliance.')
tgt_zfssa.delete_project(tgt_pool, action_id)
class MigrateVolumeCleanUp(task.Task):
def execute(self, driver, volume, tgt_zfssa):
LOG.debug('Finally, delete source volume on source appliance.')
driver.delete_volume(volume)
tgt_zfssa.logout()<|fim▁end|>
|
if not self._verify_clone_size(snapshot, volume['size'] * units.Gi):
exception_msg = (_('Error verifying clone size on '
'Volume clone: %(clone)s '
'Size: %(size)d on'
|
<|file_name|>Gruntfile.js<|end_file_name|><|fim▁begin|>function initialize(grunt) {
grunt.initConfig({
pkg: grunt.file.readJSON('package.json'),
exec: {
build: {
command: 'make build'
},
// 'build-types': { command: 'make build-types' },
'build-style': { command: 'make build-style' },
'build-server': { command: 'make build-server' },
'build-client': { command: 'make build-client' },
// 'database-shell': {
// command: "echo 'mongo --username client --password testing christian.mongohq.com:10062/Beta-CitizenDish'"
// }
},
watch: {
types: {
files: [ 'types/egyptian-number-system.d.ts'],
tasks: [ 'exec:build-types'],
spawn: false
},
style: {
files: [ 'style/less/*.less', 'style/less/**/*.less','public/less/**/*.less' ],
tasks: [ 'exec:build-style'],
spawn: false
},
server: {
files: [ 'server/**/*.ts', 'server/*.ts', ],
tasks: [ 'exec:build-server' ],
spawn: false
},
client: {
files: [ 'client/**/*.ts', 'client/*.ts'],
tasks: [ 'exec:build-client' ],
spawn: false
}<|fim▁hole|> script: 'server/api.js',
options: {
ext: 'js',
watch: ['server'],
ignore: ['server/**'],
delay: 2000,
legacyWatch: false
}
},
developer: {
script: 'server/developer-api.js',
options: {
ext: 'js',
watch: ['server'],
// ignore: ['server/**'],
delay: 3000,
legacyWatch: false
}
}
        },
concurrent: {
options: {
logConcurrentOutput: true
},
developer: {
tasks: [ 'exec:build', 'nodemon:developer', 'watch:style', 'watch:server', 'watch:client' ]
// tasks: [ 'exec:build', 'nodemon:server', 'watch:types', 'watch:style', 'watch:server', 'watch:client' ]
},
application: {
tasks: [ 'exec:build', 'nodemon:application', 'watch:style', 'watch:server', 'watch:client' ]
// tasks: [ 'exec:build', 'nodemon:server', 'watch:types', 'watch:style', 'watch:server', 'watch:client' ]
}
}
    });
grunt.loadNpmTasks('grunt-exec');
grunt.loadNpmTasks('grunt-nodemon');
grunt.loadNpmTasks('grunt-contrib-watch');
grunt.loadNpmTasks('grunt-concurrent');
    grunt.registerTask('default', ['concurrent:application']);
    grunt.registerTask('developer', ['concurrent:developer']);
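    // Typical invocation (assumed from the tasks registered above):
    //   `grunt` runs concurrent:application, `grunt developer` runs
    //   concurrent:developer; each builds once, then watches sources.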
grunt.option('debug', true);
// grunt.option('force', true);
}
module.exports = initialize;<|fim▁end|>
|
},
nodemon: {
application: {
|
<|file_name|>fast-forward.d.ts<|end_file_name|><|fim▁begin|>// TypeScript Version: 2.1
<|fim▁hole|><|fim▁end|>
|
import * as React from 'react';
import { IconBaseProps } from 'react-icon-base';
export default class FaFastForward extends React.Component<IconBaseProps, any> { }
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import state
from . import unknown_subtlv
from . import unreserved_bandwidths
from . import administrative_groups
class sub_tlv(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa-types/lsa-type/lsas/lsa/opaque-lsa/traffic-engineering/tlvs/tlv/link/sub-tlvs/sub-tlv. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: The Sub-TLVs included within the Traffic Engineering
LSA's sub-TLV
"""
__slots__ = (
"_path_helper",
"_extmethods",
"__state",
"__unknown_subtlv",
"__unreserved_bandwidths",
"__administrative_groups",
)
_yang_name = "sub-tlv"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__unknown_subtlv = YANGDynClass(
base=unknown_subtlv.unknown_subtlv,
is_container="container",
yang_name="unknown-subtlv",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__unreserved_bandwidths = YANGDynClass(
base=unreserved_bandwidths.unreserved_bandwidths,
is_container="container",
yang_name="unreserved-bandwidths",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__administrative_groups = YANGDynClass(
base=administrative_groups.administrative_groups,
is_container="container",
yang_name="administrative-groups",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"ospfv2",
"areas",
"area",
"lsdb",
"lsa-types",
"lsa-type",
"lsas",
"lsa",
"opaque-lsa",
"traffic-engineering",
"tlvs",
"tlv",
"link",
"sub-tlvs",
"sub-tlv",
]
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/traffic_engineering/tlvs/tlv/link/sub_tlvs/sub_tlv/state (container)
YANG Description: State parameters of the Link Sub-TLV
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/traffic_engineering/tlvs/tlv/link/sub_tlvs/sub_tlv/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State parameters of the Link Sub-TLV
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_unknown_subtlv(self):
"""
Getter method for unknown_subtlv, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/traffic_engineering/tlvs/tlv/link/sub_tlvs/sub_tlv/unknown_subtlv (container)
    YANG Description: An unknown Sub-TLV within the context. Unknown Sub-TLVs
    are defined to be the set of Sub-TLVs that are not modelled
by the OpenConfig schema, or are unknown to the local system
such that it cannot decode their value.
"""
return self.__unknown_subtlv
def _set_unknown_subtlv(self, v, load=False):
"""
Setter method for unknown_subtlv, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/traffic_engineering/tlvs/tlv/link/sub_tlvs/sub_tlv/unknown_subtlv (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_unknown_subtlv is considered as a private
method. Backends looking to populate this variable should<|fim▁hole|>by the OpenConfig schema, or are unknown to the local system
such that it cannot decode their value.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=unknown_subtlv.unknown_subtlv,
is_container="container",
yang_name="unknown-subtlv",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """unknown_subtlv must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=unknown_subtlv.unknown_subtlv, is_container='container', yang_name="unknown-subtlv", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__unknown_subtlv = t
if hasattr(self, "_set"):
self._set()
def _unset_unknown_subtlv(self):
self.__unknown_subtlv = YANGDynClass(
base=unknown_subtlv.unknown_subtlv,
is_container="container",
yang_name="unknown-subtlv",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_unreserved_bandwidths(self):
"""
Getter method for unreserved_bandwidths, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/traffic_engineering/tlvs/tlv/link/sub_tlvs/sub_tlv/unreserved_bandwidths (container)
YANG Description: The unreserved link bandwidths for the Traffic
Engineering LSA - utilised when the sub-TLV type
indicates that the sub-TLV describes unreserved
bandwidth
"""
return self.__unreserved_bandwidths
def _set_unreserved_bandwidths(self, v, load=False):
"""
Setter method for unreserved_bandwidths, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/traffic_engineering/tlvs/tlv/link/sub_tlvs/sub_tlv/unreserved_bandwidths (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_unreserved_bandwidths is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_unreserved_bandwidths() directly.
YANG Description: The unreserved link bandwidths for the Traffic
Engineering LSA - utilised when the sub-TLV type
indicates that the sub-TLV describes unreserved
bandwidth
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=unreserved_bandwidths.unreserved_bandwidths,
is_container="container",
yang_name="unreserved-bandwidths",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """unreserved_bandwidths must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=unreserved_bandwidths.unreserved_bandwidths, is_container='container', yang_name="unreserved-bandwidths", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__unreserved_bandwidths = t
if hasattr(self, "_set"):
self._set()
def _unset_unreserved_bandwidths(self):
self.__unreserved_bandwidths = YANGDynClass(
base=unreserved_bandwidths.unreserved_bandwidths,
is_container="container",
yang_name="unreserved-bandwidths",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_administrative_groups(self):
"""
Getter method for administrative_groups, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/traffic_engineering/tlvs/tlv/link/sub_tlvs/sub_tlv/administrative_groups (container)
YANG Description: The administrative groups that are set for the
Traffic Engineering LSA - utilised when the sub-TLV type
indicates that the sub-TLV describes administrative
groups
"""
return self.__administrative_groups
def _set_administrative_groups(self, v, load=False):
"""
Setter method for administrative_groups, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/traffic_engineering/tlvs/tlv/link/sub_tlvs/sub_tlv/administrative_groups (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_administrative_groups is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_administrative_groups() directly.
YANG Description: The administrative groups that are set for the
Traffic Engineering LSA - utilised when the sub-TLV type
indicates that the sub-TLV describes administrative
groups
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=administrative_groups.administrative_groups,
is_container="container",
yang_name="administrative-groups",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """administrative_groups must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=administrative_groups.administrative_groups, is_container='container', yang_name="administrative-groups", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__administrative_groups = t
if hasattr(self, "_set"):
self._set()
def _unset_administrative_groups(self):
self.__administrative_groups = YANGDynClass(
base=administrative_groups.administrative_groups,
is_container="container",
yang_name="administrative-groups",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
state = __builtin__.property(_get_state)
unknown_subtlv = __builtin__.property(_get_unknown_subtlv)
unreserved_bandwidths = __builtin__.property(_get_unreserved_bandwidths)
administrative_groups = __builtin__.property(_get_administrative_groups)
_pyangbind_elements = OrderedDict(
[
("state", state),
("unknown_subtlv", unknown_subtlv),
("unreserved_bandwidths", unreserved_bandwidths),
("administrative_groups", administrative_groups),
]
)
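# Illustrative usage sketch (not part of the generated bindings):
#   tlv = sub_tlv()
#   tlv.state            # auto-instantiated 'state' child container
#   tlv._path()[-1]      # -> 'sub-tlv'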
from . import state
from . import unknown_subtlv
from . import unreserved_bandwidths
from . import administrative_groups
class sub_tlv(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa-types/lsa-type/lsas/lsa/opaque-lsa/traffic-engineering/tlvs/tlv/link/sub-tlvs/sub-tlv. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: The Sub-TLVs included within the Traffic Engineering
LSA's sub-TLV
"""
__slots__ = (
"_path_helper",
"_extmethods",
"__state",
"__unknown_subtlv",
"__unreserved_bandwidths",
"__administrative_groups",
)
_yang_name = "sub-tlv"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__unknown_subtlv = YANGDynClass(
base=unknown_subtlv.unknown_subtlv,
is_container="container",
yang_name="unknown-subtlv",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__unreserved_bandwidths = YANGDynClass(
base=unreserved_bandwidths.unreserved_bandwidths,
is_container="container",
yang_name="unreserved-bandwidths",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__administrative_groups = YANGDynClass(
base=administrative_groups.administrative_groups,
is_container="container",
yang_name="administrative-groups",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"ospfv2",
"areas",
"area",
"lsdb",
"lsa-types",
"lsa-type",
"lsas",
"lsa",
"opaque-lsa",
"traffic-engineering",
"tlvs",
"tlv",
"link",
"sub-tlvs",
"sub-tlv",
]
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/traffic_engineering/tlvs/tlv/link/sub_tlvs/sub_tlv/state (container)
YANG Description: State parameters of the Link Sub-TLV
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/traffic_engineering/tlvs/tlv/link/sub_tlvs/sub_tlv/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State parameters of the Link Sub-TLV
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_unknown_subtlv(self):
"""
Getter method for unknown_subtlv, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/traffic_engineering/tlvs/tlv/link/sub_tlvs/sub_tlv/unknown_subtlv (container)
    YANG Description: An unknown Sub-TLV within the context. Unknown Sub-TLVs
    are defined to be the set of Sub-TLVs that are not modelled
by the OpenConfig schema, or are unknown to the local system
such that it cannot decode their value.
"""
return self.__unknown_subtlv
def _set_unknown_subtlv(self, v, load=False):
"""
Setter method for unknown_subtlv, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/traffic_engineering/tlvs/tlv/link/sub_tlvs/sub_tlv/unknown_subtlv (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_unknown_subtlv is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_unknown_subtlv() directly.
    YANG Description: An unknown Sub-TLV within the context. Unknown Sub-TLVs
    are defined to be the set of Sub-TLVs that are not modelled
by the OpenConfig schema, or are unknown to the local system
such that it cannot decode their value.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=unknown_subtlv.unknown_subtlv,
is_container="container",
yang_name="unknown-subtlv",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """unknown_subtlv must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=unknown_subtlv.unknown_subtlv, is_container='container', yang_name="unknown-subtlv", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__unknown_subtlv = t
if hasattr(self, "_set"):
self._set()
def _unset_unknown_subtlv(self):
self.__unknown_subtlv = YANGDynClass(
base=unknown_subtlv.unknown_subtlv,
is_container="container",
yang_name="unknown-subtlv",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_unreserved_bandwidths(self):
"""
Getter method for unreserved_bandwidths, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/traffic_engineering/tlvs/tlv/link/sub_tlvs/sub_tlv/unreserved_bandwidths (container)
YANG Description: The unreserved link bandwidths for the Traffic
Engineering LSA - utilised when the sub-TLV type
indicates that the sub-TLV describes unreserved
bandwidth
"""
return self.__unreserved_bandwidths
def _set_unreserved_bandwidths(self, v, load=False):
"""
Setter method for unreserved_bandwidths, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/traffic_engineering/tlvs/tlv/link/sub_tlvs/sub_tlv/unreserved_bandwidths (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_unreserved_bandwidths is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_unreserved_bandwidths() directly.
YANG Description: The unreserved link bandwidths for the Traffic
Engineering LSA - utilised when the sub-TLV type
indicates that the sub-TLV describes unreserved
bandwidth
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=unreserved_bandwidths.unreserved_bandwidths,
is_container="container",
yang_name="unreserved-bandwidths",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """unreserved_bandwidths must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=unreserved_bandwidths.unreserved_bandwidths, is_container='container', yang_name="unreserved-bandwidths", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__unreserved_bandwidths = t
if hasattr(self, "_set"):
self._set()
def _unset_unreserved_bandwidths(self):
self.__unreserved_bandwidths = YANGDynClass(
base=unreserved_bandwidths.unreserved_bandwidths,
is_container="container",
yang_name="unreserved-bandwidths",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_administrative_groups(self):
"""
Getter method for administrative_groups, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/traffic_engineering/tlvs/tlv/link/sub_tlvs/sub_tlv/administrative_groups (container)
YANG Description: The administrative groups that are set for the
Traffic Engineering LSA - utilised when the sub-TLV type
indicates that the sub-TLV describes administrative
groups
"""
return self.__administrative_groups
def _set_administrative_groups(self, v, load=False):
"""
Setter method for administrative_groups, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/opaque_lsa/traffic_engineering/tlvs/tlv/link/sub_tlvs/sub_tlv/administrative_groups (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_administrative_groups is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_administrative_groups() directly.
YANG Description: The administrative groups that are set for the
Traffic Engineering LSA - utilised when the sub-TLV type
indicates that the sub-TLV describes administrative
groups
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=administrative_groups.administrative_groups,
is_container="container",
yang_name="administrative-groups",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """administrative_groups must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=administrative_groups.administrative_groups, is_container='container', yang_name="administrative-groups", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__administrative_groups = t
if hasattr(self, "_set"):
self._set()
def _unset_administrative_groups(self):
self.__administrative_groups = YANGDynClass(
base=administrative_groups.administrative_groups,
is_container="container",
yang_name="administrative-groups",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
state = __builtin__.property(_get_state)
unknown_subtlv = __builtin__.property(_get_unknown_subtlv)
unreserved_bandwidths = __builtin__.property(_get_unreserved_bandwidths)
administrative_groups = __builtin__.property(_get_administrative_groups)
_pyangbind_elements = OrderedDict(
[
("state", state),
("unknown_subtlv", unknown_subtlv),
("unreserved_bandwidths", unreserved_bandwidths),
("administrative_groups", administrative_groups),
]
)<|fim▁end|>
|
do so via calling thisObj._set_unknown_subtlv() directly.
    YANG Description: An unknown Sub-TLV within the context. Unknown Sub-TLVs
    are defined to be the set of Sub-TLVs that are not modelled
|
<|file_name|>structured.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The Cockroach Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License. See the AUTHORS file
// for names of contributors.
//
// Author: Spencer Kimball ([email protected])
package log
import (
"encoding/json"
"fmt"
"os"
"reflect"
"unicode/utf8"
"golang.org/x/net/context"
"github.com/cockroachdb/cockroach/roachpb"
"github.com/cockroachdb/cockroach/util/caller"
)
// AddStructured creates a structured log entry to be written to the
// specified facility of the logger.
func AddStructured(ctx context.Context, s Severity, depth int, format string, args []interface{}) {
file, line, _ := caller.Lookup(depth + 1)
entry := LogEntry{}
entry.set(ctx, format, args)
logging.outputLogEntry(s, file, line, false, &entry)
}
// getJSON returns a JSON representation of the specified argument.
// Returns nil if the type is simple and does not require a separate
// JSON representation.
func getJSON(arg interface{}) []byte {
	// Not much point in storing strings and byte slices twice, as
// they're nearly always exactly specified in the format string.
switch arg.(type) {
case string, []byte, roachpb.Key, roachpb.EncodedKey:
return nil
}
jsonBytes, err := json.Marshal(arg)
if err != nil {
return []byte(fmt.Sprintf("{\"error\": %q}", err.Error()))
}
return jsonBytes
}
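// Example (illustrative, not from the original tests): getJSON("abc")
// returns nil, while getJSON(struct{ N int }{7}) yields `{"N":7}`.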
func (entry *LogEntry) set(ctx context.Context, format string, args []interface{}) {
entry.Format, entry.Args = parseFormatWithArgs(format, args)
if ctx != nil {
for i := Field(0); i < maxField; i++ {
if v := ctx.Value(i); v != nil {
switch vTyp := v.(type) {
case roachpb.NodeID:
entry.NodeID = &vTyp
case roachpb.StoreID:
entry.StoreID = &vTyp
case roachpb.RangeID:
entry.RangeID = &vTyp
case roachpb.Method:
entry.Method = &vTyp
case roachpb.Key:
entry.Key = vTyp
}
}
}
}
}
// parseFormatWithArgs parses the format string, matching each
// format specifier with an argument from the args array.
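// For example (illustrative, not from the original source):
// parseFormatWithArgs("read %d keys at %s", args) rewrites the format
// to "read %s keys at %s" and captures one LogEntry_Arg per argument,
// each rendered with its original verb.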
func parseFormatWithArgs(format string, args []interface{}) (string, []LogEntry_Arg) {
// Process format string.
var logArgs []LogEntry_Arg
var buf []byte
var idx int
end := len(format)
for i := 0; i < end; {
lasti := i
for i < end && format[i] != '%' {
i++
}
if i > lasti {
buf = append(buf, format[lasti:i]...)
}
if i >= end {
break
}
<|fim▁hole|>
// Process one verb.
i++
F:
for ; i < end; i++ {
switch format[i] {
case '#', '0', '+', '-', ' ':
default:
break F
}
}
		// TODO(spencer): should arg numbers or dynamic precision be
		// supported? They're so rare that it's better to just panic here for now.
if i < end && (format[i] == '[' || format[i] == '*') {
panic(fmt.Sprintf("arg numbers in format not supported by logger: %s", format))
}
// Read optional width.
for ; i < end && format[i] >= '0' && format[i] <= '9'; i++ {
}
// Read optional precision.
if i < end && format[i] == '.' {
for i = i + 1; i < end && format[i] >= '0' && format[i] <= '9'; i++ {
}
}
if i >= end {
break
}
c, w := utf8.DecodeRuneInString(format[i:])
i += w
// Escape and add percent directly to format buf.
if c == '%' {
buf = append(buf, '%', '%')
continue
}
buf = append(buf, "%s"...)
// New format string always gets %s, though we use the actual
// format to generate the string here for the log argument.
if idx >= len(args) {
fmt.Fprintf(os.Stderr, "ERROR: insufficient parameters specified for format string %s", format)
return string(append(buf, format[i:]...)), logArgs
}
logArgs = append(logArgs, makeLogArg(format[start:i], args[idx]))
idx++ // advance to next arg index
}
// Add arguments which were not processed via format specifiers.
for ; idx < len(args); idx++ {
logArgs = append(logArgs, makeLogArg("%v", args[idx]))
}
return string(buf), logArgs
}
func makeLogArg(format string, arg interface{}) LogEntry_Arg {
var tstr string
if t := reflect.TypeOf(arg); t != nil {
tstr = t.String()
}
return LogEntry_Arg{
Type: tstr,
Str: fmt.Sprintf(format, arg),
Json: getJSON(arg),
}
}<|fim▁end|>
|
start := i
|
<|file_name|>LeaderBoard.java<|end_file_name|><|fim▁begin|>package com.comp.ninti.sportsmanager;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.MenuItem;
import android.widget.ListView;
import com.comp.ninti.adapter.LeaderBoardAdapter;
import com.comp.ninti.database.DbHandler;
import com.comp.ninti.general.core.Event;
public class LeaderBoard extends AppCompatActivity {
private Event event;
private DbHandler dbHandler;
private LeaderBoardAdapter leaderBoardAdapter;
private ListView listView;
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (item.getItemId() == android.R.id.home) {
super.onBackPressed();
return true;
}
return super.onOptionsItemSelected(item);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
event = getIntent().getExtras().getParcelable("com.comp.ninti.general.core.Event");
Intent intent = new Intent();
intent.putExtra("com.comp.ninti.general.core.Event", event);
setResult(RESULT_CANCELED, intent);
setContentView(R.layout.activity_leader_board);
Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
listView = (ListView) findViewById(R.id.lvLeaderBoard);
listView.setTextFilterEnabled(true);
displayItems();
}<|fim▁hole|>
    private void displayItems() {
        dbHandler = new DbHandler(LeaderBoard.this, "", null, 1);
        new Handler().post(new Runnable() {
            @Override
            public void run() {
                leaderBoardAdapter = new LeaderBoardAdapter(
                        LeaderBoard.this,
                        dbHandler.getLeaderBoard(event.getId()),
                        0);
                listView.setAdapter(leaderBoardAdapter);
                // Close after the query has run: the posted Runnable only
                // executes once the enclosing method returns, so closing
                // here (not after post()) avoids querying a closed DbHandler.
                dbHandler.close();
            }
        });
    }
@Override
protected void onResume() {
super.onResume();
displayItems();
}
}<|fim▁end|>
| |
<|file_name|>vec.rs<|end_file_name|><|fim▁begin|>use std::vec::IntoIter;
use std::marker::PhantomData;
use super::{Set, SetManager};
use super::sort::SortManager;
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub enum Error {
IndexOutOfRange { index: usize, total: usize },
}
pub struct VecSetIter<T, E> {
iter: Option<IntoIter<T>>,
_marker: PhantomData<E>,
}
impl<T, E> Iterator for VecSetIter<T, E> {
type Item = Result<T, E>;
fn next(&mut self) -> Option<Self::Item> {
if let Some(mut iter) = self.iter.take() {
match iter.next() {
None => None,
Some(value) => {
self.iter = Some(iter);
Some(Ok(value))
}
}
} else {
None
}
}
}
impl<T> Set for Vec<T> where T: Sized {
type T = T;
type E = Error;
type I = VecSetIter<Self::T, Self::E>;
fn size(&self) -> usize {
self.len()
}
fn get(&self, index: usize) -> Result<&Self::T, Self::E> {
let slice: &[T] = self;
slice.get(index).ok_or(Error::IndexOutOfRange { index: index, total: self.len(), })
}
fn add(&mut self, item: Self::T) -> Result<(), Self::E> {
self.push(item);
Ok(())
}
fn into_iter(self) -> Self::I {
VecSetIter {
iter: Some(IntoIterator::into_iter(self)),
_marker: PhantomData,
}
}
}
pub struct Manager<T> {
_marker: PhantomData<T>,
}
impl<T> Manager<T> {
pub fn new() -> Manager<T> {
Manager {
_marker: PhantomData,
}
}
}
impl<T> SetManager for Manager<T> {
type S = Vec<T>;
type E = ();
<|fim▁hole|> Some(hint) => Vec::with_capacity(hint),
None => Vec::new(),
})
}
fn reserve(&mut self, set: &mut Self::S, additional: usize) -> Result<(), Self::E> {
Ok(set.reserve(additional))
}
}
impl SortManager for Manager<usize> {
type S = Vec<usize>;
type E = ();
fn sort<SF>(&mut self, set: &mut Self::S, pred: SF) -> Result<(), Self::E> where SF: Fn(usize, usize) -> bool {
use std::cmp::Ordering;
set.sort_by(|&a, &b| if pred(a, b) {
Ordering::Less
} else {
Ordering::Greater
});
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::{Error, Manager};
use super::super::{Set, SetManager};
fn run_basic<S>(mut set: S) where S: Set<T = u8, E = Error> {
assert_eq!(set.size(), 0);
assert_eq!(set.add(0), Ok(()));
assert_eq!(set.size(), 1);
assert_eq!(set.add(1), Ok(()));
assert_eq!(set.size(), 2);
assert_eq!(set.get(0), Ok(&0));
assert_eq!(set.get(1), Ok(&1));
assert_eq!(set.get(2), Err(Error::IndexOutOfRange { index: 2, total: 2, }));
assert_eq!(set.into_iter().map(|r| r.unwrap()).collect::<Vec<_>>(), vec![0, 1]);
}
#[test]
fn basic() {
run_basic(Manager::new().make_set(None).unwrap());
}
}<|fim▁end|>
|
fn make_set(&mut self, size_hint: Option<usize>) -> Result<Self::S, Self::E> {
Ok(match size_hint {
|
<|file_name|>filereader.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::BlobBinding::BlobMethods;
use crate::dom::bindings::codegen::Bindings::FileReaderBinding::{
self, FileReaderConstants, FileReaderMethods,
};
use crate::dom::bindings::codegen::UnionTypes::StringOrObject;
use crate::dom::bindings::error::{Error, ErrorResult, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject};
use crate::dom::bindings::root::{DomRoot, MutNullableDom};
use crate::dom::bindings::str::DOMString;
use crate::dom::bindings::trace::RootedTraceableBox;
use crate::dom::blob::Blob;
use crate::dom::domexception::{DOMErrorName, DOMException};
use crate::dom::event::{Event, EventBubbles, EventCancelable};
use crate::dom::eventtarget::EventTarget;
use crate::dom::globalscope::GlobalScope;
use crate::dom::progressevent::ProgressEvent;
use crate::task::TaskCanceller;
use crate::task_source::file_reading::{FileReadingTask, FileReadingTaskSource};
use crate::task_source::{TaskSource, TaskSourceName};
use base64;
use dom_struct::dom_struct;
use encoding_rs::{Encoding, UTF_8};
use js::jsapi::Heap;
use js::jsapi::JSAutoRealm;
use js::jsapi::JSContext;
use js::jsapi::JSObject;
use js::jsval::{self, JSVal};
use js::typedarray::{ArrayBuffer, CreateWith};
use mime::{self, Mime};
use servo_atoms::Atom;
use std::cell::Cell;
use std::ptr;
use std::sync::Arc;
use std::thread;
#[derive(Clone, Copy, JSTraceable, MallocSizeOf, PartialEq)]
pub enum FileReaderFunction {
ReadAsText,
ReadAsDataUrl,
ReadAsArrayBuffer,
}
pub type TrustedFileReader = Trusted<FileReader>;
#[derive(Clone, MallocSizeOf)]
pub struct ReadMetaData {
pub blobtype: String,
pub label: Option<String>,
pub function: FileReaderFunction,
}
impl ReadMetaData {
pub fn new(
blobtype: String,<|fim▁hole|> function: FileReaderFunction,
) -> ReadMetaData {
ReadMetaData {
blobtype: blobtype,
label: label,
function: function,
}
}
}
#[derive(Clone, Copy, JSTraceable, MallocSizeOf, PartialEq)]
pub struct GenerationId(u32);
#[repr(u16)]
#[derive(Clone, Copy, Debug, JSTraceable, MallocSizeOf, PartialEq)]
pub enum FileReaderReadyState {
Empty = FileReaderConstants::EMPTY,
Loading = FileReaderConstants::LOADING,
Done = FileReaderConstants::DONE,
}
#[derive(JSTraceable, MallocSizeOf)]
pub enum FileReaderResult {
ArrayBuffer(#[ignore_malloc_size_of = "mozjs"] Heap<JSVal>),
String(DOMString),
}
pub struct FileReaderSharedFunctionality;
impl FileReaderSharedFunctionality {
pub fn dataurl_format(blob_contents: &[u8], blob_type: String) -> DOMString {
let base64 = base64::encode(&blob_contents);
let dataurl = if blob_type.is_empty() {
format!("data:base64,{}", base64)
} else {
format!("data:{};base64,{}", blob_type, base64)
};
DOMString::from(dataurl)
}
pub fn text_decode(
blob_contents: &[u8],
blob_type: &str,
blob_label: &Option<String>,
) -> DOMString {
//https://w3c.github.io/FileAPI/#encoding-determination
// Steps 1 & 2 & 3
let mut encoding = blob_label
.as_ref()
.map(|string| string.as_bytes())
.and_then(Encoding::for_label);
// Step 4 & 5
encoding = encoding.or_else(|| {
let resultmime = blob_type.parse::<Mime>().ok();
resultmime.and_then(|mime| {
mime.params()
.find(|(ref k, _)| &mime::CHARSET == k)
.and_then(|(_, ref v)| Encoding::for_label(v.as_ref().as_bytes()))
})
});
// Step 6
let enc = encoding.unwrap_or(UTF_8);
let convert = blob_contents;
// Step 7
let (output, _, _) = enc.decode(convert);
DOMString::from(output)
}
}
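// Worked examples (illustrative, not part of the original source):
//
//     FileReaderSharedFunctionality::dataurl_format(b"hi", "text/plain".to_string())
//         == "data:text/plain;base64,aGk="
//     FileReaderSharedFunctionality::dataurl_format(b"hi", String::new())
//         == "data:base64,aGk="
//
//     // An explicit blob label beats the MIME charset parameter, which in
//     // turn beats the UTF-8 fallback of step 6:
//     FileReaderSharedFunctionality::text_decode(
//         b"\xE9", "text/plain;charset=windows-1252", &None) == "é"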
#[dom_struct]
pub struct FileReader {
eventtarget: EventTarget,
ready_state: Cell<FileReaderReadyState>,
error: MutNullableDom<DOMException>,
result: DomRefCell<Option<FileReaderResult>>,
generation_id: Cell<GenerationId>,
}
impl FileReader {
pub fn new_inherited() -> FileReader {
FileReader {
eventtarget: EventTarget::new_inherited(),
ready_state: Cell::new(FileReaderReadyState::Empty),
error: MutNullableDom::new(None),
result: DomRefCell::new(None),
generation_id: Cell::new(GenerationId(0)),
}
}
pub fn new(global: &GlobalScope) -> DomRoot<FileReader> {
reflect_dom_object(
Box::new(FileReader::new_inherited()),
global,
FileReaderBinding::Wrap,
)
}
pub fn Constructor(global: &GlobalScope) -> Fallible<DomRoot<FileReader>> {
Ok(FileReader::new(global))
}
//https://w3c.github.io/FileAPI/#dfn-error-steps
pub fn process_read_error(
filereader: TrustedFileReader,
gen_id: GenerationId,
error: DOMErrorName,
) {
let fr = filereader.root();
macro_rules! return_on_abort(
() => (
if gen_id != fr.generation_id.get() {
return
}
);
);
return_on_abort!();
// Step 1
fr.change_ready_state(FileReaderReadyState::Done);
*fr.result.borrow_mut() = None;
let exception = DOMException::new(&fr.global(), error);
fr.error.set(Some(&exception));
fr.dispatch_progress_event(atom!("error"), 0, None);
return_on_abort!();
// Step 3
fr.dispatch_progress_event(atom!("loadend"), 0, None);
return_on_abort!();
// Step 4
fr.terminate_ongoing_reading();
}
// https://w3c.github.io/FileAPI/#dfn-readAsText
pub fn process_read_data(filereader: TrustedFileReader, gen_id: GenerationId) {
let fr = filereader.root();
macro_rules! return_on_abort(
() => (
if gen_id != fr.generation_id.get() {
return
}
);
);
return_on_abort!();
//FIXME Step 7 send current progress
fr.dispatch_progress_event(atom!("progress"), 0, None);
}
// https://w3c.github.io/FileAPI/#dfn-readAsText
pub fn process_read(filereader: TrustedFileReader, gen_id: GenerationId) {
let fr = filereader.root();
macro_rules! return_on_abort(
() => (
if gen_id != fr.generation_id.get() {
return
}
);
);
return_on_abort!();
// Step 6
fr.dispatch_progress_event(atom!("loadstart"), 0, None);
}
// https://w3c.github.io/FileAPI/#dfn-readAsText
#[allow(unsafe_code)]
pub fn process_read_eof(
filereader: TrustedFileReader,
gen_id: GenerationId,
data: ReadMetaData,
blob_contents: Arc<Vec<u8>>,
) {
let fr = filereader.root();
macro_rules! return_on_abort(
() => (
if gen_id != fr.generation_id.get() {
return
}
);
);
return_on_abort!();
// Step 8.1
fr.change_ready_state(FileReaderReadyState::Done);
// Step 8.2
match data.function {
FileReaderFunction::ReadAsDataUrl => {
FileReader::perform_readasdataurl(&fr.result, data, &blob_contents)
},
FileReaderFunction::ReadAsText => {
FileReader::perform_readastext(&fr.result, data, &blob_contents)
},
FileReaderFunction::ReadAsArrayBuffer => {
let _ac = JSAutoRealm::new(fr.global().get_cx(), *fr.reflector().get_jsobject());
FileReader::perform_readasarraybuffer(
&fr.result,
fr.global().get_cx(),
data,
&blob_contents,
)
},
};
// Step 8.3
fr.dispatch_progress_event(atom!("load"), 0, None);
return_on_abort!();
// Step 8.4
if fr.ready_state.get() != FileReaderReadyState::Loading {
fr.dispatch_progress_event(atom!("loadend"), 0, None);
}
return_on_abort!();
// Step 9
fr.terminate_ongoing_reading();
}
// https://w3c.github.io/FileAPI/#dfn-readAsText
fn perform_readastext(
result: &DomRefCell<Option<FileReaderResult>>,
data: ReadMetaData,
blob_bytes: &[u8],
) {
let blob_label = &data.label;
let blob_type = &data.blobtype;
let output = FileReaderSharedFunctionality::text_decode(blob_bytes, blob_type, blob_label);
*result.borrow_mut() = Some(FileReaderResult::String(output));
}
//https://w3c.github.io/FileAPI/#dfn-readAsDataURL
fn perform_readasdataurl(
result: &DomRefCell<Option<FileReaderResult>>,
data: ReadMetaData,
bytes: &[u8],
) {
let output = FileReaderSharedFunctionality::dataurl_format(bytes, data.blobtype);
*result.borrow_mut() = Some(FileReaderResult::String(output));
}
// https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer
#[allow(unsafe_code)]
fn perform_readasarraybuffer(
result: &DomRefCell<Option<FileReaderResult>>,
cx: *mut JSContext,
_: ReadMetaData,
bytes: &[u8],
) {
unsafe {
rooted!(in(cx) let mut array_buffer = ptr::null_mut::<JSObject>());
assert!(
ArrayBuffer::create(cx, CreateWith::Slice(bytes), array_buffer.handle_mut())
.is_ok()
);
*result.borrow_mut() = Some(FileReaderResult::ArrayBuffer(Heap::default()));
if let Some(FileReaderResult::ArrayBuffer(ref mut heap)) = *result.borrow_mut() {
heap.set(jsval::ObjectValue(array_buffer.get()));
};
}
}
}
impl FileReaderMethods for FileReader {
// https://w3c.github.io/FileAPI/#dfn-onloadstart
event_handler!(loadstart, GetOnloadstart, SetOnloadstart);
// https://w3c.github.io/FileAPI/#dfn-onprogress
event_handler!(progress, GetOnprogress, SetOnprogress);
// https://w3c.github.io/FileAPI/#dfn-onload
event_handler!(load, GetOnload, SetOnload);
// https://w3c.github.io/FileAPI/#dfn-onabort
event_handler!(abort, GetOnabort, SetOnabort);
// https://w3c.github.io/FileAPI/#dfn-onerror
event_handler!(error, GetOnerror, SetOnerror);
// https://w3c.github.io/FileAPI/#dfn-onloadend
event_handler!(loadend, GetOnloadend, SetOnloadend);
// https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer
fn ReadAsArrayBuffer(&self, blob: &Blob) -> ErrorResult {
self.read(FileReaderFunction::ReadAsArrayBuffer, blob, None)
}
// https://w3c.github.io/FileAPI/#dfn-readAsDataURL
fn ReadAsDataURL(&self, blob: &Blob) -> ErrorResult {
self.read(FileReaderFunction::ReadAsDataUrl, blob, None)
}
// https://w3c.github.io/FileAPI/#dfn-readAsText
fn ReadAsText(&self, blob: &Blob, label: Option<DOMString>) -> ErrorResult {
self.read(FileReaderFunction::ReadAsText, blob, label)
}
// https://w3c.github.io/FileAPI/#dfn-abort
fn Abort(&self) {
// Step 2
if self.ready_state.get() == FileReaderReadyState::Loading {
self.change_ready_state(FileReaderReadyState::Done);
}
// Steps 1 & 3
*self.result.borrow_mut() = None;
let exception = DOMException::new(&self.global(), DOMErrorName::AbortError);
self.error.set(Some(&exception));
self.terminate_ongoing_reading();
// Steps 5 & 6
self.dispatch_progress_event(atom!("abort"), 0, None);
self.dispatch_progress_event(atom!("loadend"), 0, None);
}
// https://w3c.github.io/FileAPI/#dfn-error
fn GetError(&self) -> Option<DomRoot<DOMException>> {
self.error.get()
}
#[allow(unsafe_code)]
// https://w3c.github.io/FileAPI/#dfn-result
unsafe fn GetResult(&self, _: *mut JSContext) -> Option<StringOrObject> {
self.result.borrow().as_ref().map(|r| match *r {
FileReaderResult::String(ref string) => StringOrObject::String(string.clone()),
FileReaderResult::ArrayBuffer(ref arr_buffer) => {
let result = RootedTraceableBox::new(Heap::default());
result.set((*arr_buffer.ptr.get()).to_object());
StringOrObject::Object(result)
},
})
}
// https://w3c.github.io/FileAPI/#dfn-readyState
fn ReadyState(&self) -> u16 {
self.ready_state.get() as u16
}
}
impl FileReader {
fn dispatch_progress_event(&self, type_: Atom, loaded: u64, total: Option<u64>) {
let progressevent = ProgressEvent::new(
&self.global(),
type_,
EventBubbles::DoesNotBubble,
EventCancelable::NotCancelable,
total.is_some(),
loaded,
total.unwrap_or(0),
);
progressevent.upcast::<Event>().fire(self.upcast());
}
fn terminate_ongoing_reading(&self) {
let GenerationId(prev_id) = self.generation_id.get();
self.generation_id.set(GenerationId(prev_id + 1));
}
fn read(
&self,
function: FileReaderFunction,
blob: &Blob,
label: Option<DOMString>,
) -> ErrorResult {
// Step 1
if self.ready_state.get() == FileReaderReadyState::Loading {
return Err(Error::InvalidState);
}
// Step 2
self.change_ready_state(FileReaderReadyState::Loading);
// Step 3
let blob_contents = Arc::new(blob.get_bytes().unwrap_or(vec![]));
let type_ = blob.Type();
let load_data = ReadMetaData::new(String::from(type_), label.map(String::from), function);
let fr = Trusted::new(self);
let gen_id = self.generation_id.get();
let global = self.global();
let canceller = global.task_canceller(TaskSourceName::FileReading);
let task_source = global.file_reading_task_source();
thread::Builder::new()
.name("file reader async operation".to_owned())
.spawn(move || {
perform_annotated_read_operation(
gen_id,
load_data,
blob_contents,
fr,
task_source,
canceller,
)
})
.expect("Thread spawning failed");
Ok(())
}
fn change_ready_state(&self, state: FileReaderReadyState) {
self.ready_state.set(state);
}
}
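// Informal note (not part of the original source): `read` above snapshots the
// blob bytes, records the current GenerationId, and spawns a thread running
// `perform_annotated_read_operation` below; `Abort`/`terminate_ongoing_reading`
// bump the GenerationId, so queued Process* tasks for a cancelled read bail
// out through their `return_on_abort!` checks instead of touching state.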
// https://w3c.github.io/FileAPI/#thread-read-operation
fn perform_annotated_read_operation(
gen_id: GenerationId,
data: ReadMetaData,
blob_contents: Arc<Vec<u8>>,
filereader: TrustedFileReader,
task_source: FileReadingTaskSource,
canceller: TaskCanceller,
) {
// Step 4
let task = FileReadingTask::ProcessRead(filereader.clone(), gen_id);
task_source.queue_with_canceller(task, &canceller).unwrap();
let task = FileReadingTask::ProcessReadData(filereader.clone(), gen_id);
task_source.queue_with_canceller(task, &canceller).unwrap();
let task = FileReadingTask::ProcessReadEOF(filereader, gen_id, data, blob_contents);
task_source.queue_with_canceller(task, &canceller).unwrap();
}<|fim▁end|>
|
label: Option<String>,
|
<|file_name|>elastic.py<|end_file_name|><|fim▁begin|>from elasticsearch import Elasticsearch
from django.conf import settings
def get_es_client(silent=False):
    """
    Returns an Elasticsearch client configured from the Django settings
    """
    es_client = Elasticsearch([settings.ELASTIC_SEARCH_HOST],
                              scheme='http',
                              port=9200,
                              http_compress=True)
    # check that the cluster is reachable before handing the client out
    if not silent and not es_client.cat.health(request_timeout=30):
        raise ValueError('Elastic search host is unreachable or unhealthy')
    return es_client
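# Illustrative usage (assumes the Django settings define ELASTIC_SEARCH_HOST
# and ELASTIC_SEARCH_ANALYSERS; the index name below is hypothetical):
#
#     es = get_es_client(silent=True)
#     es.indices.create(index='documents-en', body=get_index_config('en'))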
def get_index_config(lang):
"""
Returns the elasticsearch index configuration.
Configures the analysers based on the language passed in.
"""
return {
"settings": {<|fim▁hole|> "index": {
"number_of_shards": 1,
"number_of_replicas": 0
}
},
'mappings': {
'_doc': {
'properties': {
'title': {
'type': 'text',
'analyzer': settings.ELASTIC_SEARCH_ANALYSERS[lang]
},
'content': {
'type': 'text',
'analyzer': settings.ELASTIC_SEARCH_ANALYSERS[lang]
},
'url': {'type': 'text'},
'title_plain': {'type': 'text'},
'content_plain': {'type': 'text'},
'author': {
'type': 'keyword'
},
'source': {
'type': 'keyword'
},
'argument_score': {
'type': 'float'
}
}
}
}
}<|fim▁end|>
| |
<|file_name|>amqp9_0.rs<|end_file_name|><|fim▁begin|>// Generated by build.rs script in amqp0-parser-nom
// Pre-generated files are used by default. Generation is done with the amqp0-codegen crate
//
// To regenerate, and not use pre-generated files, use: cargo --features="amqp0-build-parser"
// To format and replace the pre-generated files, use: cargo --features="amqp0-pregen-parser"
//
// EDITORS BEWARE: Your modifications may be overridden
#![allow(unused_variables)]
use nom::{IResult, be_u8, be_u16, be_u32, be_u64};
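// Illustrative sketch (not part of the generated source): each `ClassMethod`
// parser below dispatches on the big-endian u16 method id at the head of the
// input. Assuming `payload` holds those bytes and `pool` implements
// `::pool::ParserPool`:
//
//     match <::primitives::amqp9_0::connection::ClassMethod
//               as ::NomBytes>::nom_bytes(payload, &mut pool) {
//         IResult::Done(_rest, method) => { /* handle the parsed method */ }
//         _ => { /* parse error or incomplete input */ }
//     }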
// Class access
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::access::Request<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,realm: call!(::common::shortstr) >>
flag1: bits!(tuple!(
call!(::common::bool_bit),
call!(::common::bool_bit),
call!(::common::bool_bit),
call!(::common::bool_bit),
call!(::common::bool_bit)
)) >>
(::primitives::amqp9_0::access::Request::new(realm, flag1.0, flag1.1, flag1.2, flag1.3, flag1.4))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::access::RequestOk {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,ticket: be_u16 >>
(::primitives::amqp9_0::access::RequestOk::new(ticket))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::access::ClassMethod<'a> {
type Output = Self;
fn nom_bytes<'pool, P>(input: &'a [u8], pool: &'pool mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
switch!(input, be_u16,
10 => map!(
call!(<::primitives::amqp9_0::access::Request as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::access::ClassMethod::Request
) | // map
11 => map!(
call!(<::primitives::amqp9_0::access::RequestOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::access::ClassMethod::RequestOk
) // map!
) // switch!
} // fn nom_bytes
} // impl ::NomBytes for ::primitives::amqp9_0::access::SpecMethod<'a>
// Class basic
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::basic::Ack {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,delivery_tag: be_u64 >>
multiple: bits!(call!(::common::bool_bit)) >>
(::primitives::amqp9_0::basic::Ack::new(delivery_tag, multiple))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::basic::Cancel<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,consumer_tag: call!(::common::shortstr) >>
no_wait: bits!(call!(::common::bool_bit)) >>
(::primitives::amqp9_0::basic::Cancel::new(consumer_tag, no_wait))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::basic::CancelOk<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,consumer_tag: call!(::common::shortstr) >>
(::primitives::amqp9_0::basic::CancelOk::new(consumer_tag))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::basic::Consume<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], pool: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,ticket: be_u16 >>
queue: call!(::common::shortstr) >>
consumer_tag: call!(::common::shortstr) >>
flag1: bits!(tuple!(
call!(::common::bool_bit),
call!(::common::bool_bit),
call!(::common::bool_bit),
call!(::common::bool_bit)
)) >>
filter: apply!(<::primitives::field::TableEntries as ::NomBytes>::nom_bytes, pool) >>
(::primitives::amqp9_0::basic::Consume::new(ticket, queue, consumer_tag, flag1.0, flag1.1, flag1.2, flag1.3, filter))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::basic::ConsumeOk<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,consumer_tag: call!(::common::shortstr) >>
(::primitives::amqp9_0::basic::ConsumeOk::new(consumer_tag))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::basic::Deliver<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,consumer_tag: call!(::common::shortstr) >>
delivery_tag: be_u64 >>
redelivered: bits!(call!(::common::bool_bit)) >>
exchange: call!(::common::shortstr) >>
routing_key: call!(::common::shortstr) >>
(::primitives::amqp9_0::basic::Deliver::new(consumer_tag, delivery_tag, redelivered, exchange, routing_key))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::basic::Get<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,ticket: be_u16 >>
queue: call!(::common::shortstr) >>
no_ack: bits!(call!(::common::bool_bit)) >>
(::primitives::amqp9_0::basic::Get::new(ticket, queue, no_ack))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::basic::GetEmpty<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,cluster_id: call!(::common::shortstr) >>
(::primitives::amqp9_0::basic::GetEmpty::new(cluster_id))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::basic::GetOk<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,delivery_tag: be_u64 >>
redelivered: bits!(call!(::common::bool_bit)) >>
exchange: call!(::common::shortstr) >>
routing_key: call!(::common::shortstr) >>
message_count: be_u32 >>
(::primitives::amqp9_0::basic::GetOk::new(delivery_tag, redelivered, exchange, routing_key, message_count))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::basic::Publish<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,ticket: be_u16 >>
exchange: call!(::common::shortstr) >>
routing_key: call!(::common::shortstr) >>
flag1: bits!(tuple!(
call!(::common::bool_bit),
call!(::common::bool_bit)
)) >>
(::primitives::amqp9_0::basic::Publish::new(ticket, exchange, routing_key, flag1.0, flag1.1))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::basic::Qos {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,prefetch_size: be_u32 >>
prefetch_count: be_u16 >>
global: bits!(call!(::common::bool_bit)) >>
(::primitives::amqp9_0::basic::Qos::new(prefetch_size, prefetch_count, global))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::basic::QosOk {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::basic::QosOk::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::basic::Recover {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,requeue: bits!(call!(::common::bool_bit)) >>
(::primitives::amqp9_0::basic::Recover::new(requeue))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::basic::Reject {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,delivery_tag: be_u64 >>
requeue: bits!(call!(::common::bool_bit)) >>
(::primitives::amqp9_0::basic::Reject::new(delivery_tag, requeue))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::basic::Return<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,reply_code: be_u16 >>
reply_text: call!(::common::shortstr) >>
exchange: call!(::common::shortstr) >>
routing_key: call!(::common::shortstr) >>
(::primitives::amqp9_0::basic::Return::new(reply_code, reply_text, exchange, routing_key))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::basic::ClassMethod<'a> {
type Output = Self;
fn nom_bytes<'pool, P>(input: &'a [u8], pool: &'pool mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
switch!(input, be_u16,
80 => map!(
call!(<::primitives::amqp9_0::basic::Ack as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::basic::ClassMethod::Ack
) | // map
30 => map!(
call!(<::primitives::amqp9_0::basic::Cancel as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::basic::ClassMethod::Cancel
) | // map
31 => map!(
call!(<::primitives::amqp9_0::basic::CancelOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::basic::ClassMethod::CancelOk
) | // map
20 => map!(
call!(<::primitives::amqp9_0::basic::Consume as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::basic::ClassMethod::Consume
) | // map
21 => map!(
call!(<::primitives::amqp9_0::basic::ConsumeOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::basic::ClassMethod::ConsumeOk
) | // map
60 => map!(
call!(<::primitives::amqp9_0::basic::Deliver as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::basic::ClassMethod::Deliver
) | // map
70 => map!(
call!(<::primitives::amqp9_0::basic::Get as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::basic::ClassMethod::Get
) | // map
72 => map!(
call!(<::primitives::amqp9_0::basic::GetEmpty as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::basic::ClassMethod::GetEmpty
) | // map
71 => map!(
call!(<::primitives::amqp9_0::basic::GetOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::basic::ClassMethod::GetOk
) | // map
40 => map!(
call!(<::primitives::amqp9_0::basic::Publish as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::basic::ClassMethod::Publish
) | // map
10 => map!(
call!(<::primitives::amqp9_0::basic::Qos as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::basic::ClassMethod::Qos
) | // map
11 => map!(
call!(<::primitives::amqp9_0::basic::QosOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::basic::ClassMethod::QosOk
) | // map
100 => map!(
call!(<::primitives::amqp9_0::basic::Recover as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::basic::ClassMethod::Recover
) | // map
90 => map!(
call!(<::primitives::amqp9_0::basic::Reject as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::basic::ClassMethod::Reject
) | // map
50 => map!(
call!(<::primitives::amqp9_0::basic::Return as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::basic::ClassMethod::Return
) // map!
) // switch!
} // fn nom_bytes
} // impl ::NomBytes for ::primitives::amqp9_0::basic::SpecMethod<'a>
// Class channel
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::channel::Close<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,reply_code: be_u16 >>
reply_text: call!(::common::shortstr) >>
class_id: be_u16 >>
method_id: be_u16 >>
(::primitives::amqp9_0::channel::Close::new(reply_code, reply_text, class_id, method_id))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::channel::CloseOk {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::channel::CloseOk::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::channel::Flow {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,active: bits!(call!(::common::bool_bit)) >>
(::primitives::amqp9_0::channel::Flow::new(active))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::channel::FlowOk {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,active: bits!(call!(::common::bool_bit)) >>
(::primitives::amqp9_0::channel::FlowOk::new(active))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::channel::Ok {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::channel::Ok::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::channel::Open<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,out_of_band: call!(::common::shortstr) >>
(::primitives::amqp9_0::channel::Open::new(out_of_band))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::channel::OpenOk<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,channel_id: call!(::common::longstr) >>
(::primitives::amqp9_0::channel::OpenOk::new(channel_id))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::channel::Ping {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::channel::Ping::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::channel::Pong {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::channel::Pong::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::channel::Resume<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,channel_id: call!(::common::longstr) >>
(::primitives::amqp9_0::channel::Resume::new(channel_id))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::channel::ClassMethod<'a> {
type Output = Self;
fn nom_bytes<'pool, P>(input: &'a [u8], pool: &'pool mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
switch!(input, be_u16,
40 => map!(
call!(<::primitives::amqp9_0::channel::Close as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::channel::ClassMethod::Close
) | // map
41 => map!(
call!(<::primitives::amqp9_0::channel::CloseOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::channel::ClassMethod::CloseOk
) | // map
20 => map!(
call!(<::primitives::amqp9_0::channel::Flow as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::channel::ClassMethod::Flow
) | // map
21 => map!(
call!(<::primitives::amqp9_0::channel::FlowOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::channel::ClassMethod::FlowOk
) | // map
80 => map!(
call!(<::primitives::amqp9_0::channel::Ok as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::channel::ClassMethod::Ok
) | // map
10 => map!(
call!(<::primitives::amqp9_0::channel::Open as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::channel::ClassMethod::Open
) | // map
11 => map!(
call!(<::primitives::amqp9_0::channel::OpenOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::channel::ClassMethod::OpenOk
) | // map
60 => map!(
call!(<::primitives::amqp9_0::channel::Ping as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::channel::ClassMethod::Ping
) | // map
70 => map!(
call!(<::primitives::amqp9_0::channel::Pong as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::channel::ClassMethod::Pong
) | // map
50 => map!(
call!(<::primitives::amqp9_0::channel::Resume as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::channel::ClassMethod::Resume
) // map!
) // switch!
} // fn nom_bytes
} // impl ::NomBytes for ::primitives::amqp9_0::channel::SpecMethod<'a>
// Class connection
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::connection::Close<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,reply_code: be_u16 >>
reply_text: call!(::common::shortstr) >>
class_id: be_u16 >>
method_id: be_u16 >>
(::primitives::amqp9_0::connection::Close::new(reply_code, reply_text, class_id, method_id))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::connection::CloseOk {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::connection::CloseOk::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::connection::Open<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,virtual_host: call!(::common::shortstr) >>
capabilities: call!(::common::shortstr) >>
insist: bits!(call!(::common::bool_bit)) >>
(::primitives::amqp9_0::connection::Open::new(virtual_host, capabilities, insist))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::connection::OpenOk<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,known_hosts: call!(::common::shortstr) >>
(::primitives::amqp9_0::connection::OpenOk::new(known_hosts))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::connection::Redirect<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,host: call!(::common::shortstr) >>
known_hosts: call!(::common::shortstr) >>
(::primitives::amqp9_0::connection::Redirect::new(host, known_hosts))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::connection::Secure<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,challenge: call!(::common::longstr) >>
(::primitives::amqp9_0::connection::Secure::new(challenge))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::connection::SecureOk<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,response: call!(::common::longstr) >>
(::primitives::amqp9_0::connection::SecureOk::new(response))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::connection::Start<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], pool: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,version_major: be_u8 >>
version_minor: be_u8 >>
server_properties: apply!(<::primitives::field::TableEntries as ::NomBytes>::nom_bytes, pool) >>
mechanisms: call!(::common::longstr) >>
locales: call!(::common::longstr) >>
(::primitives::amqp9_0::connection::Start::new(version_major, version_minor, server_properties, mechanisms, locales))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::connection::StartOk<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], pool: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,client_properties: apply!(<::primitives::field::TableEntries as ::NomBytes>::nom_bytes, pool) >>
mechanism: call!(::common::shortstr) >>
response: call!(::common::longstr) >>
locale: call!(::common::shortstr) >>
(::primitives::amqp9_0::connection::StartOk::new(client_properties, mechanism, response, locale))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::connection::Tune {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,channel_max: be_u16 >>
frame_max: be_u32 >>
heartbeat: be_u16 >>
(::primitives::amqp9_0::connection::Tune::new(channel_max, frame_max, heartbeat))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::connection::TuneOk {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,channel_max: be_u16 >>
frame_max: be_u32 >>
heartbeat: be_u16 >>
(::primitives::amqp9_0::connection::TuneOk::new(channel_max, frame_max, heartbeat))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::connection::ClassMethod<'a> {
type Output = Self;
fn nom_bytes<'pool, P>(input: &'a [u8], pool: &'pool mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
switch!(input, be_u16,
50 => map!(
call!(<::primitives::amqp9_0::connection::Close as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::connection::ClassMethod::Close
) | // map
51 => map!(
call!(<::primitives::amqp9_0::connection::CloseOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::connection::ClassMethod::CloseOk
) | // map
40 => map!(
call!(<::primitives::amqp9_0::connection::Open as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::connection::ClassMethod::Open
) | // map
41 => map!(
call!(<::primitives::amqp9_0::connection::OpenOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::connection::ClassMethod::OpenOk
) | // map
42 => map!(
call!(<::primitives::amqp9_0::connection::Redirect as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::connection::ClassMethod::Redirect
) | // map
20 => map!(
call!(<::primitives::amqp9_0::connection::Secure as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::connection::ClassMethod::Secure
) | // map
21 => map!(
call!(<::primitives::amqp9_0::connection::SecureOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::connection::ClassMethod::SecureOk
) | // map
10 => map!(
call!(<::primitives::amqp9_0::connection::Start as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::connection::ClassMethod::Start
) | // map
11 => map!(
call!(<::primitives::amqp9_0::connection::StartOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::connection::ClassMethod::StartOk
) | // map
30 => map!(
call!(<::primitives::amqp9_0::connection::Tune as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::connection::ClassMethod::Tune
) | // map
31 => map!(
call!(<::primitives::amqp9_0::connection::TuneOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::connection::ClassMethod::TuneOk
) // map!
) // switch!
} // fn nom_bytes
} // impl ::NomBytes for ::primitives::amqp9_0::connection::SpecMethod<'a>
// Class dtx
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::dtx::Select {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::dtx::Select::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::dtx::SelectOk {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::dtx::SelectOk::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::dtx::Start<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,dtx_identifier: call!(::common::shortstr) >>
(::primitives::amqp9_0::dtx::Start::new(dtx_identifier))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::dtx::StartOk {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::dtx::StartOk::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::dtx::ClassMethod<'a> {
type Output = Self;
fn nom_bytes<'pool, P>(input: &'a [u8], pool: &'pool mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
switch!(input, be_u16,
10 => map!(
call!(<::primitives::amqp9_0::dtx::Select as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::dtx::ClassMethod::Select
) | // map
11 => map!(
call!(<::primitives::amqp9_0::dtx::SelectOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::dtx::ClassMethod::SelectOk
) | // map
20 => map!(
call!(<::primitives::amqp9_0::dtx::Start as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::dtx::ClassMethod::Start
) | // map
21 => map!(
call!(<::primitives::amqp9_0::dtx::StartOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::dtx::ClassMethod::StartOk
) // map!
) // switch!
} // fn nom_bytes
} // impl ::NomBytes for ::primitives::amqp9_0::dtx::SpecMethod<'a>
// Class exchange
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::exchange::Declare<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], pool: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,ticket: be_u16 >>
exchange: call!(::common::shortstr) >>
ty: call!(::common::shortstr) >>
flag1: bits!(tuple!(
call!(::common::bool_bit),
call!(::common::bool_bit),
call!(::common::bool_bit),
call!(::common::bool_bit),
call!(::common::bool_bit)
)) >>
arguments: apply!(<::primitives::field::TableEntries as ::NomBytes>::nom_bytes, pool) >>
(::primitives::amqp9_0::exchange::Declare::new(ticket, exchange, ty, flag1.0, flag1.1, flag1.2, flag1.3, flag1.4, arguments))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::exchange::DeclareOk {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::exchange::DeclareOk::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::exchange::Delete<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,ticket: be_u16 >>
exchange: call!(::common::shortstr) >>
flag1: bits!(tuple!(
call!(::common::bool_bit),
call!(::common::bool_bit)
)) >>
(::primitives::amqp9_0::exchange::Delete::new(ticket, exchange, flag1.0, flag1.1))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::exchange::DeleteOk {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::exchange::DeleteOk::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::exchange::ClassMethod<'a> {
type Output = Self;
fn nom_bytes<'pool, P>(input: &'a [u8], pool: &'pool mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
switch!(input, be_u16,
10 => map!(
call!(<::primitives::amqp9_0::exchange::Declare as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::exchange::ClassMethod::Declare
) | // map
11 => map!(
call!(<::primitives::amqp9_0::exchange::DeclareOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::exchange::ClassMethod::DeclareOk
) | // map
20 => map!(
call!(<::primitives::amqp9_0::exchange::Delete as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::exchange::ClassMethod::Delete
) | // map
21 => map!(
call!(<::primitives::amqp9_0::exchange::DeleteOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::exchange::ClassMethod::DeleteOk
) // map!
) // switch!
} // fn nom_bytes
} // impl ::NomBytes for ::primitives::amqp9_0::exchange::SpecMethod<'a>
// Class file
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::file::Ack {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,delivery_tag: be_u64 >>
multiple: bits!(call!(::common::bool_bit)) >>
(::primitives::amqp9_0::file::Ack::new(delivery_tag, multiple))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::file::Cancel<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,consumer_tag: call!(::common::shortstr) >>
no_wait: bits!(call!(::common::bool_bit)) >>
(::primitives::amqp9_0::file::Cancel::new(consumer_tag, no_wait))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::file::CancelOk<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,consumer_tag: call!(::common::shortstr) >>
(::primitives::amqp9_0::file::CancelOk::new(consumer_tag))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::file::Consume<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], pool: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,ticket: be_u16 >>
queue: call!(::common::shortstr) >>
consumer_tag: call!(::common::shortstr) >>
flag1: bits!(tuple!(
call!(::common::bool_bit),
call!(::common::bool_bit),
call!(::common::bool_bit),
call!(::common::bool_bit)
)) >>
filter: apply!(<::primitives::field::TableEntries as ::NomBytes>::nom_bytes, pool) >>
(::primitives::amqp9_0::file::Consume::new(ticket, queue, consumer_tag, flag1.0, flag1.1, flag1.2, flag1.3, filter))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::file::ConsumeOk<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,consumer_tag: call!(::common::shortstr) >>
(::primitives::amqp9_0::file::ConsumeOk::new(consumer_tag))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::file::Deliver<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,consumer_tag: call!(::common::shortstr) >>
delivery_tag: be_u64 >>
redelivered: bits!(call!(::common::bool_bit)) >>
exchange: call!(::common::shortstr) >>
routing_key: call!(::common::shortstr) >>
identifier: call!(::common::shortstr) >>
(::primitives::amqp9_0::file::Deliver::new(consumer_tag, delivery_tag, redelivered, exchange, routing_key, identifier))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::file::Open<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,identifier: call!(::common::shortstr) >>
content_size: be_u64 >>
(::primitives::amqp9_0::file::Open::new(identifier, content_size))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::file::OpenOk {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,staged_size: be_u64 >>
(::primitives::amqp9_0::file::OpenOk::new(staged_size))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::file::Publish<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,ticket: be_u16 >>
exchange: call!(::common::shortstr) >>
routing_key: call!(::common::shortstr) >>
flag1: bits!(tuple!(
call!(::common::bool_bit),
call!(::common::bool_bit)
)) >>
identifier: call!(::common::shortstr) >>
(::primitives::amqp9_0::file::Publish::new(ticket, exchange, routing_key, flag1.0, flag1.1, identifier))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::file::Qos {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,prefetch_size: be_u32 >>
prefetch_count: be_u16 >>
global: bits!(call!(::common::bool_bit)) >>
(::primitives::amqp9_0::file::Qos::new(prefetch_size, prefetch_count, global))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::file::QosOk {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::file::QosOk::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::file::Reject {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,delivery_tag: be_u64 >>
requeue: bits!(call!(::common::bool_bit)) >>
(::primitives::amqp9_0::file::Reject::new(delivery_tag, requeue))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::file::Return<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,reply_code: be_u16 >>
reply_text: call!(::common::shortstr) >>
exchange: call!(::common::shortstr) >>
routing_key: call!(::common::shortstr) >>
(::primitives::amqp9_0::file::Return::new(reply_code, reply_text, exchange, routing_key))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::file::Stage {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::file::Stage::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::file::ClassMethod<'a> {
type Output = Self;
fn nom_bytes<'pool, P>(input: &'a [u8], pool: &'pool mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
switch!(input, be_u16,
90 => map!(
call!(<::primitives::amqp9_0::file::Ack as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::file::ClassMethod::Ack
) | // map
30 => map!(
call!(<::primitives::amqp9_0::file::Cancel as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::file::ClassMethod::Cancel
) | // map
31 => map!(
call!(<::primitives::amqp9_0::file::CancelOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::file::ClassMethod::CancelOk
) | // map
20 => map!(
call!(<::primitives::amqp9_0::file::Consume as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::file::ClassMethod::Consume
) | // map
21 => map!(
call!(<::primitives::amqp9_0::file::ConsumeOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::file::ClassMethod::ConsumeOk
) | // map
80 => map!(
call!(<::primitives::amqp9_0::file::Deliver as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::file::ClassMethod::Deliver
) | // map
40 => map!(
call!(<::primitives::amqp9_0::file::Open as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::file::ClassMethod::Open
) | // map
41 => map!(
call!(<::primitives::amqp9_0::file::OpenOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::file::ClassMethod::OpenOk
) | // map
60 => map!(
call!(<::primitives::amqp9_0::file::Publish as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::file::ClassMethod::Publish
) | // map
10 => map!(
call!(<::primitives::amqp9_0::file::Qos as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::file::ClassMethod::Qos
) | // map
11 => map!(
call!(<::primitives::amqp9_0::file::QosOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::file::ClassMethod::QosOk
) | // map
100 => map!(
call!(<::primitives::amqp9_0::file::Reject as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::file::ClassMethod::Reject
) | // map
70 => map!(
call!(<::primitives::amqp9_0::file::Return as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::file::ClassMethod::Return
) | // map
50 => map!(
call!(<::primitives::amqp9_0::file::Stage as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::file::ClassMethod::Stage
) // map!
) // switch!
} // fn nom_bytes
} // impl ::NomBytes for ::primitives::amqp9_0::file::SpecMethod<'a>
// Class message
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::message::Append<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,reference: call!(::common::longstr) >>
bytes: call!(::common::longstr) >>
(::primitives::amqp9_0::message::Append::new(reference, bytes))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::message::Cancel<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,destination: call!(::common::shortstr) >>
(::primitives::amqp9_0::message::Cancel::new(destination))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::message::Checkpoint<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,reference: call!(::common::longstr) >>
identifier: call!(::common::shortstr) >>
(::primitives::amqp9_0::message::Checkpoint::new(reference, identifier))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::message::Close<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,reference: call!(::common::longstr) >>
(::primitives::amqp9_0::message::Close::new(reference))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::message::Consume<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], pool: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,ticket: be_u16 >>
queue: call!(::common::shortstr) >>
destination: call!(::common::shortstr) >>
flag1: bits!(tuple!(
call!(::common::bool_bit),
call!(::common::bool_bit),
call!(::common::bool_bit)
)) >>
filter: apply!(<::primitives::field::TableEntries as ::NomBytes>::nom_bytes, pool) >>
(::primitives::amqp9_0::message::Consume::new(ticket, queue, destination, flag1.0, flag1.1, flag1.2, filter))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::message::Empty {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::message::Empty::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::message::Get<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,ticket: be_u16 >>
queue: call!(::common::shortstr) >>
destination: call!(::common::shortstr) >>
no_ack: bits!(call!(::common::bool_bit)) >>
(::primitives::amqp9_0::message::Get::new(ticket, queue, destination, no_ack))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::message::Offset {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,value: be_u64 >>
(::primitives::amqp9_0::message::Offset::new(value))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::message::Ok {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::message::Ok::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::message::Open<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,reference: call!(::common::longstr) >>
(::primitives::amqp9_0::message::Open::new(reference))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::message::Qos {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,prefetch_size: be_u32 >>
prefetch_count: be_u16 >>
global: bits!(call!(::common::bool_bit)) >>
(::primitives::amqp9_0::message::Qos::new(prefetch_size, prefetch_count, global))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::message::Recover {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,requeue: bits!(call!(::common::bool_bit)) >>
(::primitives::amqp9_0::message::Recover::new(requeue))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::message::Reject<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,code: be_u16 >>
text: call!(::common::shortstr) >>
(::primitives::amqp9_0::message::Reject::new(code, text))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::message::Resume<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,reference: call!(::common::longstr) >>
identifier: call!(::common::shortstr) >>
(::primitives::amqp9_0::message::Resume::new(reference, identifier))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::message::Transfer<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], pool: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,ticket: be_u16 >>
destination: call!(::common::shortstr) >>
flag1: bits!(tuple!(
call!(::common::bool_bit),
call!(::common::bool_bit)
)) >>
ttl: be_u64 >>
priority: be_u8 >>
timestamp: be_u64 >>
delivery_mode: be_u8 >>
expiration: be_u64 >>
exchange: call!(::common::shortstr) >>
routing_key: call!(::common::shortstr) >>
message_id: call!(::common::shortstr) >>
correlation_id: call!(::common::shortstr) >>
reply_to: call!(::common::shortstr) >>
content_type: call!(::common::shortstr) >>
content_encoding: call!(::common::shortstr) >>
user_id: call!(::common::shortstr) >>
app_id: call!(::common::shortstr) >>
transaction_id: call!(::common::shortstr) >>
security_token: call!(::common::longstr) >>
application_headers: apply!(<::primitives::field::TableEntries as ::NomBytes>::nom_bytes, pool) >>
body: length_bytes!(be_u32) >>
(::primitives::amqp9_0::message::Transfer::new(ticket, destination, flag1.0, flag1.1, ttl, priority, timestamp, delivery_mode, expiration, exchange, routing_key, message_id, correlation_id, reply_to, content_type, content_encoding, user_id, app_id, transaction_id, security_token, application_headers, body))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::message::ClassMethod<'a> {
type Output = Self;
fn nom_bytes<'pool, P>(input: &'a [u8], pool: &'pool mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
switch!(input, be_u16,
80 => map!(
call!(<::primitives::amqp9_0::message::Append as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::message::ClassMethod::Append
) | // map
30 => map!(
call!(<::primitives::amqp9_0::message::Cancel as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::message::ClassMethod::Cancel
) | // map
90 => map!(
call!(<::primitives::amqp9_0::message::Checkpoint as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::message::ClassMethod::Checkpoint
) | // map
70 => map!(
call!(<::primitives::amqp9_0::message::Close as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::message::ClassMethod::Close
) | // map
20 => map!(
call!(<::primitives::amqp9_0::message::Consume as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::message::ClassMethod::Consume
) | // map
510 => map!(
call!(<::primitives::amqp9_0::message::Empty as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::message::ClassMethod::Empty
) | // map
40 => map!(
call!(<::primitives::amqp9_0::message::Get as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::message::ClassMethod::Get
) | // map
530 => map!(
call!(<::primitives::amqp9_0::message::Offset as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::message::ClassMethod::Offset
) | // map
500 => map!(
call!(<::primitives::amqp9_0::message::Ok as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::message::ClassMethod::Ok
) | // map
60 => map!(
call!(<::primitives::amqp9_0::message::Open as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::message::ClassMethod::Open
) | // map
110 => map!(
call!(<::primitives::amqp9_0::message::Qos as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::message::ClassMethod::Qos
) | // map
50 => map!(
call!(<::primitives::amqp9_0::message::Recover as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::message::ClassMethod::Recover
) | // map
520 => map!(
call!(<::primitives::amqp9_0::message::Reject as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::message::ClassMethod::Reject
) | // map
100 => map!(
call!(<::primitives::amqp9_0::message::Resume as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::message::ClassMethod::Resume
) | // map
10 => map!(
call!(<::primitives::amqp9_0::message::Transfer as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::message::ClassMethod::Transfer
) // map!
) // switch!
} // fn nom_bytes
} // impl ::NomBytes for ::primitives::amqp9_0::message::SpecMethod<'a>
// Class queue
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::queue::Bind<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], pool: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,ticket: be_u16 >>
queue: call!(::common::shortstr) >>
exchange: call!(::common::shortstr) >>
routing_key: call!(::common::shortstr) >>
no_wait: bits!(call!(::common::bool_bit)) >>
arguments: apply!(<::primitives::field::TableEntries as ::NomBytes>::nom_bytes, pool) >>
(::primitives::amqp9_0::queue::Bind::new(ticket, queue, exchange, routing_key, no_wait, arguments))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::queue::BindOk {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::queue::BindOk::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::queue::Declare<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], pool: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,ticket: be_u16 >>
queue: call!(::common::shortstr) >>
flag1: bits!(tuple!(
call!(::common::bool_bit),
call!(::common::bool_bit),
call!(::common::bool_bit),
call!(::common::bool_bit),
call!(::common::bool_bit)
)) >>
arguments: apply!(<::primitives::field::TableEntries as ::NomBytes>::nom_bytes, pool) >>
(::primitives::amqp9_0::queue::Declare::new(ticket, queue, flag1.0, flag1.1, flag1.2, flag1.3, flag1.4, arguments))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::queue::DeclareOk<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,queue: call!(::common::shortstr) >>
message_count: be_u32 >>
consumer_count: be_u32 >>
(::primitives::amqp9_0::queue::DeclareOk::new(queue, message_count, consumer_count))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::queue::Delete<'a> {<|fim▁hole|>fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,ticket: be_u16 >>
queue: call!(::common::shortstr) >>
flag1: bits!(tuple!(
call!(::common::bool_bit),
call!(::common::bool_bit),
call!(::common::bool_bit)
)) >>
(::primitives::amqp9_0::queue::Delete::new(ticket, queue, flag1.0, flag1.1, flag1.2))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::queue::DeleteOk {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,message_count: be_u32 >>
(::primitives::amqp9_0::queue::DeleteOk::new(message_count))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::queue::Purge<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,ticket: be_u16 >>
queue: call!(::common::shortstr) >>
no_wait: bits!(call!(::common::bool_bit)) >>
(::primitives::amqp9_0::queue::Purge::new(ticket, queue, no_wait))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::queue::PurgeOk {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,message_count: be_u32 >>
(::primitives::amqp9_0::queue::PurgeOk::new(message_count))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::queue::Unbind<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], pool: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,ticket: be_u16 >>
queue: call!(::common::shortstr) >>
exchange: call!(::common::shortstr) >>
routing_key: call!(::common::shortstr) >>
arguments: apply!(<::primitives::field::TableEntries as ::NomBytes>::nom_bytes, pool) >>
(::primitives::amqp9_0::queue::Unbind::new(ticket, queue, exchange, routing_key, arguments))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::queue::UnbindOk {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::queue::UnbindOk::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::queue::ClassMethod<'a> {
type Output = Self;
fn nom_bytes<'pool, P>(input: &'a [u8], pool: &'pool mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
switch!(input, be_u16,
20 => map!(
call!(<::primitives::amqp9_0::queue::Bind as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::queue::ClassMethod::Bind
) | // map
21 => map!(
call!(<::primitives::amqp9_0::queue::BindOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::queue::ClassMethod::BindOk
) | // map
10 => map!(
call!(<::primitives::amqp9_0::queue::Declare as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::queue::ClassMethod::Declare
) | // map
11 => map!(
call!(<::primitives::amqp9_0::queue::DeclareOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::queue::ClassMethod::DeclareOk
) | // map
40 => map!(
call!(<::primitives::amqp9_0::queue::Delete as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::queue::ClassMethod::Delete
) | // map
41 => map!(
call!(<::primitives::amqp9_0::queue::DeleteOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::queue::ClassMethod::DeleteOk
) | // map
30 => map!(
call!(<::primitives::amqp9_0::queue::Purge as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::queue::ClassMethod::Purge
) | // map
31 => map!(
call!(<::primitives::amqp9_0::queue::PurgeOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::queue::ClassMethod::PurgeOk
) | // map
50 => map!(
call!(<::primitives::amqp9_0::queue::Unbind as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::queue::ClassMethod::Unbind
) | // map
51 => map!(
call!(<::primitives::amqp9_0::queue::UnbindOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::queue::ClassMethod::UnbindOk
) // map!
) // switch!
} // fn nom_bytes
} // impl ::NomBytes for ::primitives::amqp9_0::queue::SpecMethod<'a>
// Class stream
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::stream::Cancel<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,consumer_tag: call!(::common::shortstr) >>
no_wait: bits!(call!(::common::bool_bit)) >>
(::primitives::amqp9_0::stream::Cancel::new(consumer_tag, no_wait))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::stream::CancelOk<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,consumer_tag: call!(::common::shortstr) >>
(::primitives::amqp9_0::stream::CancelOk::new(consumer_tag))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::stream::Consume<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], pool: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,ticket: be_u16 >>
queue: call!(::common::shortstr) >>
consumer_tag: call!(::common::shortstr) >>
flag1: bits!(tuple!(
call!(::common::bool_bit),
call!(::common::bool_bit),
call!(::common::bool_bit)
)) >>
filter: apply!(<::primitives::field::TableEntries as ::NomBytes>::nom_bytes, pool) >>
(::primitives::amqp9_0::stream::Consume::new(ticket, queue, consumer_tag, flag1.0, flag1.1, flag1.2, filter))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::stream::ConsumeOk<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,consumer_tag: call!(::common::shortstr) >>
(::primitives::amqp9_0::stream::ConsumeOk::new(consumer_tag))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::stream::Deliver<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,consumer_tag: call!(::common::shortstr) >>
delivery_tag: be_u64 >>
exchange: call!(::common::shortstr) >>
queue: call!(::common::shortstr) >>
(::primitives::amqp9_0::stream::Deliver::new(consumer_tag, delivery_tag, exchange, queue))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::stream::Publish<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,ticket: be_u16 >>
exchange: call!(::common::shortstr) >>
routing_key: call!(::common::shortstr) >>
flag1: bits!(tuple!(
call!(::common::bool_bit),
call!(::common::bool_bit)
)) >>
(::primitives::amqp9_0::stream::Publish::new(ticket, exchange, routing_key, flag1.0, flag1.1))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::stream::Qos {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,prefetch_size: be_u32 >>
prefetch_count: be_u16 >>
consume_rate: be_u32 >>
global: bits!(call!(::common::bool_bit)) >>
(::primitives::amqp9_0::stream::Qos::new(prefetch_size, prefetch_count, consume_rate, global))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::stream::QosOk {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::stream::QosOk::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::stream::Return<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,reply_code: be_u16 >>
reply_text: call!(::common::shortstr) >>
exchange: call!(::common::shortstr) >>
routing_key: call!(::common::shortstr) >>
(::primitives::amqp9_0::stream::Return::new(reply_code, reply_text, exchange, routing_key))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::stream::ClassMethod<'a> {
type Output = Self;
fn nom_bytes<'pool, P>(input: &'a [u8], pool: &'pool mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
switch!(input, be_u16,
30 => map!(
call!(<::primitives::amqp9_0::stream::Cancel as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::stream::ClassMethod::Cancel
) | // map
31 => map!(
call!(<::primitives::amqp9_0::stream::CancelOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::stream::ClassMethod::CancelOk
) | // map
20 => map!(
call!(<::primitives::amqp9_0::stream::Consume as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::stream::ClassMethod::Consume
) | // map
21 => map!(
call!(<::primitives::amqp9_0::stream::ConsumeOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::stream::ClassMethod::ConsumeOk
) | // map
60 => map!(
call!(<::primitives::amqp9_0::stream::Deliver as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::stream::ClassMethod::Deliver
) | // map
40 => map!(
call!(<::primitives::amqp9_0::stream::Publish as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::stream::ClassMethod::Publish
) | // map
10 => map!(
call!(<::primitives::amqp9_0::stream::Qos as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::stream::ClassMethod::Qos
) | // map
11 => map!(
call!(<::primitives::amqp9_0::stream::QosOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::stream::ClassMethod::QosOk
) | // map
50 => map!(
call!(<::primitives::amqp9_0::stream::Return as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::stream::ClassMethod::Return
) // map!
) // switch!
} // fn nom_bytes
} // impl ::NomBytes for ::primitives::amqp9_0::stream::SpecMethod<'a>
// Class tunnel
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::tunnel::Request<'a> {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], pool: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,meta_data: apply!(<::primitives::field::TableEntries as ::NomBytes>::nom_bytes, pool) >>
(::primitives::amqp9_0::tunnel::Request::new(meta_data))
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::tunnel::ClassMethod<'a> {
type Output = Self;
fn nom_bytes<'pool, P>(input: &'a [u8], pool: &'pool mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
switch!(input, be_u16,
10 => map!(
call!(<::primitives::amqp9_0::tunnel::Request as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::tunnel::ClassMethod::Request
) // map!
) // switch!
} // fn nom_bytes
} // impl ::NomBytes for ::primitives::amqp9_0::tunnel::SpecMethod<'a>
// Class tx
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::tx::Commit {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::tx::Commit::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::tx::CommitOk {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::tx::CommitOk::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::tx::Rollback {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::tx::Rollback::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::tx::RollbackOk {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::tx::RollbackOk::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::tx::Select {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::tx::Select::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::tx::SelectOk {
type Output = Self;
fn nom_bytes<'b, P>(input: &'a [u8], _: &'b mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
do_parse!(input,(::primitives::amqp9_0::tx::SelectOk::new())
) // do_parse!
} // fn nom_bytes
} // impl NomBytes
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::tx::ClassMethod {
type Output = Self;
fn nom_bytes<'pool, P>(input: &'a [u8], pool: &'pool mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
switch!(input, be_u16,
20 => map!(
call!(<::primitives::amqp9_0::tx::Commit as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::tx::ClassMethod::Commit
) | // map
21 => map!(
call!(<::primitives::amqp9_0::tx::CommitOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::tx::ClassMethod::CommitOk
) | // map
30 => map!(
call!(<::primitives::amqp9_0::tx::Rollback as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::tx::ClassMethod::Rollback
) | // map
31 => map!(
call!(<::primitives::amqp9_0::tx::RollbackOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::tx::ClassMethod::RollbackOk
) | // map
10 => map!(
call!(<::primitives::amqp9_0::tx::Select as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::tx::ClassMethod::Select
) | // map
11 => map!(
call!(<::primitives::amqp9_0::tx::SelectOk as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::tx::ClassMethod::SelectOk
) // map!
) // switch!
} // fn nom_bytes
} // impl ::NomBytes for ::primitives::amqp9_0::tx::SpecMethod<'a>
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::SpecMethod<'a> {
type Output = Self;
fn nom_bytes<'pool, P>(input: &'a [u8], pool: &'pool mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
switch!(input, be_u16,
30 => map!(
call!(<::primitives::amqp9_0::AccessMethod as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::SpecMethod::Access
) | // map
60 => map!(
call!(<::primitives::amqp9_0::BasicMethod as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::SpecMethod::Basic
) | // map
20 => map!(
call!(<::primitives::amqp9_0::ChannelMethod as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::SpecMethod::Channel
) | // map
10 => map!(
call!(<::primitives::amqp9_0::ConnectionMethod as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::SpecMethod::Connection
) | // map
100 => map!(
call!(<::primitives::amqp9_0::DtxMethod as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::SpecMethod::Dtx
) | // map
40 => map!(
call!(<::primitives::amqp9_0::ExchangeMethod as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::SpecMethod::Exchange
) | // map
70 => map!(
call!(<::primitives::amqp9_0::FileMethod as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::SpecMethod::File
) | // map
120 => map!(
call!(<::primitives::amqp9_0::MessageMethod as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::SpecMethod::Message
) | // map
50 => map!(
call!(<::primitives::amqp9_0::QueueMethod as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::SpecMethod::Queue
) | // map
80 => map!(
call!(<::primitives::amqp9_0::StreamMethod as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::SpecMethod::Stream
) | // map
110 => map!(
call!(<::primitives::amqp9_0::TunnelMethod as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::SpecMethod::Tunnel
) | // map
90 => map!(
call!(<::primitives::amqp9_0::TxMethod as ::NomBytes>::nom_bytes, pool),
::primitives::amqp9_0::SpecMethod::Tx
) // map!
) // switch!
} // fn nom_bytes
} // impl ::NomBytes for ::primitives::amqp9_0::SpecMethod<'a>
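// The Frame parser below dispatches on the frame-type octet (1 = method,
// 3 = body, 4 = OOB method, 6 = OOB body, 7 = trace, 8 = heartbeat) and wraps
// the payload in the matching FramePayload variant. A minimal driving sketch,
// assuming some `::pool::ParserPool` implementation is available
// (`NopParserPool` is a hypothetical name, not part of the API shown here):
//
//     let mut pool = NopParserPool::new();
//     match <::primitives::amqp9_0::Frame as ::NomBytes>::nom_bytes(buf, &mut pool) {
//         IResult::Done(rest, frame) => { /* consume frame, keep `rest` */ }
//         IResult::Incomplete(needed) => { /* read more bytes */ }
//         IResult::Error(err) => { /* protocol error */ }
//     }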
impl<'a> ::NomBytes<'a> for ::primitives::amqp9_0::Frame<'a> {
type Output = Self;
fn nom_bytes<'pool, P>(input: &'a [u8], pool: &'pool mut P) -> IResult<&'a [u8], Self>
where P: ::pool::ParserPool
{
switch!(input, be_u8,
3 => do_parse!(
channel: be_u16 >>
payload: map!(length_bytes!(be_u32), ::primitives::amqp9_0::FramePayload::Body) >>
(::primitives::amqp9_0::Frame::new(channel, payload))
) | // do_parse
8 => do_parse!(
payload: value!(
::primitives::amqp9_0::FramePayload::Heartbeat,
tag!(b"\x00\x00\xCE")
            ) >>
            channel: value!(0) >>
            (::primitives::amqp9_0::Frame::new(channel, payload))
) | // do_parse
1 => do_parse!(
channel: be_u16 >>
payload: map!(
length_value!(
be_u32,
call!(<::primitives::amqp9_0::SpecMethod as ::NomBytes>::nom_bytes, pool)
),
::primitives::amqp9_0::FramePayload::Method
) >> // map
(::primitives::amqp9_0::Frame::new(channel, payload))
) | // do_parse
6 => do_parse!(
channel: be_u16 >>
payload: map!(length_bytes!(be_u32), ::primitives::amqp9_0::FramePayload::OobBody) >>
(::primitives::amqp9_0::Frame::new(channel, payload))
) | // do_parse
4 => do_parse!(
channel: be_u16 >>
payload: map!(
length_value!(
be_u32,
call!(<::primitives::amqp9_0::SpecMethod as ::NomBytes>::nom_bytes, pool)
),
::primitives::amqp9_0::FramePayload::OobMethod
) >> // map
(::primitives::amqp9_0::Frame::new(channel, payload))
) | // do_parse
7 => do_parse!(
channel: be_u16 >>
payload: map!(length_bytes!(be_u32), ::primitives::amqp9_0::FramePayload::Trace) >>
(::primitives::amqp9_0::Frame::new(channel, payload))
) // do_parse
) // switch!
} // fn nom_bytes
} // impl NomBytes for ::primitives::amqp9_0::Frame<'a><|fim▁end|>
|
type Output = Self;
|
<|file_name|>u32.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Operations and constants for unsigned 32-bit integers (`u32` type)
#![doc(primitive = "u32")]
use from_str::FromStr;
use num::{ToStrRadix, FromStrRadix};
use num::strconv;
use option::Option;
use slice::ImmutableVector;
use string::String;
<|fim▁hole|><|fim▁end|>
|
pub use core::u32::{BITS, BYTES, MIN, MAX};
uint_module!(u32)
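// `uint_module!` is assumed here to expand to the FromStr/ToStrRadix
// implementations and tests shared by all unsigned integer modules; together
// with the re-export above it makes the usual constants available within this
// module, e.g. (inside a test):
//
//     assert_eq!(MAX, 4294967295);
//     assert_eq!(BITS, 32);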
|
<|file_name|>Plugin.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Plugin.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
class Plugin(dict):
"""A dictionary with attribute-style access. It maps attribute access to
the real dictionary. """
def __init__(self, init = None):
if init is None:
init = dict()
dict.__init__(self, init)<|fim▁hole|> return list(self.__dict__.items())
def __setstate__(self, items):
for key, val in items:
self.__dict__[key] = val
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, dict.__repr__(self))
def __setitem__(self, key, value):
return super(Plugin, self).__setitem__(key, value)
def __getitem__(self, name):
return super(Plugin, self).__getitem__(name)
def __delitem__(self, name):
return super(Plugin, self).__delitem__(name)
__getattr__ = __getitem__
__setattr__ = __setitem__<|fim▁end|>
|
def __getstate__(self):
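# A minimal usage sketch (based only on the class above): attribute access and
# item access share one underlying dict, and __getstate__/__setstate__ make
# the mapping picklable.
#
#     p = Plugin({'name': 'demo'})
#     p.version = 2            # equivalent to p['version'] = 2
#     assert p['version'] == p.version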
|
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>from django import forms
from django.forms.forms import BoundField
from .helpers import LMIForAllClient
from .fields import MultiCharField
class FieldSet(object):
"""
Taken from stackoverflow.com/questions/10366745/django-form-field-grouping
Helper class to group BoundField objects together.
"""
def __init__(self, form, fields, legend='', cls=None):
self.form = form
self.legend = legend
self.fields = fields
self.cls = cls
def __iter__(self):
for name in self.fields:
field = self.form.fields[name]
yield BoundField(self.form, field, name)
class NoColonForm(forms.Form):
"""
Removes the default colons from form labels.
"""
def __init__(self, *args, **kwargs):
kwargs.setdefault('label_suffix', '')
super().__init__(*args, **kwargs)
class BaseLMIForm(NoColonForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.lmi_client = LMIForAllClient()
class SectorForm(NoColonForm):
SECTOR_INPUT_COUNT = 3
sector = MultiCharField(
count=SECTOR_INPUT_COUNT,
label="How would you describe the types of jobs you could do?",
help_text=" eg customer services, security, data entry, driver",
require_all_fields=False,
error_messages={'required': 'Enter at least one job role', },
)
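# MultiCharField (imported from .fields above) is assumed to render
# SECTOR_INPUT_COUNT separate text inputs and clean them into a list of
# strings. A hedged usage sketch; the exact per-input data keys depend on
# MultiCharField's widget, which is not shown here:
#
#     form = SectorForm(data={'sector_0': 'driver', 'sector_1': '', 'sector_2': ''})
#     if form.is_valid():
#         keywords = form.cleaned_data['sector']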
class JobDescriptionsForm(BaseLMIForm):
def __init__(self, *args, **kwargs):
keywords = kwargs['keywords']
del kwargs['keywords']
super().__init__(*args, **kwargs)
self.fieldsets = []
self._add_fields_from_keywords(keywords)
def _add_fields_from_keywords(self, keywords):
for keyword in keywords:
if keyword:
soc_codes = []
lmi_data = self.lmi_client.keyword_search(keyword)
count = 6
for item in lmi_data[:count]:
soc_code = str(item['soc'])
if soc_code not in soc_codes:
soc_codes.append(soc_code)
field = forms.BooleanField(
widget=forms.CheckboxInput,
label=item['title'],
help_text=item['description'],
required=False,
)
self.fields[soc_code] = field
self.fieldsets.append(FieldSet(<|fim▁hole|>
def clean(self):
cleaned_data = super().clean()
if not any(cleaned_data.values()):
raise forms.ValidationError(
"Please select at least one job title",
code='invalid'
)
return cleaned_data<|fim▁end|>
|
self, list(soc_codes), keyword))
|
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>/*
* MaiKe Labs (2016 - 2026)
*
* Written by Jack Tan <[email protected]>
*
* Connect VCC of the SSD1306 OLED to 3.3V
* Connect GND to Ground
* Connect SCL to i2c clock - GPIO21
* Connect SDA to i2c data - GPIO22
* Connect DC to GND (The scanned i2c address is 0x3C)
*
*/
#include <stdio.h>
#include "freertos/FreeRTOS.h"
#include "freertos/task.h"
#include "esp_system.h"
#include "nvs_flash.h"
#include "U8glib.h"
U8GLIB_SSD1306_128X64 u8g(U8G_I2C_OPT_NONE); // I2C / TWI
void draw(void)
{
u8g.setFont(u8g_font_unifont);
u8g.drawStr(0, 22, "Hello World!");
}
void ssd1306_task(void *pvParameter)<|fim▁hole|>{
// assign default color value
if (u8g.getMode() == U8G_MODE_R3G3B2) {
u8g.setColorIndex(255); // white
} else if (u8g.getMode() == U8G_MODE_GRAY2BIT) {
u8g.setColorIndex(3); // max intensity
} else if (u8g.getMode() == U8G_MODE_BW) {
u8g.setColorIndex(1); // pixel on
} else if (u8g.getMode() == U8G_MODE_HICOLOR) {
u8g.setHiColorByRGB(255, 255, 255);
}
while(1) {
// picture loop
u8g.firstPage();
do {
draw();
} while (u8g.nextPage());
draw();
vTaskDelay(1000 / portTICK_RATE_MS);
}
}
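// Hedged note on the task above: u8glib's page-buffered picture loop redraws
// the whole scene once per display page, trading CPU time for a small RAM
// footprint. The stack depth (2048) and priority (5) passed to xTaskCreate()
// in app_main() below are working assumptions for this demo, not tuned values.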
extern "C" void app_main()
{
nvs_flash_init();
printf("Welcome to Noduino!\r\n");
printf("Start to test SSD1306 OLED!\r\n");
xTaskCreate(&ssd1306_task, "ssd1306_task", 2048, NULL, 5, NULL);
}<|fim▁end|>
| |
<|file_name|>split_queue_test.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The Cockroach Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License. See the AUTHORS file
// for names of contributors.
//
// Author: Spencer Kimball ([email protected])
package storage
import (
"math"
"testing"
"github.com/cockroachdb/cockroach/config"
"github.com/cockroachdb/cockroach/gossip"
"github.com/cockroachdb/cockroach/proto"
"github.com/cockroachdb/cockroach/storage/engine"
"github.com/cockroachdb/cockroach/util/leaktest"
gogoproto "github.com/gogo/protobuf/proto"
)
var config1, config2 gogoproto.Message
// TestSplitQueueShouldQueue verifies shouldQueue method correctly
// combines splits in accounting and zone configs with the size of
// the range.
func TestSplitQueueShouldQueue(t *testing.T) {
defer leaktest.AfterTest(t)
tc := testContext{}
tc.Start(t)
defer tc.Stop()
// Set accounting and zone configs.
acctMap, err := config.NewPrefixConfigMap([]*config.PrefixConfig{
{proto.KeyMin, nil, config1},
{proto.Key("/dbA"), nil, config2},
})
if err != nil {
t.Fatal(err)
}
if err := tc.gossip.AddInfo(gossip.KeyConfigAccounting, acctMap, 0); err != nil {
t.Fatal(err)
}
zoneMap, err := config.NewPrefixConfigMap([]*config.PrefixConfig{
{proto.KeyMin, nil, &config.ZoneConfig{RangeMaxBytes: 64 << 20}},
{proto.Key("/dbB"), nil, &config.ZoneConfig{RangeMaxBytes: 64 << 20}},
})
if err != nil {
t.Fatal(err)
}
if err := tc.gossip.AddInfo(gossip.KeyConfigZone, zoneMap, 0); err != nil {
t.Fatal(err)
}
testCases := []struct {
start, end proto.Key
bytes int64
shouldQ bool
priority float64
}{
// No intersection, no bytes.
{proto.KeyMin, proto.Key("/"), 0, false, 0},
// Intersection in accounting, no bytes.
{proto.Key("/"), proto.Key("/dbA1"), 0, true, 1},
// Intersection in zone, no bytes.
{proto.Key("/dbA"), proto.Key("/dbC"), 0, true, 1},
// Multiple intersections, no bytes.
{proto.KeyMin, proto.KeyMax, 0, true, 1},
// No intersection, max bytes.
{proto.KeyMin, proto.Key("/"), 64 << 20, false, 0},
// No intersection, max bytes+1.
{proto.KeyMin, proto.Key("/"), 64<<20 + 1, true, 1},
// No intersection, max bytes * 2.
{proto.KeyMin, proto.Key("/"), 64 << 21, true, 2},
// Intersection, max bytes +1.
{proto.KeyMin, proto.KeyMax, 64<<20 + 1, true, 2},
}
splitQ := newSplitQueue(nil, tc.gossip)
for i, test := range testCases {
if err := tc.rng.stats.SetMVCCStats(tc.rng.rm.Engine(), engine.MVCCStats{KeyBytes: test.bytes}); err != nil {
t.Fatal(err)<|fim▁hole|> copy.EndKey = test.end
if err := tc.rng.setDesc(©); err != nil {
t.Fatal(err)
}
shouldQ, priority := splitQ.shouldQueue(proto.ZeroTimestamp, tc.rng)
if shouldQ != test.shouldQ {
t.Errorf("%d: should queue expected %t; got %t", i, test.shouldQ, shouldQ)
}
if math.Abs(priority-test.priority) > 0.00001 {
t.Errorf("%d: priority expected %f; got %f", i, test.priority, priority)
}
}
}
////
// NOTE: tests which actually verify processing of the split queue are
// in client_split_test.go, which is in a different test package in
// order to allow for distributed transactions with a proper client.<|fim▁end|>
|
}
copy := *tc.rng.Desc()
copy.StartKey = test.start
|
<|file_name|>IosHeart.js<|end_file_name|><|fim▁begin|>import React from 'react';
import IconBase from './../components/IconBase/IconBase';
export default class IosHeart extends React.Component {
render() {
if(this.props.bare) {
return <g>
<path d="M359.385,80C319.966,80,277.171,97.599,256,132.8C234.83,97.599,192.034,80,152.615,80C83.647,80,32,123.238,32,195.779
c0,31.288,12.562,71.924,40.923,105.657c28.359,33.735,45.229,51.7,100.153,88C228,425.738,256,432,256,432s28-6.262,82.924-42.564
c54.923-36.3,71.794-54.265,100.153-88C467.438,267.703,480,227.067,480,195.779C480,123.238,428.353,80,359.385,80z"></path>
</g>;
} return <IconBase>
<path d="M359.385,80C319.966,80,277.171,97.599,256,132.8C234.83,97.599,192.034,80,152.615,80C83.647,80,32,123.238,32,195.779
c0,31.288,12.562,71.924,40.923,105.657c28.359,33.735,45.229,51.7,100.153,88C228,425.738,256,432,256,432s28-6.262,82.924-42.564
c54.923-36.3,71.794-54.265,100.153-88C467.438,267.703,480,227.067,480,195.779C480,123.238,428.353,80,359.385,80z"></path>
</IconBase>;
}<|fim▁hole|><|fim▁end|>
|
};IosHeart.defaultProps = {bare: false}
|
<|file_name|>student-routing.module.ts<|end_file_name|><|fim▁begin|>import {NgModule} from '@angular/core';
import {Routes, RouterModule} from '@angular/router';
import {StudentListComponent} from './student-list/student-list.component';
import {StudentDetailComponent} from './student-detail/student-detail.component';
import {StudentComponent} from './student/student.component';
import {AuthGuardService} from '../shared/auth/auth-guard.service';
const studentRoutes: Routes = [
{
path: '', component: StudentListComponent
},
// {
// path: 'detail', component: StudentDetailComponent, children: [
// {path: ':studentId', component: StudentComponent}
// ]
// },
{
path: 'add', canActivate: [AuthGuardService], component: StudentDetailComponent
},
{
path: 'delete/:studentId', canActivate: [AuthGuardService], component: StudentComponent
},<|fim▁hole|> path: 'detail/:studentId', canActivate: [AuthGuardService], component: StudentComponent
},
{
path: 'update/:studentId', canActivate: [AuthGuardService], component: StudentDetailComponent
}
];
@NgModule({
imports: [RouterModule.forChild(studentRoutes)],
exports: [RouterModule]
})
export class StudentRoutingModule {
}<|fim▁end|>
|
{
|
<|file_name|>gofish.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from __future__ import print_function
from builtins import input
import sys
import pmagpy.pmag as pmag
def main():
"""
NAME
gofish.py
DESCRIPTION
calculates fisher parameters from dec inc data
INPUT FORMAT
takes dec/inc as first two columns in space delimited file
SYNTAX
gofish.py [options] [< filename]
OPTIONS
-h prints help message and quits
-i for interactive filename entry
-f FILE, specify input file
-F FILE, specifies output file name
< filename for reading from standard input
OUTPUT
mean dec, mean inc, N, R, k, a95, csd
"""
if '-h' in sys.argv: # check if help is needed
print(main.__doc__)
sys.exit() # graceful quit
if '-i' in sys.argv: # ask for filename
file=input("Enter file name with dec, inc data: ")
f=open(file,'r')
data=f.readlines()
elif '-f' in sys.argv:
dat=[]
ind=sys.argv.index('-f')
file=sys.argv[ind+1]
f=open(file,'r')
data=f.readlines()
else:
data = sys.stdin.readlines() # read from standard input
ofile = ""
if '-F' in sys.argv:
ind = sys.argv.index('-F')
ofile= sys.argv[ind+1]
        out = open(ofile, 'a')  # the original mode string 'w + a' is not valid; append mode appears intended
DIs= [] # set up list for dec inc data
for line in data: # read in the data from standard input
if '\t' in line:
rec=line.split('\t') # split each line on space to get records
else:
rec=line.split() # split each line on space to get records
DIs.append((float(rec[0]),float(rec[1])))<|fim▁hole|> outstring='%7.1f %7.1f %i %10.4f %8.1f %7.1f %7.1f'%(fpars['dec'],fpars['inc'],fpars['n'],fpars['r'],fpars['k'],fpars['alpha95'], fpars['csd'])
if ofile == "":
print(outstring)
else:
out.write(outstring+'\n')
#
if __name__ == "__main__":
main()<|fim▁end|>
|
#
fpars=pmag.fisher_mean(DIs)
|
<|file_name|>command.rs<|end_file_name|><|fim▁begin|>// Copyright (C) 2018 Pietro Albini
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
use std::env;
use std::io::{BufRead, BufReader};
use std::net::SocketAddr;
use std::ops::{Deref, DerefMut};
use std::path::PathBuf;
use std::process;
use nix::sys::signal::{kill, Signal};
use nix::unistd::Pid;
use regex::Regex;
use common::config::Config;
use common::prelude::*;
lazy_static! {
static ref ADDR_RE: Regex = Regex::new(r"127\.0\.0\.1:[0-9]+").unwrap();
}
fn binaries_path() -> Result<PathBuf> {
let mut current = env::current_exe()?;
current.pop();
if current.ends_with("deps") {
current.pop();
}
Ok(current)
}
#[allow(dead_code)]
pub enum Stream {
Stdout,
Stderr,
}
pub struct Command {
child: process::Child,
stdout: BufReader<process::ChildStdout>,
stderr: BufReader<process::ChildStderr>,
}
impl Command {
pub fn new(binary: &str, args: &[&str]) -> Result<Self> {
let mut child = process::Command::new(binaries_path()?.join(binary))
.args(args)
.stdout(process::Stdio::piped())
.stderr(process::Stdio::piped())
.spawn()?;
Ok(Command {
stdout: BufReader::new(child.stdout.take().unwrap()),
stderr: BufReader::new(child.stderr.take().unwrap()),
child,
})
}
pub fn capture_line(
&mut self,
content: &str,
stream: Stream,
) -> Result<String> {
let reader = match stream {
Stream::Stdout => &mut self.stdout as &mut BufRead,
Stream::Stderr => &mut self.stderr as &mut BufRead,
};
let mut buffer = String::new();
loop {
buffer.clear();
reader.read_line(&mut buffer)?;
if buffer.contains(content) {
break;
}
}
buffer.shrink_to_fit();
Ok(buffer)
}
pub fn signal(&mut self, signal: Signal) -> Result<()> {
kill(Pid::from_raw(self.child.id() as i32), signal)?;
Ok(())
}
pub fn stop(&mut self) -> Result<()> {
self.signal(Signal::SIGTERM)?;
self.wait()
}
pub fn wait(&mut self) -> Result<()> {
self.child.wait()?;
Ok(())
}
}
pub struct FisherCommand {
inner: Command,
}
impl FisherCommand {
pub fn new(config: &Config) -> Result<Self> {
Ok(FisherCommand {
inner: Command::new("fisher", &[
config.save()?.to_str().unwrap(),
])?,
})
}
pub fn server_addr(&mut self) -> Result<SocketAddr> {
let line = self.capture_line("listening", Stream::Stdout)?;
let captures = ADDR_RE.captures(&line).unwrap();
println!("{:?}", captures);<|fim▁hole|>impl Deref for FisherCommand {
type Target = Command;
fn deref(&self) -> &Command {
&self.inner
}
}
impl DerefMut for FisherCommand {
fn deref_mut(&mut self) -> &mut Command {
&mut self.inner
}
}<|fim▁end|>
|
Ok((&captures[0]).parse()?)
}
}
|
<|file_name|>Exit.java<|end_file_name|><|fim▁begin|>package edu.isep.sixcolors.view.listener;
import edu.isep.sixcolors.model.Config;
import javax.swing.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
/**
 * A popup window asking if the player wants to exit Six Colors
*/
public class Exit implements ActionListener {
@Override
public void actionPerformed(ActionEvent e) {
int option = JOptionPane.showConfirmDialog(
null,
Config.EXIT_MESSAGE,
Config.EXIT_TITLE,
JOptionPane.YES_NO_OPTION,
JOptionPane.QUESTION_MESSAGE
);
if (option == JOptionPane.YES_OPTION){
System.exit(0);
}
}<|fim▁hole|><|fim▁end|>
|
}
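// Hedged wiring sketch: this listener is meant to be attached to a Swing
// control, for example:
//
//     JMenuItem quit = new JMenuItem("Quit");
//     quit.addActionListener(new Exit());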
|
<|file_name|>test_ironic.py<|end_file_name|><|fim▁begin|>#
# Copyright 2014 Red Hat, Inc
#
# Author: Chris Dent <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for producing IPMI sample messages from notification events.
"""
import mock
from oslotest import base
from ceilometer.ipmi.notifications import ironic as ipmi
from ceilometer import sample
from ceilometer.tests.ipmi.notifications import ipmi_test_data
class TestNotifications(base.BaseTestCase):
def test_ipmi_temperature_notification(self):
"""Test IPMI Temperature sensor data.
Based on the above ipmi_testdata the expected sample for a single
temperature reading has::
* a resource_id composed from the node_uuid Sensor ID
* a name composed from 'hardware.ipmi.' and 'temperature'
* a volume from the first chunk of the Sensor Reading
* a unit from the last chunk of the Sensor Reading
* some readings are skipped if the value is 'Disabled'
* metatata with the node id
"""
processor = ipmi.TemperatureSensorNotification(None)
counters = dict([(counter.resource_id, counter) for counter in
processor.process_notification(
ipmi_test_data.SENSOR_DATA)])
self.assertEqual(10, len(counters),
'expected 10 temperature readings')
resource_id = (
'f4982fd2-2f2b-4bb5-9aff-48aac801d1ad-dimm_gh_vr_temp_(0x3b)'
)
test_counter = counters[resource_id]
self.assertEqual(26.0, test_counter.volume)
self.assertEqual('C', test_counter.unit)
self.assertEqual(sample.TYPE_GAUGE, test_counter.type)
self.assertEqual('hardware.ipmi.temperature', test_counter.name)
self.assertEqual('hardware.ipmi.metrics.update',
test_counter.resource_metadata['event_type'])
self.assertEqual('f4982fd2-2f2b-4bb5-9aff-48aac801d1ad',
test_counter.resource_metadata['node'])
def test_ipmi_current_notification(self):
"""Test IPMI Current sensor data.
A single current reading is effectively the same as temperature,
modulo "current".
"""
processor = ipmi.CurrentSensorNotification(None)
counters = dict([(counter.resource_id, counter) for counter in
processor.process_notification(
ipmi_test_data.SENSOR_DATA)])
self.assertEqual(1, len(counters), 'expected 1 current reading')
resource_id = (
'f4982fd2-2f2b-4bb5-9aff-48aac801d1ad-avg_power_(0x2e)'
)
test_counter = counters[resource_id]
self.assertEqual(130.0, test_counter.volume)
self.assertEqual('W', test_counter.unit)
self.assertEqual(sample.TYPE_GAUGE, test_counter.type)
self.assertEqual('hardware.ipmi.current', test_counter.name)
def test_ipmi_fan_notification(self):
"""Test IPMI Fan sensor data.
A single fan reading is effectively the same as temperature,
modulo "fan".
"""
processor = ipmi.FanSensorNotification(None)
counters = dict([(counter.resource_id, counter) for counter in
processor.process_notification(
ipmi_test_data.SENSOR_DATA)])
self.assertEqual(12, len(counters), 'expected 12 fan readings')
resource_id = (
'f4982fd2-2f2b-4bb5-9aff-48aac801d1ad-fan_4a_tach_(0x46)'
)
test_counter = counters[resource_id]
self.assertEqual(6900.0, test_counter.volume)
self.assertEqual('RPM', test_counter.unit)
self.assertEqual(sample.TYPE_GAUGE, test_counter.type)
self.assertEqual('hardware.ipmi.fan', test_counter.name)
def test_ipmi_voltage_notification(self):
"""Test IPMI Voltage sensor data.
A single voltage reading is effectively the same as temperature,<|fim▁hole|> """
processor = ipmi.VoltageSensorNotification(None)
counters = dict([(counter.resource_id, counter) for counter in
processor.process_notification(
ipmi_test_data.SENSOR_DATA)])
        self.assertEqual(4, len(counters), 'expected 4 voltage readings')
resource_id = (
'f4982fd2-2f2b-4bb5-9aff-48aac801d1ad-planar_vbat_(0x1c)'
)
test_counter = counters[resource_id]
self.assertEqual(3.137, test_counter.volume)
self.assertEqual('V', test_counter.unit)
self.assertEqual(sample.TYPE_GAUGE, test_counter.type)
self.assertEqual('hardware.ipmi.voltage', test_counter.name)
def test_disabed_skips_metric(self):
"""Test that a meter which a disabled volume is skipped."""
processor = ipmi.TemperatureSensorNotification(None)
counters = dict([(counter.resource_id, counter) for counter in
processor.process_notification(
ipmi_test_data.SENSOR_DATA)])
self.assertEqual(10, len(counters),
'expected 10 temperature readings')
resource_id = (
'f4982fd2-2f2b-4bb5-9aff-48aac801d1ad-mezz_card_temp_(0x35)'
)
self.assertNotIn(resource_id, counters)
def test_empty_payload_no_metrics_success(self):
processor = ipmi.TemperatureSensorNotification(None)
counters = dict([(counter.resource_id, counter) for counter in
processor.process_notification(
ipmi_test_data.EMPTY_PAYLOAD)])
self.assertEqual(0, len(counters), 'expected 0 readings')
@mock.patch('ceilometer.ipmi.notifications.ironic.LOG')
def test_missing_sensor_data(self, mylog):
processor = ipmi.TemperatureSensorNotification(None)
messages = []
mylog.warn = lambda *args: messages.extend(args)
list(processor.process_notification(ipmi_test_data.MISSING_SENSOR))
self.assertEqual(
'invalid sensor data for '
'f4982fd2-2f2b-4bb5-9aff-48aac801d1ad-pci_riser_1_temp_(0x33): '
"missing 'Sensor Reading' in payload",
messages[0]
)
@mock.patch('ceilometer.ipmi.notifications.ironic.LOG')
def test_sensor_data_malformed(self, mylog):
processor = ipmi.TemperatureSensorNotification(None)
messages = []
mylog.warn = lambda *args: messages.extend(args)
list(processor.process_notification(ipmi_test_data.BAD_SENSOR))
self.assertEqual(
'invalid sensor data for '
'f4982fd2-2f2b-4bb5-9aff-48aac801d1ad-pci_riser_1_temp_(0x33): '
'unable to parse sensor reading: some bad stuff',
messages[0]
)
@mock.patch('ceilometer.ipmi.notifications.ironic.LOG')
def test_missing_node_uuid(self, mylog):
"""Test for desired error message when 'node_uuid' missing.
Presumably this will never happen given the way the data
is created, but better defensive than dead.
"""
processor = ipmi.TemperatureSensorNotification(None)
messages = []
mylog.warn = lambda *args: messages.extend(args)
list(processor.process_notification(ipmi_test_data.NO_NODE_ID))
self.assertEqual(
'invalid sensor data for missing id: missing key in payload: '
"'node_uuid'",
messages[0]
)
@mock.patch('ceilometer.ipmi.notifications.ironic.LOG')
def test_missing_sensor_id(self, mylog):
"""Test for desired error message when 'Sensor ID' missing."""
processor = ipmi.TemperatureSensorNotification(None)
messages = []
mylog.warn = lambda *args: messages.extend(args)
list(processor.process_notification(ipmi_test_data.NO_SENSOR_ID))
self.assertEqual(
'invalid sensor data for missing id: missing key in payload: '
"'Sensor ID'",
messages[0]
)<|fim▁end|>
|
modulo "voltage".
|
<|file_name|>display_list_builder.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Builds display lists from flows and fragments.
//!
//! Other browser engines sometimes call this "painting", but it is more accurately called display
//! list building, as the actual painting does not happen here—only deciding *what* we're going to
//! paint.
#![deny(unsafe_code)]
use app_units::{AU_PER_PX, Au};
use azure::azure_hl::Color;
use block::{BlockFlow, BlockStackingContextType};
use canvas_traits::{CanvasData, CanvasMsg, FromLayoutMsg};
use context::SharedLayoutContext;
use euclid::{Matrix4D, Point2D, Radians, Rect, SideOffsets2D, Size2D};
use flex::FlexFlow;
use flow::{BaseFlow, Flow, IS_ABSOLUTELY_POSITIONED};
use flow_ref;
use fragment::{CoordinateSystem, Fragment, ImageFragmentInfo, ScannedTextFragmentInfo};
use fragment::SpecificFragmentInfo;
use gfx::display_list::{BLUR_INFLATION_FACTOR, BaseDisplayItem, BorderDisplayItem};
use gfx::display_list::{BorderRadii, BoxShadowClipMode, BoxShadowDisplayItem, ClippingRegion};
use gfx::display_list::{DisplayItem, DisplayItemMetadata, DisplayListSection, GradientDisplayItem};
use gfx::display_list::{GradientStop, IframeDisplayItem, ImageDisplayItem, WebGLDisplayItem};
use gfx::display_list::{LineDisplayItem, OpaqueNode};
use gfx::display_list::{SolidColorDisplayItem, StackingContext, StackingContextType};
use gfx::display_list::{TextDisplayItem, TextOrientation, WebRenderImageInfo};
use gfx_traits::{ScrollPolicy, ScrollRootId, StackingContextId, color};
use inline::{FIRST_FRAGMENT_OF_ELEMENT, InlineFlow, LAST_FRAGMENT_OF_ELEMENT};
use ipc_channel::ipc;
use list_item::ListItemFlow;
use model::{self, MaybeAuto, ToGfxMatrix};
use net_traits::image::base::PixelFormat;
use net_traits::image_cache_thread::UsePlaceholder;
use range::Range;
use script_layout_interface::restyle_damage::REPAINT;
use std::{cmp, f32};
use std::default::Default;
use std::sync::Arc;
use style::computed_values::{background_attachment, background_clip, background_origin};
use style::computed_values::{background_repeat, background_size, border_style};
use style::computed_values::{cursor, image_rendering, overflow_x, pointer_events, position};
use style::computed_values::{transform, transform_style, visibility};
use style::computed_values::_servo_overflow_clip_box as overflow_clip_box;
use style::computed_values::filter::Filter;
use style::computed_values::text_shadow::TextShadow;
use style::logical_geometry::{LogicalPoint, LogicalRect, LogicalSize, WritingMode};
use style::properties::{self, ServoComputedValues};
use style::properties::style_structs;
use style::values::RGBA;
use style::values::computed;
use style::values::computed::{Gradient, GradientKind, LengthOrNone, LengthOrPercentage, LengthOrPercentageOrAuto};
use style::values::specified::{AngleOrCorner, HorizontalDirection, VerticalDirection};
use style_traits::cursor::Cursor;
use table_cell::CollapsedBordersForCell;
use url::Url;
use util::opts;
static THREAD_TINT_COLORS: [Color; 8] = [
Color { r: 6.0 / 255.0, g: 153.0 / 255.0, b: 198.0 / 255.0, a: 0.7 },
Color { r: 255.0 / 255.0, g: 212.0 / 255.0, b: 83.0 / 255.0, a: 0.7 },
Color { r: 116.0 / 255.0, g: 29.0 / 255.0, b: 109.0 / 255.0, a: 0.7 },
Color { r: 204.0 / 255.0, g: 158.0 / 255.0, b: 199.0 / 255.0, a: 0.7 },
Color { r: 242.0 / 255.0, g: 46.0 / 255.0, b: 121.0 / 255.0, a: 0.7 },
Color { r: 116.0 / 255.0, g: 203.0 / 255.0, b: 196.0 / 255.0, a: 0.7 },
Color { r: 255.0 / 255.0, g: 249.0 / 255.0, b: 201.0 / 255.0, a: 0.7 },
Color { r: 137.0 / 255.0, g: 196.0 / 255.0, b: 78.0 / 255.0, a: 0.7 },
];
fn get_cyclic<T>(arr: &[T], index: usize) -> &T {
&arr[index % arr.len()]
}
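// For example, `get_cyclic(&THREAD_TINT_COLORS, 9)` wraps around to index 1;
// the debug thread-tinting paths that consume these colors rely on this
// wrap-around to give every paint thread a stable color.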
pub struct DisplayListBuildState<'a> {
pub shared_layout_context: &'a SharedLayoutContext,
pub items: Vec<DisplayItem>,
pub stacking_context_id_stack: Vec<StackingContextId>,
pub scroll_root_id_stack: Vec<ScrollRootId>,
}
impl<'a> DisplayListBuildState<'a> {
pub fn new(shared_layout_context: &'a SharedLayoutContext,
stacking_context_id: StackingContextId)
-> DisplayListBuildState<'a> {
DisplayListBuildState {
shared_layout_context: shared_layout_context,
items: Vec::new(),
stacking_context_id_stack: vec!(stacking_context_id),
scroll_root_id_stack: vec!(ScrollRootId::root()),
}
}
fn add_display_item(&mut self, display_item: DisplayItem) {
self.items.push(display_item);
}
pub fn stacking_context_id(&self) -> StackingContextId {
self.stacking_context_id_stack.last().unwrap().clone()
}
pub fn push_stacking_context_id(&mut self, stacking_context_id: StackingContextId) {
self.stacking_context_id_stack.push(stacking_context_id);
}
pub fn pop_stacking_context_id(&mut self) {
self.stacking_context_id_stack.pop();
assert!(!self.stacking_context_id_stack.is_empty());
}
pub fn scroll_root_id(&mut self) -> ScrollRootId {
self.scroll_root_id_stack.last().unwrap().clone()
}
pub fn push_scroll_root_id(&mut self, id: ScrollRootId) {
self.scroll_root_id_stack.push(id);
}
pub fn pop_scroll_root_id(&mut self) {
self.scroll_root_id_stack.pop();
assert!(!self.scroll_root_id_stack.is_empty());
}
fn create_base_display_item(&self,
bounds: &Rect<Au>,
clip: &ClippingRegion,
node: OpaqueNode,
cursor: Option<Cursor>,
section: DisplayListSection)
-> BaseDisplayItem {
BaseDisplayItem::new(&bounds,
DisplayItemMetadata {
node: node,
pointing: cursor,
},
&clip,
section,
self.stacking_context_id())
}
}
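// Hedged usage note: the two ID stacks above are kept balanced by matching
// push_*/pop_* calls while the flow tree is traversed, e.g.
//
//     state.push_stacking_context_id(child_id);
//     // ... build display items for the child subtree ...
//     state.pop_stacking_context_id();
//
// so stacking_context_id() and scroll_root_id() always report the innermost
// enclosing context.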
/// The logical width of an insertion point: at the moment, a one-pixel-wide line.
const INSERTION_POINT_LOGICAL_WIDTH: Au = Au(1 * AU_PER_PX);
// TODO(gw): The transforms spec says that perspective length must
// be positive. However, there is some confusion between the spec
// and browser implementations as to handling the case of 0 for the
// perspective value. Until the spec bug is resolved, at least ensure
// that a provided perspective value of <= 0.0 doesn't cause panics
// and behaves as it does in other browsers.
// See https://lists.w3.org/Archives/Public/www-style/2016Jan/0020.html for more details.
#[inline]
fn create_perspective_matrix(d: Au) -> Matrix4D<f32> {
let d = d.to_f32_px();
if d <= 0.0 {
Matrix4D::identity()
} else {
Matrix4D::create_perspective(d)
}
}
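// Hedged example of the guard above: `create_perspective_matrix(Au(0))`
// returns the identity matrix instead of a degenerate projection, matching
// the lenient handling of non-positive perspective values described in the
// linked www-style thread.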
pub trait FragmentDisplayListBuilding {
/// Adds the display items necessary to paint the background of this fragment to the display
/// list if necessary.
fn build_display_list_for_background_if_applicable(&self,
state: &mut DisplayListBuildState,
style: &ServoComputedValues,
display_list_section: DisplayListSection,
absolute_bounds: &Rect<Au>,
clip: &ClippingRegion);
/// Computes the background size for an image with the given background area according to the
/// rules in CSS-BACKGROUNDS § 3.9.
fn compute_background_image_size(&self,
style: &ServoComputedValues,
bounds: &Rect<Au>,
image: &WebRenderImageInfo, index: usize)
-> Size2D<Au>;
/// Adds the display items necessary to paint the background image of this fragment to the
/// appropriate section of the display list.
fn build_display_list_for_background_image(&self,
state: &mut DisplayListBuildState,
style: &ServoComputedValues,
display_list_section: DisplayListSection,
absolute_bounds: &Rect<Au>,
clip: &ClippingRegion,
image_url: &Url,
background_index: usize);
/// Adds the display items necessary to paint the background linear gradient of this fragment
/// to the appropriate section of the display list.
fn build_display_list_for_background_gradient(&self,
state: &mut DisplayListBuildState,
display_list_section: DisplayListSection,
absolute_bounds: &Rect<Au>,
clip: &ClippingRegion,
gradient: &Gradient,
style: &ServoComputedValues);
/// Adds the display items necessary to paint the borders of this fragment to a display list if
/// necessary.
fn build_display_list_for_borders_if_applicable(
&self,
state: &mut DisplayListBuildState,
style: &ServoComputedValues,
border_painting_mode: BorderPaintingMode,
bounds: &Rect<Au>,
display_list_section: DisplayListSection,
clip: &ClippingRegion);
/// Adds the display items necessary to paint the outline of this fragment to the display list
/// if necessary.
fn build_display_list_for_outline_if_applicable(&self,
state: &mut DisplayListBuildState,
style: &ServoComputedValues,
bounds: &Rect<Au>,
clip: &ClippingRegion);
/// Adds the display items necessary to paint the box shadow of this fragment to the display
/// list if necessary.
fn build_display_list_for_box_shadow_if_applicable(&self,
state: &mut DisplayListBuildState,
style: &ServoComputedValues,
display_list_section: DisplayListSection,
absolute_bounds: &Rect<Au>,
clip: &ClippingRegion);
/// Adds display items necessary to draw debug boxes around a scanned text fragment.
fn build_debug_borders_around_text_fragments(&self,
state: &mut DisplayListBuildState,
style: &ServoComputedValues,
stacking_relative_border_box: &Rect<Au>,
stacking_relative_content_box: &Rect<Au>,
text_fragment: &ScannedTextFragmentInfo,
clip: &ClippingRegion);
/// Adds display items necessary to draw debug boxes around this fragment.
fn build_debug_borders_around_fragment(&self,
state: &mut DisplayListBuildState,
stacking_relative_border_box: &Rect<Au>,
clip: &ClippingRegion);
/// Adds the display items for this fragment to the given display list.
///
/// Arguments:
///
/// * `state`: The display building state, including the display list currently
/// under construction and other metadata useful for constructing it.
/// * `stacking_relative_flow_origin`: Position of the origin of the owning flow with respect
/// to its nearest ancestor stacking context.
/// * `relative_containing_block_size`: The size of the containing block that
/// `position: relative` makes use of.
/// * `clip`: The region to clip the display items to.
fn build_display_list(&mut self,
state: &mut DisplayListBuildState,
stacking_relative_flow_origin: &Point2D<Au>,
relative_containing_block_size: &LogicalSize<Au>,
relative_containing_block_mode: WritingMode,
border_painting_mode: BorderPaintingMode,
display_list_section: DisplayListSection,
clip: &ClippingRegion);
/// Adjusts the clipping region for all descendants of this fragment as appropriate.
fn adjust_clipping_region_for_children(&self,
current_clip: &mut ClippingRegion,
stacking_relative_border_box: &Rect<Au>);
/// Adjusts the clipping rectangle for a fragment to take the `clip` property into account
/// per CSS 2.1 § 11.1.2.
fn adjust_clip_for_style(&self,
parent_clip: &mut ClippingRegion,
stacking_relative_border_box: &Rect<Au>);
/// Builds the display items necessary to paint the selection and/or caret for this fragment,
/// if any.
fn build_display_items_for_selection_if_necessary(&self,
state: &mut DisplayListBuildState,
stacking_relative_border_box: &Rect<Au>,
display_list_section: DisplayListSection,
clip: &ClippingRegion);
/// Creates the text display item for one text fragment. This can be called multiple times for
/// one fragment if there are text shadows.
///
/// `text_shadow` will be `Some` if this is rendering a shadow.
fn build_display_list_for_text_fragment(&self,
state: &mut DisplayListBuildState,
text_fragment: &ScannedTextFragmentInfo,
stacking_relative_content_box: &Rect<Au>,
text_shadow: Option<&TextShadow>,
clip: &ClippingRegion);
/// Creates the display item for a text decoration: underline, overline, or line-through.
fn build_display_list_for_text_decoration(&self,
state: &mut DisplayListBuildState,
color: &RGBA,
stacking_relative_box: &LogicalRect<Au>,
clip: &ClippingRegion,
blur_radius: Au);
/// A helper method that `build_display_list` calls to create per-fragment-type display items.
fn build_fragment_type_specific_display_items(&mut self,
state: &mut DisplayListBuildState,
stacking_relative_border_box: &Rect<Au>,
clip: &ClippingRegion);
    /// Creates a stacking context for the associated fragment.
fn create_stacking_context(&self,
id: StackingContextId,
base_flow: &BaseFlow,
scroll_policy: ScrollPolicy,
mode: StackingContextCreationMode,
scroll_root_id: Option<ScrollRootId>)
-> StackingContext;
/// Returns the 4D matrix representing this fragment's transform.
fn transform_matrix(&self, stacking_relative_border_box: &Rect<Au>) -> Matrix4D<f32>;
}
fn handle_overlapping_radii(size: &Size2D<Au>, radii: &BorderRadii<Au>) -> BorderRadii<Au> {
// No two corners' border radii may add up to more than the length of the edge
// between them. To prevent that, all radii are scaled down uniformly.
fn scale_factor(radius_a: Au, radius_b: Au, edge_length: Au) -> f32 {
let required = radius_a + radius_b;
if required <= edge_length {
1.0
} else {
edge_length.to_f32_px() / required.to_f32_px()
}
}
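    // For example, 60px top-left and 60px top-right radii on a 100px-wide edge
    // need 120px, so the top factor is 100/120; the smallest factor across all
    // four edges is applied to every radius.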
let top_factor = scale_factor(radii.top_left.width, radii.top_right.width, size.width);
let bottom_factor = scale_factor(radii.bottom_left.width, radii.bottom_right.width, size.width);
let left_factor = scale_factor(radii.top_left.height, radii.bottom_left.height, size.height);
let right_factor = scale_factor(radii.top_right.height, radii.bottom_right.height, size.height);
let min_factor = top_factor.min(bottom_factor).min(left_factor).min(right_factor);
if min_factor < 1.0 {
radii.scale_by(min_factor)
} else {
*radii
}
}
fn build_border_radius(abs_bounds: &Rect<Au>, border_style: &style_structs::Border) -> BorderRadii<Au> {
// TODO(cgaebel): Support border radii even in the case of multiple border widths.
// This is an extension of supporting elliptical radii. For now, all percentage
// radii will be relative to the width.
handle_overlapping_radii(&abs_bounds.size, &BorderRadii {
top_left: model::specified_border_radius(border_style.border_top_left_radius,
abs_bounds.size.width),
top_right: model::specified_border_radius(border_style.border_top_right_radius,
abs_bounds.size.width),
bottom_right: model::specified_border_radius(border_style.border_bottom_right_radius,
abs_bounds.size.width),
bottom_left: model::specified_border_radius(border_style.border_bottom_left_radius,
abs_bounds.size.width),
})
}
impl FragmentDisplayListBuilding for Fragment {
fn build_display_list_for_background_if_applicable(&self,
state: &mut DisplayListBuildState,
style: &ServoComputedValues,
display_list_section: DisplayListSection,
absolute_bounds: &Rect<Au>,
clip: &ClippingRegion) {
// Adjust the clipping region as necessary to account for `border-radius`.
let border_radii = build_border_radius(absolute_bounds, style.get_border());
let mut clip = (*clip).clone();
if !border_radii.is_square() {
clip.intersect_with_rounded_rect(absolute_bounds, &border_radii)
}
let background = style.get_background();
// FIXME: This causes a lot of background colors to be displayed when they are clearly not
// needed. We could use display list optimization to clean this up, but it still seems
// inefficient. What we really want is something like "nearest ancestor element that
// doesn't have a fragment".
let background_color = style.resolve_color(background.background_color);
// 'background-clip' determines the area within which the background is painted.
// http://dev.w3.org/csswg/css-backgrounds-3/#the-background-clip
let mut bounds = *absolute_bounds;
// This is the clip for the color (which is the last element in the bg array)
let color_clip = get_cyclic(&background.background_clip.0,
background.background_image.0.len() - 1);
match *color_clip {
background_clip::single_value::T::border_box => {}
background_clip::single_value::T::padding_box => {
let border = style.logical_border_width().to_physical(style.writing_mode);
bounds.origin.x = bounds.origin.x + border.left;
bounds.origin.y = bounds.origin.y + border.top;
bounds.size.width = bounds.size.width - border.horizontal();
bounds.size.height = bounds.size.height - border.vertical();
}
background_clip::single_value::T::content_box => {
let border_padding = self.border_padding.to_physical(style.writing_mode);
bounds.origin.x = bounds.origin.x + border_padding.left;
bounds.origin.y = bounds.origin.y + border_padding.top;
bounds.size.width = bounds.size.width - border_padding.horizontal();
bounds.size.height = bounds.size.height - border_padding.vertical();
}
}
let base = state.create_base_display_item(&bounds,
&clip,
self.node,
style.get_cursor(Cursor::Default),
display_list_section);
state.add_display_item(
DisplayItem::SolidColor(box SolidColorDisplayItem {
base: base,
color: background_color.to_gfx_color(),
}));
// The background image is painted on top of the background color.
// Implements background image, per spec:
// http://www.w3.org/TR/CSS21/colors.html#background
let background = style.get_background();
for (i, background_image) in background.background_image.0.iter().enumerate().rev() {
match background_image.0 {
None => {}
Some(computed::Image::Gradient(ref gradient)) => {
// FIXME: Radial gradients aren't implemented yet.
if let GradientKind::Linear(_) = gradient.gradient_kind {
self.build_display_list_for_background_gradient(state,
display_list_section,
&bounds,
&clip,
gradient,
style);
}
}
Some(computed::Image::Url(ref image_url, ref _extra_data)) => {
self.build_display_list_for_background_image(state,
style,
display_list_section,
&bounds,
&clip,
image_url,
i);
}
}
}
}
fn compute_background_image_size(&self,
style: &ServoComputedValues,
bounds: &Rect<Au>,
image: &WebRenderImageInfo,
index: usize)
-> Size2D<Au> {
// If `image_aspect_ratio` < `bounds_aspect_ratio`, the image is tall; otherwise, it is
// wide.
let image_aspect_ratio = (image.width as f64) / (image.height as f64);
let bounds_aspect_ratio = bounds.size.width.to_f64_px() / bounds.size.height.to_f64_px();
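        // For example, a 200x100 image in a 100x100 area is "wide" (2.0 > 1.0),
        // so `contain` yields 100x50 below while `cover` yields 200x100.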
let intrinsic_size = Size2D::new(Au::from_px(image.width as i32),
Au::from_px(image.height as i32));
let background_size = get_cyclic(&style.get_background().background_size.0, index).clone();
match (background_size, image_aspect_ratio < bounds_aspect_ratio) {
(background_size::single_value::T::Contain, false) |
(background_size::single_value::T::Cover, true) => {
Size2D::new(bounds.size.width,
Au::from_f64_px(bounds.size.width.to_f64_px() / image_aspect_ratio))
}
(background_size::single_value::T::Contain, true) |
(background_size::single_value::T::Cover, false) => {
Size2D::new(Au::from_f64_px(bounds.size.height.to_f64_px() * image_aspect_ratio),
bounds.size.height)
}
(background_size::single_value::T::Explicit(background_size::single_value
::ExplicitSize {
width,
height: LengthOrPercentageOrAuto::Auto,
}), _) => {
let width = MaybeAuto::from_style(width, bounds.size.width)
.specified_or_default(intrinsic_size.width);
Size2D::new(width, Au::from_f64_px(width.to_f64_px() / image_aspect_ratio))
}
(background_size::single_value::T::Explicit(background_size::single_value
::ExplicitSize {
width: LengthOrPercentageOrAuto::Auto,
height
}), _) => {
let height = MaybeAuto::from_style(height, bounds.size.height)
.specified_or_default(intrinsic_size.height);
Size2D::new(Au::from_f64_px(height.to_f64_px() * image_aspect_ratio), height)
}
(background_size::single_value::T::Explicit(background_size::single_value
::ExplicitSize {
width,
height
}), _) => {
Size2D::new(MaybeAuto::from_style(width, bounds.size.width)
.specified_or_default(intrinsic_size.width),
MaybeAuto::from_style(height, bounds.size.height)
.specified_or_default(intrinsic_size.height))
}
}
}
fn build_display_list_for_background_image(&self,
state: &mut DisplayListBuildState,
style: &ServoComputedValues,
display_list_section: DisplayListSection,
absolute_bounds: &Rect<Au>,
clip: &ClippingRegion,
image_url: &Url,
index: usize) {
let background = style.get_background();
let webrender_image = state.shared_layout_context
.get_webrender_image_for_url(image_url,
UsePlaceholder::No);
if let Some(webrender_image) = webrender_image {
debug!("(building display list) building background image");
// Use `background-size` to get the size.
let mut bounds = *absolute_bounds;
let image_size = self.compute_background_image_size(style, &bounds,
&webrender_image, index);
// Clip.
//
// TODO: Check the bounds to see if a clip item is actually required.
let mut clip = clip.clone();
clip.intersect_rect(&bounds);
// Background image should be positioned on the padding box basis.
let border = style.logical_border_width().to_physical(style.writing_mode);
// Use 'background-origin' to get the origin value.
let origin = get_cyclic(&background.background_origin.0, index);
let (mut origin_x, mut origin_y) = match *origin {
background_origin::single_value::T::padding_box => {
(Au(0), Au(0))
}
background_origin::single_value::T::border_box => {
(-border.left, -border.top)
}
background_origin::single_value::T::content_box => {
let border_padding = self.border_padding.to_physical(self.style.writing_mode);
(border_padding.left - border.left, border_padding.top - border.top)
}
};
// Use `background-attachment` to get the initial virtual origin
let attachment = get_cyclic(&background.background_attachment.0, index);
let (virtual_origin_x, virtual_origin_y) = match *attachment {
background_attachment::single_value::T::scroll => {
(absolute_bounds.origin.x, absolute_bounds.origin.y)
}
background_attachment::single_value::T::fixed => {
// If the ‘background-attachment’ value for this image is ‘fixed’, then
// 'background-origin' has no effect.
origin_x = Au(0);
origin_y = Au(0);
(Au(0), Au(0))
}
};
let position = *get_cyclic(&background.background_position.0, index);
// Use `background-position` to get the offset.
let horizontal_position = model::specified(position.horizontal,
bounds.size.width - image_size.width);
let vertical_position = model::specified(position.vertical,
bounds.size.height - image_size.height);
// The anchor position for this background, based on both the background-attachment
// and background-position properties.
let anchor_origin_x = border.left + virtual_origin_x + origin_x + horizontal_position;
let anchor_origin_y = border.top + virtual_origin_y + origin_y + vertical_position;
let mut tile_spacing = Size2D::zero();
let mut stretch_size = image_size;
// Adjust origin and size based on background-repeat
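        // (`tile_image` grows the bounds along one axis so that whole tiles cover
        // the painted area while staying phase-aligned with the anchor origin.)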
match *get_cyclic(&background.background_repeat.0, index) {
background_repeat::single_value::T::no_repeat => {
bounds.origin.x = anchor_origin_x;
bounds.origin.y = anchor_origin_y;
bounds.size.width = image_size.width;
bounds.size.height = image_size.height;
}
background_repeat::single_value::T::repeat_x => {
bounds.origin.y = anchor_origin_y;
bounds.size.height = image_size.height;
ImageFragmentInfo::tile_image(&mut bounds.origin.x,
&mut bounds.size.width,
anchor_origin_x,
image_size.width);
}
background_repeat::single_value::T::repeat_y => {
bounds.origin.x = anchor_origin_x;
bounds.size.width = image_size.width;
ImageFragmentInfo::tile_image(&mut bounds.origin.y,
&mut bounds.size.height,
anchor_origin_y,
image_size.height);
}
background_repeat::single_value::T::repeat => {
ImageFragmentInfo::tile_image(&mut bounds.origin.x,
&mut bounds.size.width,
anchor_origin_x,
image_size.width);
ImageFragmentInfo::tile_image(&mut bounds.origin.y,
&mut bounds.size.height,
anchor_origin_y,
image_size.height);
}
background_repeat::single_value::T::space => {
ImageFragmentInfo::tile_image_spaced(&mut bounds.origin.x,
&mut bounds.size.width,
&mut tile_spacing.width,
anchor_origin_x,
image_size.width);
ImageFragmentInfo::tile_image_spaced(&mut bounds.origin.y,
&mut bounds.size.height,
&mut tile_spacing.height,
anchor_origin_y,
image_size.height);
}
background_repeat::single_value::T::round => {
ImageFragmentInfo::tile_image_round(&mut bounds.origin.x,
&mut bounds.size.width,
anchor_origin_x,
&mut stretch_size.width);
ImageFragmentInfo::tile_image_round(&mut bounds.origin.y,
&mut bounds.size.height,
anchor_origin_y,
&mut stretch_size.height);
}
};
// Create the image display item.
let base = state.create_base_display_item(&bounds,
&clip,
self.node,
style.get_cursor(Cursor::Default),
display_list_section);
state.add_display_item(DisplayItem::Image(box ImageDisplayItem {
base: base,
webrender_image: webrender_image,
image_data: None,
stretch_size: stretch_size,
tile_spacing: tile_spacing,
image_rendering: style.get_inheritedbox().image_rendering.clone(),
}));
}
}
fn build_display_list_for_background_gradient(&self,
state: &mut DisplayListBuildState,
display_list_section: DisplayListSection,
absolute_bounds: &Rect<Au>,
clip: &ClippingRegion,
gradient: &Gradient,
style: &ServoComputedValues) {
let mut clip = clip.clone();
clip.intersect_rect(absolute_bounds);
// FIXME: Repeating gradients aren't implemented yet.
if gradient.repeating {
return;
}
let angle = if let GradientKind::Linear(angle_or_corner) = gradient.gradient_kind {
match angle_or_corner {
AngleOrCorner::Angle(angle) => angle.radians(),
AngleOrCorner::Corner(horizontal, vertical) => {
                    // This is the angle for one of the diagonals of the box. Our angle
// will either be this one, this one + PI, or one of the other
// two perpendicular angles.
let atan = (absolute_bounds.size.height.to_f32_px() /
absolute_bounds.size.width.to_f32_px()).atan();
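                    // For a square box, atan is PI/4; Corner(Right, Bottom) then
                    // yields 3*PI/4, and dir = (sin, -cos) below points down and
                    // to the right (y grows downward).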
match (horizontal, vertical) {
(HorizontalDirection::Right, VerticalDirection::Bottom)
=> f32::consts::PI - atan,
(HorizontalDirection::Left, VerticalDirection::Bottom)
=> f32::consts::PI + atan,
(HorizontalDirection::Right, VerticalDirection::Top)
=> atan,
(HorizontalDirection::Left, VerticalDirection::Top)
=> -atan,
}
}
}
} else {
// FIXME: Radial gradients aren't implemented yet.
return;
};
// Get correct gradient line length, based on:
// https://drafts.csswg.org/css-images-3/#linear-gradients
let dir = Point2D::new(angle.sin(), -angle.cos());
let line_length = (dir.x * absolute_bounds.size.width.to_f32_px()).abs() +
(dir.y * absolute_bounds.size.height.to_f32_px()).abs();
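        // This matches the spec formula abs(W * sin(angle)) + abs(H * cos(angle)),
        // since dir = (sin(angle), -cos(angle)).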
let inv_dir_length = 1.0 / (dir.x * dir.x + dir.y * dir.y).sqrt();
// This is the vector between the center and the ending point; i.e. half
// of the distance between the starting point and the ending point.
let delta = Point2D::new(Au::from_f32_px(dir.x * inv_dir_length * line_length / 2.0),
Au::from_f32_px(dir.y * inv_dir_length * line_length / 2.0));
// This is the length of the gradient line.
let length = Au::from_f32_px(
(delta.x.to_f32_px() * 2.0).hypot(delta.y.to_f32_px() * 2.0));
// Determine the position of each stop per CSS-IMAGES § 3.4.
//
// FIXME(#3908, pcwalton): Make sure later stops can't be behind earlier stops.
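        //
        // For example, three unpositioned stops form a single run from 0.0 to 1.0
        // and land at offsets 0.0, 0.5, and 1.0.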
let mut stops = Vec::with_capacity(gradient.stops.len());
let mut stop_run = None;
for (i, stop) in gradient.stops.iter().enumerate() {
let offset = match stop.position {
None => {
if stop_run.is_none() {
// Initialize a new stop run.
let start_offset = if i == 0 {
0.0
} else {
// `unwrap()` here should never fail because this is the beginning of
// a stop run, which is always bounded by a length or percentage.
position_to_offset(gradient.stops[i - 1].position.unwrap(), length)
};
let (end_index, end_offset) =
match gradient.stops[i..]
.iter()
.enumerate()
.find(|&(_, ref stop)| stop.position.is_some()) {
None => (gradient.stops.len() - 1, 1.0),
Some((end_index, end_stop)) => {
// `unwrap()` here should never fail because this is the end of
// a stop run, which is always bounded by a length or
// percentage.
(end_index,
position_to_offset(end_stop.position.unwrap(), length))
}
};
stop_run = Some(StopRun {
start_offset: start_offset,
end_offset: end_offset,
start_index: i,
stop_count: end_index - i,
})
}
let stop_run = stop_run.unwrap();
let stop_run_length = stop_run.end_offset - stop_run.start_offset;
if stop_run.stop_count == 0 {
stop_run.end_offset
} else {
stop_run.start_offset +
stop_run_length * (i - stop_run.start_index) as f32 /
(stop_run.stop_count as f32)
}
}
Some(position) => {
stop_run = None;
position_to_offset(position, length)
}
};
stops.push(GradientStop {
offset: offset,
color: style.resolve_color(stop.color).to_gfx_color()
})
}
let center = Point2D::new(absolute_bounds.origin.x + absolute_bounds.size.width / 2,
absolute_bounds.origin.y + absolute_bounds.size.height / 2);
let base = state.create_base_display_item(absolute_bounds,
&clip,
self.node,
style.get_cursor(Cursor::Default),
display_list_section);
let gradient_display_item = DisplayItem::Gradient(box GradientDisplayItem {
base: base,
start_point: center - delta,
end_point: center + delta,
stops: stops,
});
state.add_display_item(gradient_display_item);
}
fn build_display_list_for_box_shadow_if_applicable(&self,
state: &mut DisplayListBuildState,
style: &ServoComputedValues,
display_list_section: DisplayListSection,
absolute_bounds: &Rect<Au>,
clip: &ClippingRegion) {
// NB: According to CSS-BACKGROUNDS, box shadows render in *reverse* order (front to back).
for box_shadow in style.get_effects().box_shadow.0.iter().rev() {
let bounds =
shadow_bounds(&absolute_bounds.translate(&Point2D::new(box_shadow.offset_x,
box_shadow.offset_y)),
box_shadow.blur_radius,
box_shadow.spread_radius);
// TODO(pcwalton): Multiple border radii; elliptical border radii.
let base = state.create_base_display_item(&bounds,
&clip,
self.node,
style.get_cursor(Cursor::Default),
display_list_section);
state.add_display_item(DisplayItem::BoxShadow(box BoxShadowDisplayItem {
base: base,
box_bounds: *absolute_bounds,
color: style.resolve_color(box_shadow.color).to_gfx_color(),
offset: Point2D::new(box_shadow.offset_x, box_shadow.offset_y),
blur_radius: box_shadow.blur_radius,
spread_radius: box_shadow.spread_radius,
border_radius: model::specified_border_radius(style.get_border()
.border_top_left_radius,
absolute_bounds.size.width).width,
clip_mode: if box_shadow.inset {
BoxShadowClipMode::Inset
} else {
BoxShadowClipMode::Outset
},
}));
}
}
fn build_display_list_for_borders_if_applicable(
&self,
state: &mut DisplayListBuildState,
style: &ServoComputedValues,
border_painting_mode: BorderPaintingMode,
bounds: &Rect<Au>,
display_list_section: DisplayListSection,
clip: &ClippingRegion) {
let mut border = style.logical_border_width();
match border_painting_mode {
BorderPaintingMode::Separate => {}
BorderPaintingMode::Collapse(collapsed_borders) => {
collapsed_borders.adjust_border_widths_for_painting(&mut border)
}
BorderPaintingMode::Hidden => return,
}
if border.is_zero() {
return
}
let border_style_struct = style.get_border();
let mut colors = SideOffsets2D::new(border_style_struct.border_top_color,
border_style_struct.border_right_color,
border_style_struct.border_bottom_color,
border_style_struct.border_left_color);
let mut border_style = SideOffsets2D::new(border_style_struct.border_top_style,
border_style_struct.border_right_style,
border_style_struct.border_bottom_style,
border_style_struct.border_left_style);
if let BorderPaintingMode::Collapse(collapsed_borders) = border_painting_mode {
collapsed_borders.adjust_border_colors_and_styles_for_painting(&mut colors,
&mut border_style,
style.writing_mode);
}
let colors = SideOffsets2D::new(style.resolve_color(colors.top),
style.resolve_color(colors.right),
style.resolve_color(colors.bottom),
style.resolve_color(colors.left));
// If this border collapses, then we draw outside the boundaries we were given.
let mut bounds = *bounds;
if let BorderPaintingMode::Collapse(collapsed_borders) = border_painting_mode {
collapsed_borders.adjust_border_bounds_for_painting(&mut bounds, style.writing_mode)
}
// Append the border to the display list.
let base = state.create_base_display_item(&bounds,
&clip,
self.node,
style.get_cursor(Cursor::Default),
display_list_section);
state.add_display_item(DisplayItem::Border(box BorderDisplayItem {
base: base,
border_widths: border.to_physical(style.writing_mode),
color: SideOffsets2D::new(colors.top.to_gfx_color(),
colors.right.to_gfx_color(),
colors.bottom.to_gfx_color(),
colors.left.to_gfx_color()),
style: border_style,
radius: build_border_radius(&bounds, border_style_struct),
}));
}
fn build_display_list_for_outline_if_applicable(&self,
state: &mut DisplayListBuildState,
style: &ServoComputedValues,
bounds: &Rect<Au>,
clip: &ClippingRegion) {
let width = style.get_outline().outline_width;
if width == Au(0) {
return
}
let outline_style = style.get_outline().outline_style;
if outline_style == border_style::T::none {
return
}
// Outlines are not accounted for in the dimensions of the border box, so adjust the
// absolute bounds.
let mut bounds = *bounds;
let offset = width + style.get_outline().outline_offset;
bounds.origin.x = bounds.origin.x - offset;
bounds.origin.y = bounds.origin.y - offset;
bounds.size.width = bounds.size.width + offset + offset;
bounds.size.height = bounds.size.height + offset + offset;
// Append the outline to the display list.
let color = style.resolve_color(style.get_outline().outline_color).to_gfx_color();
let base = state.create_base_display_item(&bounds,
&clip,
self.node,
style.get_cursor(Cursor::Default),
DisplayListSection::Outlines);
state.add_display_item(DisplayItem::Border(box BorderDisplayItem {
base: base,
border_widths: SideOffsets2D::new_all_same(width),
color: SideOffsets2D::new_all_same(color),
style: SideOffsets2D::new_all_same(outline_style),
radius: Default::default(),
}));
}
fn build_debug_borders_around_text_fragments(&self,
state: &mut DisplayListBuildState,
style: &ServoComputedValues,
stacking_relative_border_box: &Rect<Au>,
stacking_relative_content_box: &Rect<Au>,
text_fragment: &ScannedTextFragmentInfo,
clip: &ClippingRegion) {
// FIXME(pcwalton, #2795): Get the real container size.
let container_size = Size2D::zero();
// Compute the text fragment bounds and draw a border surrounding them.
let base = state.create_base_display_item(stacking_relative_border_box,
clip,
self.node,
style.get_cursor(Cursor::Default),
DisplayListSection::Content);
state.add_display_item(DisplayItem::Border(box BorderDisplayItem {
base: base,
border_widths: SideOffsets2D::new_all_same(Au::from_px(1)),
color: SideOffsets2D::new_all_same(color::rgb(0, 0, 200)),
style: SideOffsets2D::new_all_same(border_style::T::solid),
radius: Default::default(),
}));
// Draw a rectangle representing the baselines.
let mut baseline = LogicalRect::from_physical(self.style.writing_mode,
*stacking_relative_content_box,
container_size);
baseline.start.b = baseline.start.b + text_fragment.run.ascent();
baseline.size.block = Au(0);
let baseline = baseline.to_physical(self.style.writing_mode, container_size);
let base = state.create_base_display_item(&baseline,
clip,
self.node,
style.get_cursor(Cursor::Default),
DisplayListSection::Content);
state.add_display_item(DisplayItem::Line(box LineDisplayItem {
base: base,
color: color::rgb(0, 200, 0),
style: border_style::T::dashed,
}));
}
fn build_debug_borders_around_fragment(&self,
state: &mut DisplayListBuildState,
stacking_relative_border_box: &Rect<Au>,
clip: &ClippingRegion) {
        // This draws a debug border around this fragment's border box.
let base = state.create_base_display_item(stacking_relative_border_box,
clip,
self.node,
self.style.get_cursor(Cursor::Default),
DisplayListSection::Content);
state.add_display_item(DisplayItem::Border(box BorderDisplayItem {
base: base,
border_widths: SideOffsets2D::new_all_same(Au::from_px(1)),
color: SideOffsets2D::new_all_same(color::rgb(0, 0, 200)),
style: SideOffsets2D::new_all_same(border_style::T::solid),
radius: Default::default(),
}));
}
fn adjust_clip_for_style(&self,
parent_clip: &mut ClippingRegion,
stacking_relative_border_box: &Rect<Au>) {
// Account for `clip` per CSS 2.1 § 11.1.2.
let style_clip_rect = match (self.style().get_box().position,
self.style().get_effects().clip.0) {
(position::T::absolute, Some(style_clip_rect)) => style_clip_rect,
_ => return,
};
// FIXME(pcwalton, #2795): Get the real container size.
let clip_origin = Point2D::new(stacking_relative_border_box.origin.x + style_clip_rect.left,
stacking_relative_border_box.origin.y + style_clip_rect.top);
let right = style_clip_rect.right.unwrap_or(stacking_relative_border_box.size.width);
let bottom = style_clip_rect.bottom.unwrap_or(stacking_relative_border_box.size.height);
let clip_size = Size2D::new(right - clip_origin.x, bottom - clip_origin.y);
parent_clip.intersect_rect(&Rect::new(clip_origin, clip_size))
}
fn build_display_items_for_selection_if_necessary(&self,
state: &mut DisplayListBuildState,
stacking_relative_border_box: &Rect<Au>,
display_list_section: DisplayListSection,
clip: &ClippingRegion) {
let scanned_text_fragment_info = match self.specific {
SpecificFragmentInfo::ScannedText(ref scanned_text_fragment_info) => {
scanned_text_fragment_info
}
_ => return,
};
// Draw a highlighted background if the text is selected.
//
// TODO: Allow non-text fragments to be selected too.
if scanned_text_fragment_info.selected() {
let style = self.selected_style();
let background_color = style.resolve_color(style.get_background().background_color);
let base = state.create_base_display_item(stacking_relative_border_box,
&clip,
self.node,
self.style.get_cursor(Cursor::Default),
display_list_section);
state.add_display_item(
DisplayItem::SolidColor(box SolidColorDisplayItem {
base: base,
color: background_color.to_gfx_color(),
}));
}
// Draw a caret at the insertion point.
let insertion_point_index = match scanned_text_fragment_info.insertion_point {
Some(insertion_point_index) => insertion_point_index,
None => return,
};
let range = Range::new(scanned_text_fragment_info.range.begin(),
insertion_point_index - scanned_text_fragment_info.range.begin());
let advance = scanned_text_fragment_info.run.advance_for_range(&range);
let insertion_point_bounds;
let cursor;
if !self.style.writing_mode.is_vertical() {
insertion_point_bounds =
Rect::new(Point2D::new(stacking_relative_border_box.origin.x + advance,
stacking_relative_border_box.origin.y),
Size2D::new(INSERTION_POINT_LOGICAL_WIDTH,
stacking_relative_border_box.size.height));
cursor = Cursor::Text;
} else {
insertion_point_bounds =
Rect::new(Point2D::new(stacking_relative_border_box.origin.x,
stacking_relative_border_box.origin.y + advance),
Size2D::new(stacking_relative_border_box.size.width,
INSERTION_POINT_LOGICAL_WIDTH));
cursor = Cursor::VerticalText;
};
let base = state.create_base_display_item(&insertion_point_bounds,
&clip,
self.node,
self.style.get_cursor(cursor),
display_list_section);
state.add_display_item(DisplayItem::SolidColor(box SolidColorDisplayItem {
base: base,
color: self.style().get_color().color.to_gfx_color(),
}));
}
fn build_display_list(&mut self,
state: &mut DisplayListBuildState,
stacking_relative_flow_origin: &Point2D<Au>,
relative_containing_block_size: &LogicalSize<Au>,
relative_containing_block_mode: WritingMode,
border_painting_mode: BorderPaintingMode,
display_list_section: DisplayListSection,
clip: &ClippingRegion) {
self.restyle_damage.remove(REPAINT);
if self.style().get_inheritedbox().visibility != visibility::T::visible {
return
}
// Compute the fragment position relative to the parent stacking context. If the fragment
// itself establishes a stacking context, then the origin of its position will be (0, 0)
// for the purposes of this computation.
let stacking_relative_border_box =
self.stacking_relative_border_box(stacking_relative_flow_origin,
relative_containing_block_size,
relative_containing_block_mode,
CoordinateSystem::Own);
debug!("Fragment::build_display_list at rel={:?}, abs={:?}, flow origin={:?}: {:?}",
self.border_box,
stacking_relative_border_box,
stacking_relative_flow_origin,
self);
// Check the clip rect. If there's nothing to render at all, don't even construct display
// list items.
let empty_rect = !clip.might_intersect_rect(&stacking_relative_border_box);
if self.is_primary_fragment() && !empty_rect {
// Add shadows, background, borders, and outlines, if applicable.
if let Some(ref inline_context) = self.inline_context {
for node in inline_context.nodes.iter().rev() {
self.build_display_list_for_background_if_applicable(
state,
&*node.style,
display_list_section,
&stacking_relative_border_box,
clip);
self.build_display_list_for_box_shadow_if_applicable(
state,
&*node.style,
display_list_section,
&stacking_relative_border_box,
clip);
let mut style = node.style.clone();
properties::modify_border_style_for_inline_sides(
&mut style,
node.flags.contains(FIRST_FRAGMENT_OF_ELEMENT),
node.flags.contains(LAST_FRAGMENT_OF_ELEMENT));
self.build_display_list_for_borders_if_applicable(
state,
&*style,
border_painting_mode,
&stacking_relative_border_box,
display_list_section,
clip);
self.build_display_list_for_outline_if_applicable(
state,
&*node.style,
&stacking_relative_border_box,
clip);
}
}
if !self.is_scanned_text_fragment() {
self.build_display_list_for_background_if_applicable(state,
&*self.style,
display_list_section,
&stacking_relative_border_box,
clip);
self.build_display_list_for_box_shadow_if_applicable(state,
&*self.style,
display_list_section,
&stacking_relative_border_box,
clip);
self.build_display_list_for_borders_if_applicable(state,
&*self.style,
border_painting_mode,
&stacking_relative_border_box,
display_list_section,
clip);
self.build_display_list_for_outline_if_applicable(state,
&*self.style,
&stacking_relative_border_box,
clip);
}
}
if self.is_primary_fragment() {
// Paint the selection point if necessary. Even an empty text fragment may have an
// insertion point, so we do this even if `empty_rect` is true.
self.build_display_items_for_selection_if_necessary(state,
&stacking_relative_border_box,
display_list_section,
clip);
}
if empty_rect {
return
}
debug!("Fragment::build_display_list: intersected. Adding display item...");
// Create special per-fragment-type display items.
self.build_fragment_type_specific_display_items(state,
&stacking_relative_border_box,
clip);
if opts::get().show_debug_fragment_borders {
self.build_debug_borders_around_fragment(state, &stacking_relative_border_box, clip)
}
}
fn build_fragment_type_specific_display_items(&mut self,
state: &mut DisplayListBuildState,
stacking_relative_border_box: &Rect<Au>,
clip: &ClippingRegion) {
// Compute the context box position relative to the parent stacking context.
let stacking_relative_content_box =
self.stacking_relative_content_box(stacking_relative_border_box);
match self.specific {
SpecificFragmentInfo::ScannedText(ref text_fragment) => {
// Create items for shadows.
//
// NB: According to CSS-BACKGROUNDS, text shadows render in *reverse* order (front
// to back).
for text_shadow in self.style.get_inheritedtext().text_shadow.0.iter().rev() {
self.build_display_list_for_text_fragment(state,
&**text_fragment,
&stacking_relative_content_box,
Some(text_shadow),
clip);
}
// Create the main text display item.
self.build_display_list_for_text_fragment(state,
&**text_fragment,
&stacking_relative_content_box,
None,
clip);
if opts::get().show_debug_fragment_borders {
self.build_debug_borders_around_text_fragments(state,
self.style(),
stacking_relative_border_box,
&stacking_relative_content_box,
&**text_fragment,
clip);
}
}
SpecificFragmentInfo::Generic |
SpecificFragmentInfo::GeneratedContent(..) |
SpecificFragmentInfo::Table |
SpecificFragmentInfo::TableCell |
SpecificFragmentInfo::TableRow |
SpecificFragmentInfo::TableWrapper |
SpecificFragmentInfo::Multicol |
SpecificFragmentInfo::MulticolColumn |
SpecificFragmentInfo::InlineBlock(_) |
SpecificFragmentInfo::InlineAbsoluteHypothetical(_) |
SpecificFragmentInfo::InlineAbsolute(_) |
SpecificFragmentInfo::Svg(_) => {
if opts::get().show_debug_fragment_borders {
self.build_debug_borders_around_fragment(state,
stacking_relative_border_box,
clip);
}
}
SpecificFragmentInfo::Iframe(ref fragment_info) => {
if !stacking_relative_content_box.is_empty() {
let base = state.create_base_display_item(
&stacking_relative_content_box,
clip,
self.node,
self.style.get_cursor(Cursor::Default),
DisplayListSection::Content);
let item = DisplayItem::Iframe(box IframeDisplayItem {
base: base,
iframe: fragment_info.pipeline_id,
});
state.add_display_item(item);
}
}
SpecificFragmentInfo::Image(ref mut image_fragment) => {
// Place the image into the display list.
if let Some(ref image) = image_fragment.image {
let base = state.create_base_display_item(
&stacking_relative_content_box,
clip,
self.node,
self.style.get_cursor(Cursor::Default),
DisplayListSection::Content);
state.add_display_item(DisplayItem::Image(box ImageDisplayItem {
base: base,
webrender_image: WebRenderImageInfo::from_image(image),
image_data: Some(Arc::new(image.bytes.clone())),
stretch_size: stacking_relative_content_box.size,
tile_spacing: Size2D::zero(),
image_rendering: self.style.get_inheritedbox().image_rendering.clone(),
}));
}
}
SpecificFragmentInfo::Canvas(ref canvas_fragment_info) => {
let width = canvas_fragment_info.replaced_image_fragment_info
.computed_inline_size.map_or(0, |w| w.to_px() as usize);
let height = canvas_fragment_info.replaced_image_fragment_info
.computed_block_size.map_or(0, |h| h.to_px() as usize);
if width > 0 && height > 0 {
let computed_width = canvas_fragment_info.canvas_inline_size().to_px();
let computed_height = canvas_fragment_info.canvas_block_size().to_px();
let canvas_data = match canvas_fragment_info.ipc_renderer {
Some(ref ipc_renderer) => {
let ipc_renderer = ipc_renderer.lock().unwrap();
let (sender, receiver) = ipc::channel().unwrap();
ipc_renderer.send(CanvasMsg::FromLayout(
FromLayoutMsg::SendData(sender))).unwrap();
receiver.recv().unwrap()
},
None => return,
};
let base = state.create_base_display_item(
&stacking_relative_content_box,
clip,
self.node,
self.style.get_cursor(Cursor::Default),
DisplayListSection::Content);
let display_item = match canvas_data {
CanvasData::Pixels(canvas_data) => {
DisplayItem::Image(box ImageDisplayItem {
base: base,
image_data: Some(Arc::new(canvas_data.image_data)),
webrender_image: WebRenderImageInfo {
width: computed_width as u32,
height: computed_height as u32,
format: PixelFormat::RGBA8,
key: Some(canvas_data.image_key),
},
stretch_size: stacking_relative_content_box.size,
tile_spacing: Size2D::zero(),
image_rendering: image_rendering::T::auto,
})
}
CanvasData::WebGL(context_id) => {
DisplayItem::WebGL(box WebGLDisplayItem {
base: base,
context_id: context_id,
})
}
};
state.add_display_item(display_item);
}
}
SpecificFragmentInfo::UnscannedText(_) => {
panic!("Shouldn't see unscanned fragments here.")
}
SpecificFragmentInfo::TableColumn(_) => {
panic!("Shouldn't see table column fragments here.")
}
}
}
fn create_stacking_context(&self,
id: StackingContextId,
base_flow: &BaseFlow,
scroll_policy: ScrollPolicy,
mode: StackingContextCreationMode,
scroll_root_id: Option<ScrollRootId>)
-> StackingContext {
let scrolls_overflow_area = mode == StackingContextCreationMode::ScrollWrapper;
let border_box =
self.stacking_relative_border_box(&base_flow.stacking_relative_position,
&base_flow.early_absolute_position_info
.relative_containing_block_size,
base_flow.early_absolute_position_info
.relative_containing_block_mode,
CoordinateSystem::Parent);
let overflow = if scrolls_overflow_area {
Rect::new(Point2D::zero(), base_flow.overflow.scroll.size)
} else {
// First, compute the offset of our border box (including relative positioning)
// from our flow origin, since that is what `BaseFlow::overflow` is relative to.
let border_box_offset =
border_box.translate(&-base_flow.stacking_relative_position).origin;
// Then, using that, compute our overflow region relative to our border box.
base_flow.overflow.paint.translate(&-border_box_offset)
};
let transform = self.transform_matrix(&border_box);
let perspective = match self.style().get_effects().perspective {
LengthOrNone::Length(d) => {
let perspective_origin = self.style().get_effects().perspective_origin;
let perspective_origin =
Point2D::new(model::specified(perspective_origin.horizontal,
border_box.size.width).to_f32_px(),
model::specified(perspective_origin.vertical,
border_box.size.height).to_f32_px());
let pre_transform = Matrix4D::create_translation(perspective_origin.x,
perspective_origin.y,
0.0);
let post_transform = Matrix4D::create_translation(-perspective_origin.x,
-perspective_origin.y,
0.0);
let perspective_matrix = create_perspective_matrix(d);
pre_transform.pre_mul(&perspective_matrix).pre_mul(&post_transform)
}
LengthOrNone::None => {
Matrix4D::identity()
}
};
// Create the filter pipeline.
let effects = self.style().get_effects();
let mut filters = effects.filter.clone();
if effects.opacity != 1.0 {
filters.push(Filter::Opacity(effects.opacity))
}
let transform_style = self.style().get_used_transform_style();
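        // A used transform style of `flat` flattens descendants, so this context
        // begins a new 3D rendering context instead of extending an ancestor's.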
let establishes_3d_context = scrolls_overflow_area ||
transform_style == transform_style::T::flat;
let context_type = match mode {
StackingContextCreationMode::PseudoFloat => StackingContextType::PseudoFloat,
StackingContextCreationMode::PseudoPositioned => StackingContextType::PseudoPositioned,
_ => StackingContextType::Real,
};
StackingContext::new(id,
context_type,
&border_box,
&overflow,
self.effective_z_index(),
filters,
self.style().get_effects().mix_blend_mode,
transform,
perspective,
establishes_3d_context,
scroll_policy,
scroll_root_id)
}
fn adjust_clipping_region_for_children(&self,
current_clip: &mut ClippingRegion,
stacking_relative_border_box: &Rect<Au>) {
// Don't clip if we're text.
if self.is_scanned_text_fragment() {
return
}
let overflow_x = self.style.get_box().overflow_x;
let overflow_y = self.style.get_box().overflow_y.0;
if overflow_x == overflow_x::T::visible && overflow_y == overflow_x::T::visible {
return
}
let overflow_clip_rect_owner;
let overflow_clip_rect = match self.style.get_box()._servo_overflow_clip_box {
overflow_clip_box::T::padding_box => {
// FIXME(SimonSapin): should be the padding box, not border box.
stacking_relative_border_box
}
overflow_clip_box::T::content_box => {
overflow_clip_rect_owner =
self.stacking_relative_content_box(stacking_relative_border_box);
&overflow_clip_rect_owner
}
};
// Clip according to the values of `overflow-x` and `overflow-y`.
//
// FIXME(pcwalton): This may be more complex than it needs to be, since it seems to be
// impossible with the computed value rules as they are to have `overflow-x: visible`
// with `overflow-y: <scrolling>` or vice versa!
if let overflow_x::T::hidden = self.style.get_box().overflow_x {
let mut bounds = current_clip.bounding_rect();
let max_x = cmp::min(bounds.max_x(), overflow_clip_rect.max_x());
bounds.origin.x = cmp::max(bounds.origin.x, overflow_clip_rect.origin.x);
bounds.size.width = max_x - bounds.origin.x;
current_clip.intersect_rect(&bounds)
}
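        // Note that `overflow_y::T` is a newtype over `overflow_x::T`, which is
        // why the `overflow_x` pattern matches the `overflow-y` value here.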
if let overflow_x::T::hidden = self.style.get_box().overflow_y.0 {
let mut bounds = current_clip.bounding_rect();
let max_y = cmp::min(bounds.max_y(), overflow_clip_rect.max_y());
bounds.origin.y = cmp::max(bounds.origin.y, overflow_clip_rect.origin.y);
bounds.size.height = max_y - bounds.origin.y;
current_clip.intersect_rect(&bounds)
}
let border_radii = build_border_radius(stacking_relative_border_box,
self.style.get_border());
if !border_radii.is_square() {
current_clip.intersect_with_rounded_rect(stacking_relative_border_box,
&border_radii)
}
}
fn build_display_list_for_text_fragment(&self,
state: &mut DisplayListBuildState,
text_fragment: &ScannedTextFragmentInfo,
stacking_relative_content_box: &Rect<Au>,
text_shadow: Option<&TextShadow>,
clip: &ClippingRegion) {
// TODO(emilio): Allow changing more properties by ::selection
let text_color = if let Some(shadow) = text_shadow {
// If we're painting a shadow, paint the text the same color as the shadow.
self.style().resolve_color(shadow.color)
} else if text_fragment.selected() {
            // If the text is selected, use the color from the selection style.
self.selected_style().get_color().color
} else {
self.style().get_color().color
};
let offset = text_shadow.map(|s| Point2D::new(s.offset_x, s.offset_y)).unwrap_or_else(Point2D::zero);
let shadow_blur_radius = text_shadow.map(|s| s.blur_radius).unwrap_or(Au(0));
// Determine the orientation and cursor to use.
let (orientation, cursor) = if self.style.writing_mode.is_vertical() {
if self.style.writing_mode.is_sideways_left() {
(TextOrientation::SidewaysLeft, Cursor::VerticalText)
} else {
(TextOrientation::SidewaysRight, Cursor::VerticalText)
}
} else {
(TextOrientation::Upright, Cursor::Text)
};
// Compute location of the baseline.
//
// FIXME(pcwalton): Get the real container size.
let container_size = Size2D::zero();
let metrics = &text_fragment.run.font_metrics;
let stacking_relative_content_box = stacking_relative_content_box.translate(&offset);
let baseline_origin = stacking_relative_content_box.origin +
LogicalPoint::new(self.style.writing_mode,
Au(0),
metrics.ascent).to_physical(self.style.writing_mode,
container_size);
// Create the text display item.
let base = state.create_base_display_item(&stacking_relative_content_box,
clip,
self.node,
self.style().get_cursor(cursor),
DisplayListSection::Content);
state.add_display_item(DisplayItem::Text(box TextDisplayItem {
base: base,
text_run: text_fragment.run.clone(),
range: text_fragment.range,
text_color: text_color.to_gfx_color(),
orientation: orientation,
baseline_origin: baseline_origin,
blur_radius: shadow_blur_radius,
}));
// Create display items for text decorations.
let mut text_decorations = self.style()
.get_inheritedtext()
._servo_text_decorations_in_effect;
// Note that the text decoration colors are always the same as the text color.
text_decorations.underline = text_decorations.underline.map(|_| text_color);
text_decorations.overline = text_decorations.overline.map(|_| text_color);
text_decorations.line_through = text_decorations.line_through.map(|_| text_color);
let stacking_relative_content_box =
LogicalRect::from_physical(self.style.writing_mode,
stacking_relative_content_box,
container_size);
if let Some(ref underline_color) = text_decorations.underline {
let mut stacking_relative_box = stacking_relative_content_box;
stacking_relative_box.start.b = stacking_relative_content_box.start.b +
metrics.ascent - metrics.underline_offset;
stacking_relative_box.size.block = metrics.underline_size;
self.build_display_list_for_text_decoration(state,
underline_color,
&stacking_relative_box,
clip,
shadow_blur_radius);
}
if let Some(ref overline_color) = text_decorations.overline {
let mut stacking_relative_box = stacking_relative_content_box;
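            // The font metrics carry no separate overline thickness, so the
            // underline size is reused here.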
stacking_relative_box.size.block = metrics.underline_size;
self.build_display_list_for_text_decoration(state,
overline_color,
&stacking_relative_box,
clip,
shadow_blur_radius);
}
if let Some(ref line_through_color) = text_decorations.line_through {
let mut stacking_relative_box = stacking_relative_content_box;
stacking_relative_box.start.b = stacking_relative_box.start.b + metrics.ascent -
metrics.strikeout_offset;
stacking_relative_box.size.block = metrics.strikeout_size;
self.build_display_list_for_text_decoration(state,
line_through_color,
&stacking_relative_box,
clip,
shadow_blur_radius);
}
}
fn build_display_list_for_text_decoration(&self,
state: &mut DisplayListBuildState,
color: &RGBA,
stacking_relative_box: &LogicalRect<Au>,
clip: &ClippingRegion,
blur_radius: Au) {
// Perhaps surprisingly, text decorations are box shadows. This is because they may need
// to have blur in the case of `text-shadow`, and this doesn't hurt performance because box
// shadows are optimized into essentially solid colors if there is no need for the blur.
//
// FIXME(pcwalton, #2795): Get the real container size.
let container_size = Size2D::zero();
let stacking_relative_box = stacking_relative_box.to_physical(self.style.writing_mode,
container_size);
let base = state.create_base_display_item(
&shadow_bounds(&stacking_relative_box, blur_radius, Au(0)),
clip,
self.node,
self.style.get_cursor(Cursor::Default),
DisplayListSection::Content);
state.add_display_item(DisplayItem::BoxShadow(box BoxShadowDisplayItem {
base: base,
box_bounds: stacking_relative_box,
color: color.to_gfx_color(),
offset: Point2D::zero(),
blur_radius: blur_radius,
spread_radius: Au(0),
border_radius: Au(0),
clip_mode: BoxShadowClipMode::None,
}));
}
fn transform_matrix(&self, stacking_relative_border_box: &Rect<Au>) -> Matrix4D<f32> {
let mut transform = Matrix4D::identity();
let operations = match self.style.get_effects().transform.0 {
None => return transform,
Some(ref operations) => operations,
};
let transform_origin = &self.style.get_effects().transform_origin;
let transform_origin_x = model::specified(transform_origin.horizontal,
stacking_relative_border_box.size
.width).to_f32_px();
let transform_origin_y = model::specified(transform_origin.vertical,
stacking_relative_border_box.size
.height).to_f32_px();
let transform_origin_z = transform_origin.depth.to_f32_px();
let pre_transform = Matrix4D::create_translation(transform_origin_x,
transform_origin_y,
transform_origin_z);
let post_transform = Matrix4D::create_translation(-transform_origin_x,
-transform_origin_y,
-transform_origin_z);
for operation in operations {
let matrix = match *operation {
transform::ComputedOperation::Rotate(ax, ay, az, theta) => {
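                    // CSS rotations are clockwise with y pointing down, while
                    // `create_rotation` is counterclockwise; 2*PI - theta
                    // (equivalent to -theta) converts between the two conventions.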
let theta = 2.0f32 * f32::consts::PI - theta.radians();
Matrix4D::create_rotation(ax, ay, az, Radians::new(theta))
}
                transform::ComputedOperation::Perspective(d) => {
                    create_perspective_matrix(d)
                }
                transform::ComputedOperation::Scale(sx, sy, sz) => {
                    Matrix4D::create_scale(sx, sy, sz)
                }
transform::ComputedOperation::Translate(tx, ty, tz) => {
let tx =
model::specified(tx, stacking_relative_border_box.size.width).to_f32_px();
let ty =
model::specified(ty, stacking_relative_border_box.size.height).to_f32_px();
let tz = tz.to_f32_px();
Matrix4D::create_translation(tx, ty, tz)
}
transform::ComputedOperation::Matrix(m) => {
m.to_gfx_matrix()
}
transform::ComputedOperation::Skew(theta_x, theta_y) => {
Matrix4D::create_skew(Radians::new(theta_x.radians()),
Radians::new(theta_y.radians()))
}
};
transform = transform.pre_mul(&matrix);
}
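        // Sandwich the accumulated transform between translations to and from the
        // transform origin so rotation and scaling happen about that point.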
pre_transform.pre_mul(&transform).pre_mul(&post_transform)
}
}
pub trait BlockFlowDisplayListBuilding {
fn collect_stacking_contexts_for_block(&mut self,
parent: &mut StackingContext,
parent_scroll_root_id: ScrollRootId);
fn build_display_list_for_block(&mut self,
state: &mut DisplayListBuildState,
border_painting_mode: BorderPaintingMode);
/// Changes this block's clipping region from its parent's coordinate system to its own
/// coordinate system if necessary (i.e. if this block is a stacking context).
///
/// The clipping region is initially in each block's parent's coordinate system because the
/// parent of each block does not have enough information to determine what the child's
/// coordinate system is on its own. Specifically, if the child is absolutely positioned, the
/// parent does not know where the child's absolute position is at the time it assigns clipping
/// regions, because flows compute their own absolute positions.
fn switch_coordinate_system_if_necessary(&mut self);
}
impl BlockFlowDisplayListBuilding for BlockFlow {
fn collect_stacking_contexts_for_block(&mut self,
parent: &mut StackingContext,
parent_scroll_root_id: ScrollRootId) {
let block_stacking_context_type = self.block_stacking_context_type();
if block_stacking_context_type == BlockStackingContextType::NonstackingContext {
self.base.stacking_context_id = parent.id;
self.base.collect_stacking_contexts_for_children(parent, parent_scroll_root_id);
return;
}
let stacking_context_id = StackingContextId::new_of_type(self.fragment.node.id() as usize,
self.fragment.fragment_type());
let has_scrolling_overflow = self.has_scrolling_overflow();
let scroll_root_id = if has_scrolling_overflow {
ScrollRootId::new_of_type(self.fragment.node.id() as usize,
self.fragment.fragment_type())
} else {
parent_scroll_root_id
};
self.base.scroll_root_id = scroll_root_id;
self.base.stacking_context_id = stacking_context_id;
if block_stacking_context_type == BlockStackingContextType::PseudoStackingContext {
let creation_mode = if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) ||
self.fragment.style.get_box().position != position::T::static_ {
StackingContextCreationMode::PseudoPositioned
} else {
assert!(self.base.flags.is_float());
StackingContextCreationMode::PseudoFloat
};
let mut new_context = self.fragment.create_stacking_context(stacking_context_id,
&self.base,
ScrollPolicy::Scrollable,
creation_mode,
None);
self.base.collect_stacking_contexts_for_children(&mut new_context, scroll_root_id);
let new_children: Vec<StackingContext> = new_context.children.drain(..).collect();
let mut non_floating_children = Vec::new();
for child in new_children {
if child.context_type == StackingContextType::PseudoFloat {
new_context.children.push(child);
} else {
non_floating_children.push(child);
}
}
parent.add_child(new_context);
parent.children.append(&mut non_floating_children);
return;
}
let scroll_policy = if self.is_fixed() {
ScrollPolicy::FixedPosition
} else {
ScrollPolicy::Scrollable
};
let (creation_mode, internal_id) = if has_scrolling_overflow {
(StackingContextCreationMode::ScrollWrapper, Some(self.base.scroll_root_id))
} else {
(StackingContextCreationMode::Normal, None)
};
let mut stacking_context = self.fragment.create_stacking_context(
stacking_context_id,
&self.base,
scroll_policy,
creation_mode,
internal_id);
self.base.collect_stacking_contexts_for_children(&mut stacking_context, scroll_root_id);
parent.add_child(stacking_context);
}
fn build_display_list_for_block(&mut self,
state: &mut DisplayListBuildState,
border_painting_mode: BorderPaintingMode) {
let establishes_stacking_context = self.fragment.establishes_stacking_context();
let background_border_section = if self.base.flags.is_float() {
DisplayListSection::BackgroundAndBorders
} else if self.base.flags.contains(IS_ABSOLUTELY_POSITIONED) {
if establishes_stacking_context {
DisplayListSection::BackgroundAndBorders
} else {
DisplayListSection::BlockBackgroundsAndBorders
}
} else {
DisplayListSection::BlockBackgroundsAndBorders
};
// Add the box that starts the block context.
self.fragment
.build_display_list(state,
&self.base.stacking_relative_position,
&self.base
.early_absolute_position_info
.relative_containing_block_size,
self.base
.early_absolute_position_info
.relative_containing_block_mode,
border_painting_mode,
background_border_section,
&self.base.clip);
self.base.build_display_items_for_debugging_tint(state, self.fragment.node);
}
fn switch_coordinate_system_if_necessary(&mut self) {
// Avoid overflows!
if self.base.clip.is_max() {
return
}
if !self.fragment.establishes_stacking_context() {
return
}
let stacking_relative_border_box =
self.fragment.stacking_relative_border_box(&self.base.stacking_relative_position,
&self.base
.early_absolute_position_info
.relative_containing_block_size,
self.base
.early_absolute_position_info
.relative_containing_block_mode,
CoordinateSystem::Parent);
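        // Move the clip into this block's own coordinate system, whose origin is
        // the border box origin.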
self.base.clip = self.base.clip.translate(&-stacking_relative_border_box.origin);
// Account for `transform`, if applicable.
if self.fragment.style.get_effects().transform.0.is_none() {
return
}
let transform = match self.fragment
.transform_matrix(&stacking_relative_border_box)
.inverse() {
Some(transform) => transform,
None => {
// Singular matrix. Ignore it.
return
}
};
// FIXME(pcwalton): This is inaccurate: not all transforms are 2D, and not all clips are
// axis-aligned.
let bounding_rect = self.base.clip.bounding_rect();
let bounding_rect = Rect::new(Point2D::new(bounding_rect.origin.x.to_f32_px(),
bounding_rect.origin.y.to_f32_px()),
Size2D::new(bounding_rect.size.width.to_f32_px(),
bounding_rect.size.height.to_f32_px()));
let clip_rect = transform.to_2d().transform_rect(&bounding_rect);
let clip_rect = Rect::new(Point2D::new(Au::from_f32_px(clip_rect.origin.x),
Au::from_f32_px(clip_rect.origin.y)),
Size2D::new(Au::from_f32_px(clip_rect.size.width),
Au::from_f32_px(clip_rect.size.height)));
self.base.clip = ClippingRegion::from_rect(&clip_rect)
}
}
pub trait InlineFlowDisplayListBuilding {
fn collect_stacking_contexts_for_inline(&mut self,
parent: &mut StackingContext,
parent_scroll_root_id: ScrollRootId);
fn build_display_list_for_inline_fragment_at_index(&mut self,
state: &mut DisplayListBuildState,
index: usize);
fn build_display_list_for_inline(&mut self, state: &mut DisplayListBuildState);
}
impl InlineFlowDisplayListBuilding for InlineFlow {
fn collect_stacking_contexts_for_inline(&mut self,
parent: &mut StackingContext,
parent_scroll_root_id: ScrollRootId) {
self.base.stacking_context_id = parent.id;
self.base.scroll_root_id = parent_scroll_root_id;
for mut fragment in self.fragments.fragments.iter_mut() {
match fragment.specific {
SpecificFragmentInfo::InlineBlock(ref mut block_flow) => {
let block_flow = flow_ref::deref_mut(&mut block_flow.flow_ref);
block_flow.collect_stacking_contexts(parent, parent_scroll_root_id);
}
SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut block_flow) => {
let block_flow = flow_ref::deref_mut(&mut block_flow.flow_ref);
block_flow.collect_stacking_contexts(parent, parent_scroll_root_id);
}
_ if fragment.establishes_stacking_context() => {
fragment.stacking_context_id =
StackingContextId::new_of_type(fragment.fragment_id(),
fragment.fragment_type());
parent.add_child(fragment.create_stacking_context(
fragment.stacking_context_id,
&self.base,
ScrollPolicy::Scrollable,
StackingContextCreationMode::Normal,
None));
}
_ => fragment.stacking_context_id = parent.id,
}
}
}
fn build_display_list_for_inline_fragment_at_index(&mut self,
state: &mut DisplayListBuildState,
index: usize) {
let fragment = self.fragments.fragments.get_mut(index).unwrap();
fragment.build_display_list(state,
&self.base.stacking_relative_position,
&self.base
.early_absolute_position_info
.relative_containing_block_size,
self.base
.early_absolute_position_info
.relative_containing_block_mode,
BorderPaintingMode::Separate,
DisplayListSection::Content,
&self.base.clip);
}
fn build_display_list_for_inline(&mut self, state: &mut DisplayListBuildState) {
// TODO(#228): Once we form lines and have their cached bounds, we can be smarter and
// not recurse on a line if nothing in it can intersect the dirty region.
debug!("Flow: building display list for {} inline fragments", self.fragments.len());
// We iterate using an index here, because we want to avoid doing a
// double-borrow of self (one mutable for the method call and one immutable
// for the self.fragments.fragments iterator itself).
for index in 0..self.fragments.fragments.len() {
let (establishes_stacking_context, stacking_context_id) = {
let fragment = self.fragments.fragments.get(index).unwrap();
(self.base.stacking_context_id != fragment.stacking_context_id,
fragment.stacking_context_id)
};
if establishes_stacking_context {
state.push_stacking_context_id(stacking_context_id);
}
self.build_display_list_for_inline_fragment_at_index(state, index);
if establishes_stacking_context {
state.pop_stacking_context_id();
}
}
if !self.fragments.fragments.is_empty() {
self.base.build_display_items_for_debugging_tint(state,
self.fragments.fragments[0].node);
}
}
}
pub trait ListItemFlowDisplayListBuilding {
fn build_display_list_for_list_item(&mut self, state: &mut DisplayListBuildState);
}
impl ListItemFlowDisplayListBuilding for ListItemFlow {
fn build_display_list_for_list_item(&mut self, state: &mut DisplayListBuildState) {
// Draw the marker, if applicable.
for marker in &mut self.marker_fragments {
marker.build_display_list(state,
&self.block_flow.base.stacking_relative_position,
&self.block_flow
.base
.early_absolute_position_info
.relative_containing_block_size,
self.block_flow
.base
.early_absolute_position_info
.relative_containing_block_mode,
BorderPaintingMode::Separate,
DisplayListSection::Content,
&self.block_flow.base.clip);
}
// Draw the rest of the block.
self.block_flow.build_display_list_for_block(state, BorderPaintingMode::Separate)
}
}
pub trait FlexFlowDisplayListBuilding {
fn build_display_list_for_flex(&mut self, state: &mut DisplayListBuildState);
}
impl FlexFlowDisplayListBuilding for FlexFlow {
fn build_display_list_for_flex(&mut self, state: &mut DisplayListBuildState) {
// Draw the rest of the block.
self.as_mut_block().build_display_list_for_block(state, BorderPaintingMode::Separate)
}
}
trait BaseFlowDisplayListBuilding {
fn build_display_items_for_debugging_tint(&self,
state: &mut DisplayListBuildState,
node: OpaqueNode);
}
impl BaseFlowDisplayListBuilding for BaseFlow {
fn build_display_items_for_debugging_tint(&self,
state: &mut DisplayListBuildState,
node: OpaqueNode) {
if !opts::get().show_debug_parallel_layout {
return
}
let thread_id = self.thread_id;
let stacking_context_relative_bounds =
Rect::new(self.stacking_relative_position,
self.position.size.to_physical(self.writing_mode));
let mut color = THREAD_TINT_COLORS[thread_id as usize % THREAD_TINT_COLORS.len()];
color.a = 1.0;
let base = state.create_base_display_item(
&stacking_context_relative_bounds.inflate(Au::from_px(2), Au::from_px(2)),
&self.clip,
node,
None,
DisplayListSection::Content);
state.add_display_item(DisplayItem::Border(box BorderDisplayItem {
base: base,
border_widths: SideOffsets2D::new_all_same(Au::from_px(2)),
color: SideOffsets2D::new_all_same(color),
style: SideOffsets2D::new_all_same(border_style::T::solid),
radius: BorderRadii::all_same(Au(0)),
}));
}
}
trait ServoComputedValuesCursorUtility {
fn get_cursor(&self, default_cursor: Cursor) -> Option<Cursor>;
}
impl ServoComputedValuesCursorUtility for ServoComputedValues {
/// Gets the cursor to use given the specific ServoComputedValues. `default_cursor` specifies
/// the cursor to use if `cursor` is `auto`. Typically, this will be `PointerCursor`, but for
/// text display items it may be `TextCursor` or `VerticalTextCursor`.
#[inline]
fn get_cursor(&self, default_cursor: Cursor) -> Option<Cursor> {
match (self.get_pointing().pointer_events, self.get_pointing().cursor) {
(pointer_events::T::none, _) => None,
(pointer_events::T::auto, cursor::T::AutoCursor) => Some(default_cursor),
(pointer_events::T::auto, cursor::T::SpecifiedCursor(cursor)) => Some(cursor),
}
}
}
// A helper data structure for gradients.
#[derive(Copy, Clone)]
struct StopRun {
start_offset: f32,
end_offset: f32,
start_index: usize,
stop_count: usize,
}
fn position_to_offset(position: LengthOrPercentage, Au(total_length): Au) -> f32 {
match position {
LengthOrPercentage::Length(Au(length)) => {
(1.0f32).min(length as f32 / total_length as f32)
}
LengthOrPercentage::Percentage(percentage) => percentage as f32,
LengthOrPercentage::Calc(calc) =>
(1.0f32).min(calc.percentage() + (calc.length().0 as f32) / (total_length as f32)),
}
}
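// Worked example (added for illustration, assuming both lengths are in the
// same app units): with a total length of 200px, a Length stop at 50px maps
// to 0.25, while a Percentage stop of 0.4 maps to 0.4 regardless of the
// total length; both are capped at 1.0.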
/// Adjusts `content_rect` as necessary for the given spread, and blur so that the resulting
/// bounding rect contains all of a shadow's ink.
fn shadow_bounds(content_rect: &Rect<Au>, blur_radius: Au, spread_radius: Au) -> Rect<Au> {
let inflation = spread_radius + blur_radius * BLUR_INFLATION_FACTOR;
content_rect.inflate(inflation, inflation)
}
/// Allows a CSS color to be converted into a graphics color.
pub trait ToGfxColor {
/// Converts a CSS color to a graphics color.
fn to_gfx_color(&self) -> Color;
}
impl ToGfxColor for RGBA {
fn to_gfx_color(&self) -> Color {
color::rgba(self.red, self.green, self.blue, self.alpha)
}
}
/// Describes how to paint the borders.
#[derive(Copy, Clone)]
pub enum BorderPaintingMode<'a> {
/// Paint borders separately (`border-collapse: separate`).
Separate,
/// Paint collapsed borders.
Collapse(&'a CollapsedBordersForCell),
/// Paint no borders.
Hidden,
}
#[derive(Copy, Clone, PartialEq)]
pub enum StackingContextCreationMode {
Normal,
ScrollWrapper,
PseudoPositioned,
PseudoFloat,
}<|fim▁end|>
|
create_perspective_matrix(d)
}
transform::ComputedOperation::Scale(sx, sy, sz) => {
|
<|file_name|>brightnessfilter.rs<|end_file_name|><|fim▁begin|>#pragma version(1)
#pragma rs java_package_name(com.dss.renderscripttest)
float brightnessValue;<|fim▁hole|>rs_script gScript;
static int mImageWidth;
const uchar4 *gPixels;
void root(const uchar4 *v_in, uchar4 *v_out, const void *usrData, uint32_t x, uint32_t y) {
float4 apixel = rsUnpackColor8888(*v_in);
float3 pixel = apixel.rgb;
float factor = brightnessValue;
pixel = pixel + factor;
pixel = clamp(pixel,0.0f,1.0f);
*v_out = rsPackColorTo8888(pixel.rgb);
}
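// Illustrative note (added): with brightnessValue = 0.25, an input pixel of
// (0.5, 0.5, 0.5) becomes (0.75, 0.75, 0.75); clamp() keeps each channel in
// [0, 1]. Note the input alpha is not preserved: packing a float3 writes
// full alpha.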
void filter() {
mImageWidth = rsAllocationGetDimX(gIn);
rsDebug("Image size is ", rsAllocationGetDimX(gIn), rsAllocationGetDimY(gOut));
rsForEach(gScript, gIn, gOut, 0, 0);
}<|fim▁end|>
|
rs_allocation gIn;
rs_allocation gOut;
|
<|file_name|>issue-4333.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::io;<|fim▁hole|>}<|fim▁end|>
|
fn main() {
let stdout = &io::stdout() as &io::WriterUtil;
stdout.write_line("Hello!");
|
<|file_name|>test_create_server.py<|end_file_name|><|fim▁begin|># Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
import testtools
from tempest.api.compute import base
from tempest.common.utils import data_utils
from tempest.common.utils.linux import remote_client
from tempest.common import waiters
from tempest import config
from tempest import test
CONF = config.CONF
class ServersTestJSON(base.BaseV2ComputeTest):
disk_config = 'AUTO'
@classmethod
def setup_credentials(cls):
cls.prepare_instance_network()
super(ServersTestJSON, cls).setup_credentials()
@classmethod
def setup_clients(cls):
super(ServersTestJSON, cls).setup_clients()
cls.client = cls.servers_client
cls.networks_client = cls.os.networks_client
cls.subnets_client = cls.os.subnets_client
@classmethod
def resource_setup(cls):
cls.set_validation_resources()
super(ServersTestJSON, cls).resource_setup()
cls.meta = {'hello': 'world'}
cls.accessIPv4 = '1.1.1.1'
cls.accessIPv6 = '0000:0000:0000:0000:0000:babe:220.12.22.2'
cls.name = data_utils.rand_name(cls.__name__ + '-server')
cls.password = data_utils.rand_password()
disk_config = cls.disk_config
cls.server_initial = cls.create_test_server(
validatable=True,
wait_until='ACTIVE',
name=cls.name,
metadata=cls.meta,
accessIPv4=cls.accessIPv4,
accessIPv6=cls.accessIPv6,
disk_config=disk_config,
adminPass=cls.password)
cls.server = (cls.client.show_server(cls.server_initial['id'])
['server'])
def _create_net_subnet_ret_net_from_cidr(self, cidr):
name_net = data_utils.rand_name(self.__class__.__name__)
net = self.networks_client.create_network(name=name_net)
self.addCleanup(self.networks_client.delete_network,
net['network']['id'])
subnet = self.subnets_client.create_subnet(
network_id=net['network']['id'],
cidr=cidr,
ip_version=4)
self.addCleanup(self.subnets_client.delete_subnet,
subnet['subnet']['id'])
return net
@test.attr(type='smoke')
@test.idempotent_id('5de47127-9977-400a-936f-abcfbec1218f')
def test_verify_server_details(self):
# Verify the specified server attributes are set correctly
self.assertEqual(self.accessIPv4, self.server['accessIPv4'])
# NOTE(maurosr): See http://tools.ietf.org/html/rfc5952 (section 4)
# Here we compare directly with the canonicalized format.
self.assertEqual(self.server['accessIPv6'],
str(netaddr.IPAddress(self.accessIPv6)))
self.assertEqual(self.name, self.server['name'])
self.assertEqual(self.image_ref, self.server['image']['id'])
self.assertEqual(self.flavor_ref, self.server['flavor']['id'])
self.assertEqual(self.meta, self.server['metadata'])
@test.attr(type='smoke')
@test.idempotent_id('9a438d88-10c6-4bcd-8b5b-5b6e25e1346f')
def test_list_servers(self):
# The created server should be in the list of all servers
body = self.client.list_servers()
servers = body['servers']
found = any([i for i in servers if i['id'] == self.server['id']])
self.assertTrue(found)
@test.idempotent_id('585e934c-448e-43c4-acbf-d06a9b899997')
def test_list_servers_with_detail(self):
# The created server should be in the detailed list of all servers
body = self.client.list_servers(detail=True)
servers = body['servers']
found = any([i for i in servers if i['id'] == self.server['id']])
self.assertTrue(found)
@test.idempotent_id('cbc0f52f-05aa-492b-bdc1-84b575ca294b')
@testtools.skipUnless(CONF.validation.run_validation,
'Instance validation tests are disabled.')
def test_verify_created_server_vcpus(self):
# Verify that the number of vcpus reported by the instance matches
# the amount stated by the flavor
flavor = self.flavors_client.show_flavor(self.flavor_ref)['flavor']
linux_client = remote_client.RemoteClient(
self.get_server_ip(self.server),
self.ssh_user,
self.password,
self.validation_resources['keypair']['private_key'],
server=self.server,
servers_client=self.client)
self.assertEqual(flavor['vcpus'], linux_client.get_number_of_vcpus())
@test.idempotent_id('ac1ad47f-984b-4441-9274-c9079b7a0666')
@testtools.skipUnless(CONF.validation.run_validation,
'Instance validation tests are disabled.')
def test_host_name_is_same_as_server_name(self):
# Verify the instance host name is the same as the server name
linux_client = remote_client.RemoteClient(
self.get_server_ip(self.server),
self.ssh_user,
self.password,
self.validation_resources['keypair']['private_key'],
server=self.server,
servers_client=self.client)
hostname = linux_client.get_hostname()
msg = ('Failed while verifying servername equals hostname. Expected '
'hostname "%s" but got "%s".' % (self.name, hostname))
self.assertEqual(self.name.lower(), hostname, msg)
@test.idempotent_id('ed20d3fb-9d1f-4329-b160-543fbd5d9811')
@testtools.skipUnless(
test.is_scheduler_filter_enabled("ServerGroupAffinityFilter"),
'ServerGroupAffinityFilter is not available.')
def test_create_server_with_scheduler_hint_group(self):
# Create a server with the scheduler hint "group".
group_id = self.create_test_server_group()['id']
hints = {'group': group_id}
server = self.create_test_server(scheduler_hints=hints,
wait_until='ACTIVE')
# Check a server is in the group
server_group = (self.server_groups_client.show_server_group(group_id)
['server_group'])
self.assertIn(server['id'], server_group['members'])
@test.idempotent_id('0578d144-ed74-43f8-8e57-ab10dbf9b3c2')
@testtools.skipUnless(CONF.service_available.neutron,
'Neutron service must be available.')
def test_verify_multiple_nics_order(self):
# Verify that the networks order given at the server creation is
# preserved within the server.
net1 = self._create_net_subnet_ret_net_from_cidr('19.80.0.0/24')
net2 = self._create_net_subnet_ret_net_from_cidr('19.86.0.0/24')
networks = [{'uuid': net1['network']['id']},
{'uuid': net2['network']['id']}]
server_multi_nics = self.create_test_server(
networks=networks, wait_until='ACTIVE')
# Cleanup server; this is needed in the test case because with the LIFO
# nature of the cleanups, if we don't delete the server first, the port
# will still be part of the subnet and we'll get a 409 from Neutron
# when trying to delete the subnet. The tear down in the base class
# will try to delete the server and get a 404 but it's ignored so
# we're OK.
def cleanup_server():
self.client.delete_server(server_multi_nics['id'])
waiters.wait_for_server_termination(self.client,
server_multi_nics['id'])
self.addCleanup(cleanup_server)
addresses = (self.client.list_addresses(server_multi_nics['id'])
['addresses'])
# We can't predict the ip addresses assigned to the server on networks.
# Sometimes the assigned addresses are ['19.80.0.2', '19.86.0.2'], at
# other times ['19.80.0.3', '19.86.0.3']. So we check if the first
# address is in first network, similarly second address is in second
# network.
addr = [addresses[net1['network']['name']][0]['addr'],
addresses[net2['network']['name']][0]['addr']]
networks = [netaddr.IPNetwork('19.80.0.0/24'),
netaddr.IPNetwork('19.86.0.0/24')]
for address, network in zip(addr, networks):
self.assertIn(address, network)
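# Illustrative note (not part of the original test): netaddr accepts plain
# strings for membership checks, which is what assertIn relies on above, e.g.
#     netaddr.IPAddress('19.80.0.2') in netaddr.IPNetwork('19.80.0.0/24')  # True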
@test.idempotent_id('1678d144-ed74-43f8-8e57-ab10dbf9b3c2')
@testtools.skipUnless(CONF.service_available.neutron,
'Neutron service must be available.')
def test_verify_duplicate_network_nics(self):
# Verify that server creation does not fail when more than one nic
# is created on the same network.
net1 = self._create_net_subnet_ret_net_from_cidr('19.80.0.0/24')
net2 = self._create_net_subnet_ret_net_from_cidr('19.86.0.0/24')
networks = [{'uuid': net1['network']['id']},<|fim▁hole|> networks=networks, wait_until='ACTIVE')
def cleanup_server():
self.client.delete_server(server_multi_nics['id'])
waiters.wait_for_server_termination(self.client,
server_multi_nics['id'])
self.addCleanup(cleanup_server)
addresses = (self.client.list_addresses(server_multi_nics['id'])
['addresses'])
addr = [addresses[net1['network']['name']][0]['addr'],
addresses[net2['network']['name']][0]['addr'],
addresses[net1['network']['name']][1]['addr']]
networks = [netaddr.IPNetwork('19.80.0.0/24'),
netaddr.IPNetwork('19.86.0.0/24'),
netaddr.IPNetwork('19.80.0.0/24')]
for address, network in zip(addr, networks):
self.assertIn(address, network)
class ServersWithSpecificFlavorTestJSON(base.BaseV2ComputeAdminTest):
disk_config = 'AUTO'
@classmethod
def setup_credentials(cls):
cls.prepare_instance_network()
super(ServersWithSpecificFlavorTestJSON, cls).setup_credentials()
@classmethod
def setup_clients(cls):
super(ServersWithSpecificFlavorTestJSON, cls).setup_clients()
cls.flavor_client = cls.os_adm.flavors_client
cls.client = cls.servers_client
@classmethod
def resource_setup(cls):
cls.set_validation_resources()
super(ServersWithSpecificFlavorTestJSON, cls).resource_setup()
@test.idempotent_id('b3c7bcfc-bb5b-4e22-b517-c7f686b802ca')
@testtools.skipUnless(CONF.validation.run_validation,
'Instance validation tests are disabled.')
def test_verify_created_server_ephemeral_disk(self):
# Verify that the ephemeral disk is created when creating server
flavor_base = self.flavors_client.show_flavor(
self.flavor_ref)['flavor']
def create_flavor_with_ephemeral(ephem_disk):
flavor_with_eph_disk_id = data_utils.rand_int_id(start=1000)
ram = flavor_base['ram']
vcpus = flavor_base['vcpus']
disk = flavor_base['disk']
if ephem_disk > 0:
# Create a flavor with ephemeral disk
flavor_name = data_utils.rand_name('eph_flavor')
flavor = self.flavor_client.create_flavor(
name=flavor_name, ram=ram, vcpus=vcpus, disk=disk,
id=flavor_with_eph_disk_id, ephemeral=ephem_disk)['flavor']
else:
# Create a flavor without ephemeral disk
flavor_name = data_utils.rand_name('no_eph_flavor')
flavor = self.flavor_client.create_flavor(
name=flavor_name, ram=ram, vcpus=vcpus, disk=disk,
id=flavor_with_eph_disk_id)['flavor']
self.addCleanup(flavor_clean_up, flavor['id'])
return flavor['id']
def flavor_clean_up(flavor_id):
self.flavor_client.delete_flavor(flavor_id)
self.flavor_client.wait_for_resource_deletion(flavor_id)
flavor_with_eph_disk_id = create_flavor_with_ephemeral(ephem_disk=1)
flavor_no_eph_disk_id = create_flavor_with_ephemeral(ephem_disk=0)
admin_pass = self.image_ssh_password
server_no_eph_disk = self.create_test_server(
validatable=True,
wait_until='ACTIVE',
adminPass=admin_pass,
flavor=flavor_no_eph_disk_id)
# Get partition number of server without ephemeral disk.
server_no_eph_disk = self.client.show_server(
server_no_eph_disk['id'])['server']
linux_client = remote_client.RemoteClient(
self.get_server_ip(server_no_eph_disk),
self.ssh_user,
admin_pass,
self.validation_resources['keypair']['private_key'],
server=server_no_eph_disk,
servers_client=self.client)
partition_num = len(linux_client.get_partitions().split('\n'))
# Explicit server deletion necessary for Juno compatibility
self.client.delete_server(server_no_eph_disk['id'])
server_with_eph_disk = self.create_test_server(
validatable=True,
wait_until='ACTIVE',
adminPass=admin_pass,
flavor=flavor_with_eph_disk_id)
server_with_eph_disk = self.client.show_server(
server_with_eph_disk['id'])['server']
linux_client = remote_client.RemoteClient(
self.get_server_ip(server_with_eph_disk),
self.ssh_user,
admin_pass,
self.validation_resources['keypair']['private_key'],
server=server_with_eph_disk,
servers_client=self.client)
partition_num_emph = len(linux_client.get_partitions().split('\n'))
self.assertEqual(partition_num + 1, partition_num_emph)
class ServersTestManualDisk(ServersTestJSON):
disk_config = 'MANUAL'
@classmethod
def skip_checks(cls):
super(ServersTestManualDisk, cls).skip_checks()
if not CONF.compute_feature_enabled.disk_config:
msg = "DiskConfig extension not enabled."
raise cls.skipException(msg)<|fim▁end|>
|
{'uuid': net2['network']['id']},
{'uuid': net1['network']['id']}]
server_multi_nics = self.create_test_server(
|
<|file_name|>main.js<|end_file_name|><|fim▁begin|>/*
* Resize the graph container
*/
function resizegraph(){
var windowHeight = $( window ).innerHeight();
$("#stacked").css('min-height',(windowHeight * 0.35) );
}
//From facebook
window.fbAsyncInit = function(){
FB.init({
appId: facebook_api_key, status: true, cookie: true, xfbml: true });
};
(function(d, debug){var js, id = 'facebook-jssdk', ref = d.getElementsByTagName('script')[0];
if(d.getElementById(id)) {return;}
js = d.createElement('script'); js.id = id;
js.async = true;js.src = "//connect.facebook.net/fr_CA/all" + (debug ? "/debug" : "") + ".js";
ref.parentNode.insertBefore(js, ref);}(document, /*debug*/ false));
function postToFeed(title, desc, url, image){
desc = desc + url;
var obj = {method: 'feed',link: url, picture: image,name: title,description: desc};
function callback(response){}
FB.ui(obj, callback);
}
/*
* Sharing functions
* @param {string} url - A URL
* @param {string} descr - A small description
* @param {int} winWidth - Width of the popup
* @param {int} winHeight - Height of the popup
*/
function twittershare(url, descr, winWidth, winHeight) {
var winTop = (screen.height / 2) - (winHeight / 2);
var winLeft = (screen.width / 2) - (winWidth / 2);
descr = encodeURIComponent(descr);
url = encodeURIComponent(url);
window.open('https://twitter.com/share?url=' + url + '&text='+ descr , 'Partage', 'top=' + winTop + ',left=' + winLeft + ',toolbar=0,status=0,width='+winWidth+',height='+winHeight);
}
function linkedinshare(url, title, descr, winWidth, winHeight) {
var winTop = (screen.height / 2) - (winHeight / 2);
var winLeft = (screen.width / 2) - (winWidth / 2);
title = encodeURIComponent(title);
descr = encodeURIComponent(descr + url);
window.open('http://www.linkedin.com/shareArticle?mini=true&url=' + url + '&title='+ title + '&summary='+ descr , 'Partage', 'top=' + winTop + ',left=' + winLeft + ',toolbar=0,status=0,width='+winWidth+',height='+winHeight);
}
/*
* Set up the sharing links for a given contract
* @param {string} contractId - The contract ID to share
*/
function setSocialMedia(contractId){
var base_url = window.location.protocol + '//' + window.location.hostname + location.pathname;
var url = base_url + '?q=' + contractId + '&type='+$('[name=type]:checked').val();
var image_url = base_url + 'img/rosette.jpg';
var info = $("#contract_"+contractId);
var titlet = "Vue sur les contrats. Consultez les contrats et subventions octroyés par la Ville de Montréal.";
var titlefb = "Vue sur les contrats de la ville de Montréal";
var descriptionfb = "Vue sur les contrats est un outil de visualisation qui permet de consulter les contrats et les subventions octroyés par la Ville de façon simple et conviviale. ";
var titleli = "Vue sur les contrats de la Ville de Montréal";
var descriptionli = "Vue sur les contrats est un outil de visualisation qui permet de consulter les contrats et les subventions octroyés par la Ville de façon simple et conviviale. ";
var formattedBody = encodeURIComponent("Vue sur les contrats est un outil de visualisation qui permet de consulter les contrats et les subventions octroyés par la Ville de façon simple et conviviale. \n \n "+ url);
var box = $("#modalsocialmedia");
box.find(".sharetwitter").attr("href",'javascript:twittershare("'+url+'", "'+titlet+'", 520, 350);');
box.find(".sharefacebook").attr("href",'javascript:postToFeed("'+titlefb+'", "'+descriptionfb+'","'+url+'", "'+image_url+'");');
box.find(".sharelinkedin").attr("href",'javascript:linkedinshare("'+url+'", "'+titleli+'", "'+descriptionli+'", 520, 350);');
box.find(".shareemail").attr('href','mailto:?body='+formattedBody+'&subject=Vue sur les contrats de la Ville de Montréal');
box.find(".sharelink").val(url);
box.modal('show');
}
/*
* @param {Object} results - Result object from the API
* @return {boolean}
* Determines whether usable data was received and toggles the UI accordingly
*/
function results_accepted(results) {
if (results && results.meta) {
if (results.meta.count) {
$(".graph-container").css('visibility','visible');
$(".mainContent").show();
$(".filters").show();
$(".noResults").hide();
return true;
}else{
$('html, body').animate({ scrollTop: 0 }, 300);
$(".mainContent").hide();
$(".graph-container").css('visibility','hidden');
$(".filters").hide();
$(".noResults").show();
return false;
}
}else{
$('html, body').animate({ scrollTop: 0 }, 300);
$(".mainContent").hide();
$(".graph-container").css('visibility','hidden');
$(".filters").hide();
$(".noResults").show();
return false;
}
}
/*
* @param {string} text
* @param {int} length
* @return {string}
* Truncates the text and adds an ellipsis if needed
*/
function TextAbstract(text, length) {
if (text == null) {
return "";
}
if (text.length <= length) {
return text;
}
text = text.substring(0, length);
return text + "...";
}
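// Example (added for illustration): TextAbstract("Vue sur les contrats", 10)
// returns "Vue sur le...".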
/*
*bootstrap-selectpicker init
*/
$.fn.selectpicker.defaults = {
mobile : false,
selectAllText: "Tout sélectionner",
deselectAllText: "Désélectionner",
noneSelectedText: "Aucune sélection",
countSelectedText: function (numSelected, numTotal) {
if (numTotal == numSelected) {
return "Tous";
}else{
return (numSelected == 1) ? "{0} sélection" : "{0} sélections";
}
},
}
/*
* Clear the form
*/
function clearForm()
{
$(':input').not(':button, :submit, :reset, :checkbox, :radio, [name="limit"]').val('');
$(':checkbox, :radio').prop('checked', false);
$("#offset").val('0');
}
function renameLabels(){
if ($( ".switchgraph span" ).hasClass('value')) {
$( ".switchgraph span" ).html('Voir nombre de '+defTypeName()+'s par mois');
$(".graphTitle").html('Montant total des '+defTypeName()+'s par mois');
}else if($( ".switchgraph span" ).hasClass('count')) {
$( ".switchgraph span" ).html('Voir montant des '+defTypeName()+'s par mois');
$(".graphTitle").html('Nombre total de '+defTypeName()+'s par mois');
}
}
/*
* Add a buyer ID to the request
*/
function bybuyer(buyerid, reset) {
init = true;
$('.searchboxLabel').html("Octroyé par");
$("input#offset").val('0');
$("input#supplier").val('');
$("input#buyer").val(buyerid);
$('.searchboxhidden').tagsinput('removeAll');
$('.searchboxhidden').tagsinput('add', buyerid);
$(".bootstrap-tagsinput").find('.tag').addClass('buyer');
}
/*
* Add a supplier ID to the request
*/
function bysupplier(supplierid, reset) {
init = true;
$('.searchboxLabel').html("Fournisseur");
$("input#offset").val('0');
$("input#buyer").val('');
$("input#supplier").val(supplierid);
$('.searchboxhidden').tagsinput('removeAll');
$('.searchboxhidden').tagsinput('add', supplierid);
$(".bootstrap-tagsinput").find('.tag').addClass('supplier');
}
var previousPoint = null, previousLabel = null;
var monthNames = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"];
/*
* Tooltip
*/
$.fn.UseTooltip = function () {
if($(window).width() > 768){
$(this).bind("plothover", function (event, pos, item) {
if (item) {
if ((previousLabel != item.series.label) || (previousPoint != item.dataIndex)) {
previousPoint = item.dataIndex;
previousLabel = item.series.label;
$("#tooltip").remove();
var color = item.series.color;
showTooltip(item.pageX, item.pageY, item.series.color, getTemplateForToolip(item));
}
} else {
$("#tooltip").remove();
previousPoint = null;
}
});
}
};
function showTooltip(x, y, color, contents) {
var tooltip = $('<div id="tooltip">' + contents + '</div>').css({
'border-color': color
});
tooltip.find('#tooltip-arrow').css('border-top-color', color);
tooltip.appendTo("body").fadeIn(200);
tooltip.css({
top: y - (tooltip[0].offsetHeight + 10),
left: x - (tooltip[0].offsetWidth - 18)
});
}
function getTemplateForToolip(item){
if(dataSet == 'amount'){
var amount = formatedData.value[item.seriesIndex].data[item.dataIndex][1];
var count = formatedData.value_count[item.seriesIndex].data[item.dataIndex][1];
}else{
var count = formatedData.count[item.seriesIndex].data[item.dataIndex][1];
var amount = formatedData.count_value[item.seriesIndex].data[item.dataIndex][1];
}
var x = item.datapoint[0];
labelContrat = (count == 1) ? " "+defTypeName() : " "+defTypeName()+'s';
var text = "";
text += "<div style='position: relative;'>";
text += " <div style='' class='domaineName'><strong>{month}</strong></div>";
text += " <h5 style='margin: 0;'>{label}</h5 style='margin: 0;'>";
text += " <div><strong>{money} - {count} "+labelContrat+"</strong></div>";
text += " <div id='tooltip-arrow'></div>";
text += "</div>";
var amountFormatted = amount.formatMoney('0', ',', ' ')+ ' $';
return text
.replace('{month}', new Date(x).toLongFrenchFormatMonth())
.replace('{label}', item.series.label)
.replace('{money}', amountFormatted)
.replace('{count}', count);
}
/*
* from http://stackoverflow.com/a/14994860
* Transform Y labels - money
*/
function valueFormatter(num) {
if (num >= 1000000000) {
return (num / 1000000000).toFixed(1).replace(/\.0$/, '') + ' G $';
}
if (num >= 1000000) {
return (num / 1000000).toFixed(1).replace(/\.0$/, '') + ' M $';
}
if (num >= 1000) {
return (num / 1000).toFixed(1).replace(/\.0$/, '') + ' K $';
}
return num + ' $';
}
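/*
* Illustrative examples (added, not in the original source):
*   valueFormatter(2500000000) -> "2.5 G $"
*   valueFormatter(1000000)    -> "1 M $"  (a trailing ".0" is stripped)
*   valueFormatter(950)        -> "950 $"
*/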
/*
* from http://stackoverflow.com/a/14994860
* Transform Y labels
*/
function countFormatter(num) {
if (num >= 1000000000) {
return (num / 1000000000).toFixed(1).replace(/\.0$/, '') + ' G';
}
if (num >= 1000000) {
return (num / 1000000).toFixed(1).replace(/\.0$/, '') + ' M';
}
if (num >= 1000) {
return (num / 1000).toFixed(1).replace(/\.0$/, '') + ' K';
}
if (num === +num && num !== (num|0)) {
return num.toFixed(1);
}
return num;
}
/*
* Init plot
*/
function plotWithOptions(formatedData,options) {
if (formatedData.length > 0) {
window.plot = $.plot("#stacked", formatedData, options);
$("#stacked").UseTooltip();
}
}
/*
* Refresh the plot with a new width
*/
function resizedw(){
resizegraph();
plotWithOptions(wg(formatedData), options);
}
/*
* define which graph is currently displayed
*/
function wgcheck() {
if ($(".switchgraph span").hasClass('value')) {
return 'value';
}else{
return 'count';
}
}
/*
* define which graph is currently displayed
* and return the corresponding dataset
*/
function wg(data) {
if (data) {
if ($(".switchgraph span").hasClass('value')) {
return data.value;
}else{
return data.count;
}
}return {};
}
/*
* Plot configuration
*/
var options = {
series: {
grow: {
active: true,
duration: 400,
reanimate: false,
valueIndex: 1
},
stack: true,
lines: {
show: false,
fill: true,
steps: false
},
bars: {
align: "left",
lineWidth: 0,
fill: 0.7,
show: true,
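// Flot's time axis is in milliseconds; this equals 0.8 of a 30-day month
// (0.8 * 1000 * 60 * 60 * 24 * 30), i.e. bars fill 80% of each monthly slot.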
barWidth: 800 * 60 * 60 * 24 * 30
}
},
yaxis:{
min: 0,
tickFormatter: valueFormatter
},
xaxis: {
mode: "time",
timeformat: "%Y",
timezone: "browser",
minTickSize: [1, "year"]
},
grid: {
hoverable: true,
mouseActiveRadius:1,
},
legend: {
container: "#legend",
labelFormatter: function (label, series) {
$('<div class="labelG col-lg-4 col-md-6 col-sm-12"><i class="fa fa-circle-o colorG" style="color:'+series.color+'"></i><span class="innerTextG">'+label+'</span></div>').appendTo("#legend");
return false;
}
//sorted: "ascending",
}
};
/*
* Launch a query with the selected page
* linked with simplepagination
*/
function page(num) {
$('html, body').animate({ scrollTop: $(".filters").offset().top - 150 }, 600);
var limit = $("#limit").val();
var offset = (num - 1)*limit;
$("#offset").val(offset);
$("#offset").trigger('change');
}
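// Example (added): with limit = 25, page(3) sets offset to 50 and re-triggers the query.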
/*
* Compute the sticky-bar padding and height for the current viewport width
*/
function calculHeight() {
if ($(window).width() > 1200) {
return {
padding:150,
height:150
}
}else if($(window).width() <= 1200 && $(window).width() > 992){
return {
padding:240,
height:240
}
}else if($(window).width() <= 992) {
return {
padding:60,
height:60
}
}else {
return {
padding:60,
height:60
}
}
}
/*
* Return the french translation of the current type
*/
function defTypeName(){
if ($("input[type='radio'][name=type]:checked").val() == 'contract') {
return 'contrat';
}else{
return 'subvention';
}
}
/*
*Global variables
*/
var dataSet = 'amount';
var formatedData;
var ovc;
var init = true;
var navmod;
resizegraph();
$(function() {
$(".noResults").hide();
$(".searchbar").sticky({
responsiveWidth: true,
topSpacing: 0,
})
/*
//Uncomment if you want to have the "filters bar" sticky
$(".filters").sticky({
responsiveWidth: true,
topSpacing: 0,
})
.on('sticky-start', function() {
$(".toolbars.filters").css('padding-top', calculHeight().padding);
$(".toolbars.filters").parent().css('height',calculHeight().height);
})
.on('sticky-end', function() {
$(".toolbars.filters").css('padding-top', '60px');
$(".toolbars.filters").parent().css('height','auto');
});
$(".toolbars.filters").css('padding-top', '60px');
$(".toolbars.filters").parent().css('height','auto');
*/
// Init API client
ovc = new OvcMtlApi();
if (ovc_api_url) {
ovc.base_url = ovc_api_url;
}
ovc.init();
// Init pagination
var pages = $(ovc.paginationSelector).pagination({
displayedPages : 3,
edges: 1,
prevText: '<',
nextText: '>',
hrefTextPrefix : "javascript:page('",
hrefTextSuffix : "')",
});
// the default request
if(ovc.historyState()){
var apiData = ovc.byMonthActivity();
if (results_accepted(apiData.stats)) {
formatedData = ovc.flotChartsFormat(apiData.stats);
var links = ovc.export();
for (var l in links) {
if(links[l].enabled){
$(".export."+l).attr('href',links[l].link_to);
$(".export."+l).css('cursor','pointer');
$(".export."+l).attr('title','Exportation limitée à '+links[l].limit +' fiches');
}else{
$(".export."+l).css('cursor','not-allowed');
$(".export."+l).attr('title','Exportation limitée à '+links[l].limit +' fiches');
}
}
pages.pagination('updateItemsOnPage', ovc.itemsOnPage);
pages.pagination('updateItems', ovc.items);
pages.pagination('selectPage', ovc.currentPageByOffset());
renameLabels();
}
}
//Needed - Growraf is not fast enough to recalculate on each screen resize, Y axis bug
var doit;
$( window ).resize(function() {
if (window.plot) {
window.plot.shutdown();
clearTimeout(doit);
doit = setTimeout(resizedw, 1000);
}
});
$('#procuring_entity').selectpicker('render');
$('#activity').selectpicker('render');
if( /Android|webOS|iPhone|iPad|iPod|BlackBerry/i.test(navigator.userAgent) ) {
$('#procuring_entity').selectpicker('mobile');
$('#activity').selectpicker('mobile');
}
/*
* On each request
*/
if (ovc.detectIE() > 9 || !ovc.detectIE()) {
$(window).bind('popstate', function(event){
init = false
navmod = 'popstate';
if(ovc.refresh()){
var apiData = ovc.byMonthActivity();
if (results_accepted(apiData.stats)) {
formatedData = ovc.flotChartsFormat(apiData.stats);
var links = ovc.export();
for (var l in links) {
if(links[l].enabled){
$(".export."+l).attr('href',links[l].link_to);
$(".export."+l).css('cursor','pointer');
$(".export."+l).attr('title','Exportation limitée à '+links[l].limit +' fiches');
}else{
$(".export."+l).css('cursor','not-allowed');
$(".export."+l).attr('title','Exportation limitée à '+links[l].limit +' fiches');
}
}
pages.pagination('updateItemsOnPage', ovc.itemsOnPage);
pages.pagination('updateItems', ovc.items);
pages.pagination('selectPage', ovc.currentPageByOffset());
plotWithOptions(wg(formatedData), options);
}
var qd = ovc.paramsExtract();
$('.searchboxhidden').tagsinput('removeAll');
if (qd.supplier) {
$('.searchboxLabel').html("Fournisseur");
$(".bootstrap-tagsinput").find('.searchbox').hide();
$(".bootstrap-tagsinput").find('.searchbutton').hide();
$('.searchboxhidden').tagsinput('removeAll');
$('.searchboxhidden').tagsinput('add', ovc.decode(qd).supplier);
$(".bootstrap-tagsinput").find('.tag').addClass('supplier');
}else if(qd.buyer) {
$('.searchboxLabel').html("Octroyé par");
$(".bootstrap-tagsinput").find('.searchbox').hide();
$(".bootstrap-tagsinput").find('.searchbutton').hide();
$('.searchboxhidden').tagsinput('add', ovc.decode(qd).buyer);
$(".bootstrap-tagsinput").find('.tag').addClass('buyer');
}else if(qd.q){
$('.searchboxLabel').html("Mots clés");
$(".bootstrap-tagsinput").find('.searchbox').hide();
$(".bootstrap-tagsinput").find('.searchbutton').hide();
$('.searchboxhidden').tagsinput('add',ovc.decode(qd).q);
}else{
$('.searchboxLabel').html("Mots clés");
$("#theMainTag").css('display','none');
$(".bootstrap-tagsinput").find('.searchbox').show();
$(".bootstrap-tagsinput").find('.searchbutton').show();
}
$('#procuring_entity').selectpicker('refresh');
$('#activity').selectpicker('refresh');
renameLabels();
}
});
}else{
$(window).bind('hashchange', function() {
init = false
navmod = 'hashchange';
if(ovc.refresh()){
var apiData = ovc.byMonthActivity();
if (results_accepted(apiData.stats)) {
formatedData = ovc.flotChartsFormat(apiData.stats);
var links = ovc.export();
for (var l in links) {
if(links[l].enabled){
$(".export."+l).attr('href',links[l].link_to);
$(".export."+l).css('cursor','pointer');
$(".export."+l).attr('title','Exportation limitée à '+links[l].limit +' fiches');
}else{
$(".export."+l).css('cursor','not-allowed');
$(".export."+l).attr('title','Exportation limitée à '+links[l].limit +' fiches');
}
}
pages.pagination('updateItemsOnPage', ovc.itemsOnPage);
pages.pagination('updateItems', ovc.items);
pages.pagination('selectPage', ovc.currentPageByOffset());
plotWithOptions(wg(formatedData), options);
}
var qd = ovc.paramsExtract();
$('.searchboxhidden').tagsinput('removeAll');
if (qd.supplier) {
$('.searchboxLabel').html("Fournisseur");
$(".bootstrap-tagsinput").find('.searchbox').hide();
$(".bootstrap-tagsinput").find('.searchbutton').hide();
$('.searchboxhidden').tagsinput('removeAll');
$('.searchboxhidden').tagsinput('add', ovc.decode(qd).supplier);
$(".bootstrap-tagsinput").find('.tag').addClass('supplier');
}else if(qd.buyer) {
$('.searchboxLabel').html("Octroyé par");
$(".bootstrap-tagsinput").find('.searchbox').hide();
$(".bootstrap-tagsinput").find('.searchbutton').hide();
$('.searchboxhidden').tagsinput('add', ovc.decode(qd).buyer);
$(".bootstrap-tagsinput").find('.tag').addClass('buyer');
}else if(qd.q){
$('.searchboxLabel').html("Mots clés");
$(".bootstrap-tagsinput").find('.searchbox').hide();
$(".bootstrap-tagsinput").find('.searchbutton').hide();
$('.searchboxhidden').tagsinput('add',ovc.decode(qd).q);
}else{
$('.searchboxLabel').html("Mots clés");
$("#theMainTag").css('display','none');
$(".bootstrap-tagsinput").find('.searchbox').show();
$(".bootstrap-tagsinput").find('.searchbutton').show();
}
$('#procuring_entity').selectpicker('refresh');
$('#activity').selectpicker('refresh');
renameLabels();
}
});
}
$("input, select, textarea").not('[name=q]').not("[type=hidden]").not('.loading').change(function(event){
//offset to zero, it is a new search
if (!$(event.currentTarget).hasClass('loading')) {
$(ovc.currOffsetFieldSelector).val(0);
//remove buyer and supplier input values
if(ovc.historyState()){
var apiData = ovc.byMonthActivity();
if (results_accepted(apiData.stats)) {
formatedData = ovc.flotChartsFormat(apiData.stats);
var links = ovc.export();
for (var l in links) {
if(links[l].enabled){
$(".export."+l).attr('href',links[l].link_to);
$(".export."+l).css('cursor','pointer');
$(".export."+l).attr('title','Exportation limitée à '+links[l].limit +' fiches');
}else{
$(".export."+l).css('cursor','not-allowed');
$(".export."+l).attr('title','Exportation limitée à '+links[l].limit +' fiches');
}
}
}
}
pages.pagination('updateItems', ovc.items);
pages.pagination('selectPage', ovc.currentPageByOffset());
plotWithOptions(wg(formatedData), options);
$('html, body').animate({ scrollTop: 0 }, 300);
}
}).keyup(function(event){
if(event.keyCode == 13){
$("[name=q]").trigger("change");<|fim▁hole|> $("input[type='hidden']").not(".buyer").not(".supplier").change(function(event){
if(event.currentTarget.name == 'date_lt' || event.currentTarget.name == 'date_gt' || event.currentTarget.name == 'order_by'){
$(ovc.currOffsetFieldSelector).val(0);
pages.pagination('selectPage', '1');
}
console.log('keyword');
$("[name=q]").val($("[name=q]").val().replace(/\?/g,'').replace(/&/g,'').replace(/%/g,''));
if(ovc.historyState()){
var apiData = ovc.byMonthActivity();
if (results_accepted(apiData.stats)) {
formatedData = ovc.flotChartsFormat(apiData.stats);
var links = ovc.export();
for (var l in links) {
if(links[l].enabled){
$(".export."+l).attr('href',links[l].link_to);
$(".export."+l).css('cursor','pointer');
$(".export."+l).attr('title','Exportation limitée à '+links[l].limit +' fiches');
}else{
$(".export."+l).css('cursor','not-allowed');
$(".export."+l).attr('title','Exportation limitée à '+links[l].limit +' fiches');
}
}
}
}
pages.pagination('updateItems', ovc.items);
plotWithOptions(wg(formatedData), options);
});
plotWithOptions(wg(formatedData), options);
$("#date_gt_view").datepicker( {
format: "yyyy-mm",
viewMode: "months",
minViewMode: "months",
minDate: '2012/01/01',
}).on('changeDate', function(ev){
var unix_timestamp = ev.date.valueOf();
formatedDate = new Date(unix_timestamp ).yyyymmdd();
$("#date_gt").val(formatedDate);
$("#date_gt").trigger('change');
$(this).datepicker('hide');
});
$("#date_lt_view").datepicker( {
format: "yyyy-mm",
viewMode: "months",
minViewMode: "months",
minDate: '2012/01/01',
}).on('changeDate', function(ev){
var unix_timestamp = ev.date.valueOf();
formatedDate = new Date(unix_timestamp ).yyyymmlastdd();
$("#date_lt").val(formatedDate);
$("#date_lt").trigger('change');
$(this).datepicker('hide');
});
$("#ob_value").on('click', function(){
$("#order_by").val('value');
$("#order_dir").val('desc');
$("#order_by").trigger('change');
$(".orderby").removeClass("active");
$(this).addClass("active");
});
$("#ob_date").on('click', function(){
$("#order_by").val('date');
$("#order_dir").val('desc');
$("#order_by").trigger('change');
$(".orderby").removeClass("active");
$(this).addClass("active");
});
$("#od_supplier").on('click', function(){
$("#order_by").val('supplier');
$("#order_dir").val('asc');
$("#order_by").trigger('change');
$(".orderby").removeClass("active");
$(this).addClass("active");
});
$( ".upbtn-container" ).click(function() {
$(".graph-container").toggle(function(e){
if ($(this).is(":visible") ) {
$(".filters").css("padding-top","0px");
$("#toggleGraph").find('i').removeClass('fa-angle-double-down');
$("#toggleGraph").find('i').addClass('fa-angle-double-up');
$('html, body').animate({ scrollTop: 0 }, 300);
}else{
$(".filters").css("padding-top","70px");
$("#toggleGraph").find('i').removeClass('fa-angle-double-up');
$("#toggleGraph").find('i').addClass('fa-angle-double-down');
}
$(".filters").sticky('update');
});
});
$(".btnmenu").on('click', function(){
if (!$(".searchbar").is(':visible')) {
$(".searchbar").show();
$(".btnmenu i").removeClass('fa-bars');
$(".btnmenu i").addClass('fa-close');
}else {
$(".searchbar").hide();
$(".btnmenu i").removeClass('fa-close');
$(".btnmenu i").addClass('fa-bars');
}
return false;
});
$('.money').mask('00000000000000', {reverse: true});
$( ".switchgraph span" ).click(function() {
var optionsx = options;
if ($(this).hasClass('count')) {
var apiData = ovc.byMonthActivity('count');
$(this).removeClass('count');
$(this).addClass('value');
$(this).html('Voir nombre de '+defTypeName()+'s par mois');
$(".graphTitle").html('Montant total des '+defTypeName()+'s par mois');
dataSet = 'amount';
optionsx.yaxis.tickFormatter = valueFormatter;
}else if($(this).hasClass('value')) {
var apiData = ovc.byMonthActivity('value');
$(this).removeClass('value');
$(this).addClass('count');
$(this).html('Voir montant des '+defTypeName()+'s par mois');
$(".graphTitle").html('Nombre total de '+defTypeName()+'s par mois');
dataSet = 'count';
optionsx.yaxis.tickFormatter = countFormatter;
}
formatedData = ovc.flotChartsFormat(apiData.stats);
plotWithOptions(wg(formatedData), options);
});
$('.searchboxhidden').tagsinput({
maxTags: 1,
trimValue: true,
addOnBlur: false,
});
if ($(".searchboxhidden").val()) {
$(".bootstrap-tagsinput").find('.searchbox').hide();
$(".bootstrap-tagsinput").find('.searchbutton').hide();
//$('.searchboxhidden').tagsinput('refresh');
}
$('.searchboxhidden').on('beforeItemAdd', function(event) {
$(".bootstrap-tagsinput").find('.searchbox').hide();
$(".bootstrap-tagsinput").find('.searchbutton').hide();
$("input#offset").val('0');
pages.pagination('selectPage', '1');
})
$('.searchboxhidden').on('itemAdded', function(event) {
if (init || (ovc.decode(ovc.paramsExtract()).q != event.item && navmod == 'hashchange') || /Android|webOS|iPhone|iPad|iPod|BlackBerry/i.test(navigator.userAgent) ) {
$("#supplier").trigger('change');
}
init = true;
})
$('.searchboxhidden').on('itemRemoved', function(event) {
$('.searchboxLabel').html("Mots clés");
$(".bootstrap-tagsinput").find('.searchbox').show();
$(".bootstrap-tagsinput").find('.searchbutton').show();
$("#buyer").val('');
$("#supplier").val('');
if (init) {
$("#supplier").trigger('change');
}
init = true;
});
if ($('#supplier').val()) {
bysupplier($('#supplier').val());
}
if ($('#buyer').val()) {
bybuyer($('#buyer').val());
}
$(".scrolltop").on('click', function(){
$('html, body').animate({ scrollTop: 0 }, 300);
});
$(".scrolldown").on('click', function(){
$('html, body').animate({ scrollTop: $(".filters").offset().top - 140 }, 600);
});
$(".searchbutton i.fa").on('click', function(){
$('.searchbox').trigger($.Event( "keypress", { which: 13 } ));
});
$("[name=type]").on('change', function(){renameLabels()});
$(".export").hover(
function() // on mouseover
{
$(".exportInfo").html($(this).attr('title'));
},
function() // on mouseout
{
$(".exportInfo").html('');
});
});
$(window).load(function() {
$(".loading").fadeOut(500);
})<|fim▁end|>
|
}
});
|
<|file_name|>menudata.js<|end_file_name|><|fim▁begin|>/*
@licstart The following is the entire license notice for the
JavaScript code in this file.
Copyright (C) 1997-2019 by Dimitri van Heesch
This program is free software; you can redistribute it and/or modify
it under the terms of version 2 of the GNU General Public License as published by
the Free Software Foundation
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of<|fim▁hole|>MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
@licend The above is the entire license notice
for the JavaScript code in this file
*/
var menudata={children:[
{text:"Introduction",url:"index.html"},
{text:"Tutorial",url:"quick_guide.html"},
{text:"Guides",url:"pages.html"},
{text:"Reference",url:"modules.html"},
{text:"Files",url:"files.html"}]}<|fim▁end|>
| |
<|file_name|>Vector.java<|end_file_name|><|fim▁begin|>package fr.aumgn.bukkitutils.geom;
import fr.aumgn.bukkitutils.geom.direction.VectorDirection;
import fr.aumgn.bukkitutils.geom.vector.VectorIterator;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.bukkit.Location;
import org.bukkit.World;
import org.bukkit.block.Block;
import org.bukkit.entity.Entity;
import java.util.Iterator;
/**
* Immutable Vector class.
* Inspired from WorldEdit.
*/
public class Vector implements Iterable<Vector> {
private final double x, y, z;
public Vector() {
this.x = 0;
this.y = 0;
this.z = 0;
}
public Vector(int x, int y, int z) {
this.x = x;
this.y = y;
this.z = z;
}
public Vector(double x, double y, double z) {
this.x = x;
this.y = y;
this.z = z;
}
public Vector(Location loc) {
this(loc.getX(), loc.getY(), loc.getZ());
}
public Vector(Entity entity) {
this(entity.getLocation());
}
public Vector(Block block) {
this(block.getX(), block.getY(), block.getZ());
}
<|fim▁hole|> return x;
}
public Vector setX(double x) {
return new Vector(x, y, z);
}
public int getBlockX() {
return (int) Math.round(x);
}
public double getY() {
return y;
}
public Vector setY(double y) {
return new Vector(x, y, z);
}
public int getBlockY() {
return (int) Math.round(y);
}
public double getZ() {
return z;
}
public Vector setZ(double z) {
return new Vector(x, y, z);
}
public int getBlockZ() {
return (int) Math.round(z);
}
public Vector add(double i) {
return new Vector(this.x + i, this.y + i, this.z + i);
}
public Vector add(double ox, double oy, double oz) {
return new Vector(x + ox, y + oy, z + oz);
}
public Vector add(Vector other) {
return new Vector(x + other.x, y + other.y, z + other.z);
}
public Vector addX(double ox) {
return new Vector(x + ox, y, z);
}
public Vector addY(double oy) {
return new Vector(x, y + oy, z);
}
public Vector addZ(double oz) {
return new Vector(x, y, z + oz);
}
public Vector subtract(double i) {
return new Vector(x - i, y - i, z - i);
}
public Vector subtract(double ox, double oy, double oz) {
return new Vector(x - ox, y - oy, z - oz);
}
public Vector subtract(Vector other) {
return new Vector(x - other.x, y - other.y, z - other.z);
}
public Vector subtractX(double ox) {
return new Vector(x - ox, y, z);
}
public Vector subtractY(double oy) {
return new Vector(x, y - oy, z);
}
public Vector subtractZ(double oz) {
return new Vector(x, y, z - oz);
}
public Vector multiply(double i) {
return new Vector(x * i, y * i, z * i);
}
public Vector multiply(double ox, double oy, double oz) {
return new Vector(x * ox, y * oy, z * oz);
}
public Vector multiply(Vector other) {
return new Vector(x * other.x, y * other.y, z * other.z);
}
public Vector divide(double i) {
return new Vector(x / i, y / i, z / i);
}
public Vector divide(double ox, double oy, double oz) {
return new Vector(x / ox, y / oy, z / oz);
}
public Vector divide(Vector other) {
return new Vector(x / other.x, y / other.y, z / other.z);
}
public Vector getMiddle(Vector other) {
return new Vector(
(x + other.x) / 2,
(y + other.y) / 2,
(z + other.z) / 2);
}
public boolean isInside(Vector min, Vector max) {
return x >= min.x && x <= max.x
&& y >= min.y && y <= max.y
&& z >= min.z && z <= max.z;
}
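// Example (added): new Vector(1, 2, 3).isInside(new Vector(0, 0, 0), new Vector(5, 5, 5))
// returns true; bounds are inclusive on both ends.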
public boolean isZero() {
return x == 0.0 && y == 0.0 && z == 0.0;
}
public Vector positive() {
return new Vector(Math.abs(x), Math.abs(y), Math.abs(z));
}
public double lengthSq() {
return x * x + y * y + z * z;
}
public double length() {
return Math.sqrt(lengthSq());
}
public double distanceSq(Vector other) {
return subtract(other).lengthSq();
}
public double distance(Vector other) {
return subtract(other).length();
}
public Vector normalize() {
return divide(length());
}
public Vector2D to2D() {
return new Vector2D(x, z);
}
public Block toBlock(World world) {
return world.getBlockAt(getBlockX(), getBlockY(), getBlockZ());
}
public Direction toDirection() {
if (isZero()) {
return Direction.NONE;
}
return new VectorDirection(this);
}
public Direction towards(Vector to) {
return to.subtract(this).toDirection();
}
public org.bukkit.util.Vector toBukkit() {
return new org.bukkit.util.Vector(x, y, z);
}
public Location toLocation(World world) {
return toLocation(world, 0.0f, 0.0f);
}
public Location toLocation(World world, Vector2D direction) {
return toLocation(world, direction.toDirection());
}
public Location toLocation(World world, Direction dir) {
return toLocation(world, dir.getYaw(), dir.getPitch());
}
public Location toLocation(World world, float yaw, float pitch) {
return new Location(world, x, getBlockY() + 0.1, z, yaw, pitch);
}
@Override
public Iterator<Vector> iterator() {
return new VectorIterator(new Vector(), this);
}
public Iterable<Vector> rectangle(final Vector max) {
return new Iterable<Vector>() {
@Override
public Iterator<Vector> iterator() {
return new VectorIterator(Vector.this, max);
}
};
}
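// Usage sketch (added, illustrative): iterate the cuboid between this vector
// and max, e.g.
//     for (Vector v : new Vector(0, 0, 0).rectangle(new Vector(2, 2, 2))) { ... }
// The exact bound semantics are defined by VectorIterator, which is not shown here.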
@Override
public String toString() {
return "(" + x + ", " + y + ", " + z + ")";
}
@Override
public int hashCode() {
return new HashCodeBuilder(23, 11)
.append(x)
.append(y)
.append(z)
.toHashCode();
}
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof Vector)) {
return false;
}
Vector o = (Vector) obj;
return x == o.x && y == o.y && z == o.z;
}
public boolean equalsBlock(Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof Vector)) {
return false;
}
Vector other = (Vector) obj;
return getBlockX() == other.getBlockX()
&& getBlockY() == other.getBlockY()
&& getBlockZ() == other.getBlockZ();
}
}<|fim▁end|>
|
public double getX() {
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.