prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
---|---|
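Each row below pairs a fill-in-the-middle (FIM) prompt with its completion: the prompt holds a source file wrapped in `<|fim▁begin|>`/`<|fim▁end|>` markers with a `<|fim▁hole|>` marker where a span was cut out, and the completion column holds that cut-out span. As a minimal sketch only (the `splice` helper and row field names below are assumptions for illustration, not part of the dataset), a row can be stitched back into the original file like this:

```python
# Minimal sketch (assumed helper, not part of the dataset): rebuild the original
# source file from one (prompt, completion) row by splicing the completion back
# into the prompt at the hole marker.
FIM_BEGIN = "<|fim▁begin|>"
FIM_HOLE = "<|fim▁hole|>"
FIM_END = "<|fim▁end|>"

def splice(prompt: str, completion: str) -> str:
    """Return the reconstructed file: prefix + completion + suffix."""
    body = prompt.split(FIM_BEGIN, 1)[-1]         # drop the <|file_name|> header
    body = body.split(FIM_END, 1)[0]              # drop anything after the end marker
    prefix, _, suffix = body.partition(FIM_HOLE)  # split around the removed span
    return prefix + completion + suffix

# Usage (assumed row layout): splice(row["prompt"], row["completion"])
```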
<|file_name|>0005_auto_20160918_0221.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-18 06:21
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
<|fim▁hole|> operations = [
migrations.RenameField(
model_name='hiree',
old_name='first_name',
new_name='email',
),
migrations.RenameField(
model_name='hiree',
old_name='last_name',
new_name='name',
),
]<|fim▁end|> | dependencies = [
('CareerTinder', '0004_auto_20160918_0152'),
]
|
<|file_name|>parser-postfix-exp-assign.js<|end_file_name|><|fim▁begin|>/* Copyright JS Foundation and other contributors, http://js.foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// tests for ECMA-262 v6 12.4.3
var tests = [
// IdentifierReference
'var obj = {a : 10, ret : function(params) {return a++ = 42;}}',
'var obj = {a : 10, ret : function(params) {return a-- = 42;}}',
// NullLiteral
'var a = null; a++ = 42',
'var a = null; a-- = 42',
// BooleanLiteral
'var a = true; a++ = 42',
'var a = false; a++ = 42',
'var a = true; a-- = 42',
'var a = false; a-- = 42',
// DecimalLiteral
'var a = 5; a++ = 42',
'var a = 1.23e4; a++ = 42',
'var a = 5; a-- = 42',
'var a = 1.23e4; a-- = 42',
// BinaryIntegerLiteral
'var a = 0b11; a++ = 42',
'var a = 0B11; a++ = 42',
'var a = 0b11; a-- = 42',
'var a = 0B11; a-- = 42',
// OctalIntegerLiteral
'var a = 0o66; a++ = 42',
'var a = 0O66; a++ = 42',
'var a = 0o66; a-- = 42',
'var a = 0O66; a-- = 42',
// HexIntegerLiteral
'var a = 0xFF; a++ = 42',
'var a = 0xFF; a++ = 42',
'var a = 0xFF; a-- = 42',
'var a = 0xFF; a-- = 42',
// StringLiteral
'var a = "foo"; a++ = 42',
'var a = "\\n"; a++ = 42',
'var a = "\\uFFFF"; a++ = 42',
'var a ="\\u{F}"; a++ = 42',
'var a = "foo"; a-- = 42',
'var a = "\\n"; a-- = 42',
'var a = "\\uFFFF"; a-- = 42',
'var a ="\\u{F}"; a-- = 42',
// ArrayLiteral
'var a = []; a++ = 42',
'var a = [1,a=5]; a++ = 42',
'var a = []; a-- = 42',
'var a = [1,a=5]; a-- = 42',
// ObjectLiteral
'var a = {}; a++ = 42',
'var a = {"foo" : 5}; a++ = 42',
'var a = {5 : 5}; a++ = 42',
'var a = {a : 5}; a++ = 42',
'var a = {[key] : 5}; a++ = 42',
'var a = {func(){}}; a++ = 42',
'var a = {get(){}}; a++ = 42',
'var a = {set(prop){}}; a++ = 42',
'var a = {*func(){}}; a++ = 42',
'var a = {}; a-- = 42',<|fim▁hole|> 'var a = {func(){}}; a-- = 42',
'var a = {get(){}}; a-- = 42',
'var a = {set(prop){}}; a-- = 42',
'var a = {*func(){}}; a-- = 42',
// ClassExpression
'class a {}; a++ = 42',
'class a {}; class b extends a {}; b++ = 42',
'class a {function(){}}; a++ = 42',
'class a {}; a-- = 42',
'class a {}; class b extends a {}; b-- = 42',
'class a {function(){}}; a-- = 42',
// GeneratorExpression
'function *a (){}; a++ = 42',
'function *a (){}; a-- = 42',
// RegularExpressionLiteral
'var a = /(?:)/; a++ = 42',
'var a = /a/; a++ = 42',
'var a = /[a]/; a++ = 42',
'var a = /a/g; a++ = 42',
'var a = /(?:)/; a-- = 42',
'var a = /a/; a-- = 42',
'var a = /[a]/; a-- = 42',
'var a = /a/g; a-- = 42',
// TemplateLiteral
'var a = ``; a++ = 42',
'a = 5; var b = (`${a}`); b++ = 42',
'var a = `foo`; a++ = 42',
'var a = `\\uFFFF`; a++ = 42',
'var a = ``; a-- = 42',
'a = 5; var b = (`${a}`); b-- = 42',
'var a = `foo`; a-- = 42',
'var a = `\\uFFFF`; a-- = 42',
// MemberExpression
'var a = [1,2,3]; a[0]++ = 42',
'var a = {0:12}; a[0]++ = 42',
'var a = {"foo":12}; a.foo++ = 42',
'var a = {func: function(){}}; a.func++ = 42',
'var a = [1,2,3]; a[0]-- = 42',
'var a = {0:12}; a[0]-- = 42',
'var a = {"foo":12}; a.foo-- = 42',
'var a = {func: function(){}}; a.func-- = 42',
// SuperProperty
'class a {constructor() {Object.defineProperty(this, \'foo\', {configurable:true, writable:true, value:1}); }} ' +
'class b extends a {constructor() {super();} foo() {super.foo++ = 42;}}',
'class a {constructor() {Object.defineProperty(this, \'foo\', {configurable:true, writable:true, value:1}); }} ' +
'class b extends a {constructor() {super();} foo() {super.foo-- = 42;}}',
// NewExpression
'function a() {}; var b = new a(); b++ = 42',
'function a() {}; var b = new a(); b-- = 42',
'class g {constructor() {Object.defineProperty(this, \'foo\', {configurable:true, writable:true, value:1}); }}; ' +
'var a = new g(); a++ = 42',
'class g {constructor() {Object.defineProperty(this, \'foo\', {configurable:true, writable:true, value:1}); }}; ' +
'var a = new g(); a-- = 42',
'class a {}; var n = new a(); a++ = 42',
'class a {}; var n = new a(); a-- = 42',
// CallExpression
'function a(prop){return prop}; var b = a(12); b++ = 42',
'function a(prop){return prop}; var b = a(12); b-- = 42',
];
for (var i = 0; i < tests.length; i++)
{
try {
eval(tests[i]);
assert(false);
} catch (e) {
assert(e instanceof SyntaxError);
}
}<|fim▁end|> | 'var a = {"foo" : 5}; a-- = 42',
'var a = {5 : 5}; a-- = 42',
'var a = {a : 5}; a-- = 42',
'var a = {[key] : 5}; a-- = 42', |
<|file_name|>userauth.py<|end_file_name|><|fim▁begin|># -*- test-case-name: twisted.conch.test.test_userauth -*-
# Copyright (c) 2001-2009 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Implementation of the ssh-userauth service.
Currently implemented authentication types are public-key and password.
Maintainer: Paul Swartz
"""
import struct, warnings
from twisted.conch import error, interfaces
from twisted.conch.ssh import keys, transport, service
from twisted.conch.ssh.common import NS, getNS
from twisted.cred import credentials
from twisted.cred.error import UnauthorizedLogin
from twisted.internet import defer, reactor
from twisted.python import failure, log, util
class SSHUserAuthServer(service.SSHService):
"""
A service implementing the server side of the 'ssh-userauth' service. It
is used to authenticate the user on the other side as being able to access
this server.
@ivar name: the name of this service: 'ssh-userauth'
@type name: C{str}
@ivar authenticatedWith: a list of authentication methods that have
already been used.
@type authenticatedWith: C{list}
@ivar loginTimeout: the number of seconds we wait before disconnecting
the user for taking too long to authenticate
@type loginTimeout: C{int}
@ivar attemptsBeforeDisconnect: the number of failed login attempts we
allow before disconnecting.
@type attemptsBeforeDisconnect: C{int}
@ivar loginAttempts: the number of login attempts that have been made
@type loginAttempts: C{int}
@ivar passwordDelay: the number of seconds to delay when the user gives
an incorrect password
@type passwordDelay: C{int}
@ivar interfaceToMethod: a C{dict} mapping credential interfaces to
authentication methods. The server checks to see which of the
cred interfaces have checkers and tells the client that those methods
are valid for authentication.
@type interfaceToMethod: C{dict}
@ivar supportedAuthentications: A list of the supported authentication
methods.
@type supportedAuthentications: C{list} of C{str}
@ivar user: the last username the client tried to authenticate with
@type user: C{str}
@ivar method: the current authentication method
@type method: C{str}
@ivar nextService: the service the user wants started after authentication
has been completed.
@type nextService: C{str}
@ivar portal: the L{twisted.cred.portal.Portal} we are using for
authentication
@type portal: L{twisted.cred.portal.Portal}
@ivar clock: an object with a callLater method. Stubbed out for testing.
"""
name = 'ssh-userauth'
loginTimeout = 10 * 60 * 60
# 10 minutes before we disconnect them
attemptsBeforeDisconnect = 20
# 20 login attempts before a disconnect
passwordDelay = 1 # number of seconds to delay on a failed password
clock = reactor
interfaceToMethod = {
credentials.ISSHPrivateKey : 'publickey',
credentials.IUsernamePassword : 'password',
credentials.IPluggableAuthenticationModules : 'keyboard-interactive',
}
def serviceStarted(self):
"""
Called when the userauth service is started. Set up instance
variables, check if we should allow password/keyboard-interactive
authentication (only allow if the outgoing connection is encrypted) and
set up a login timeout.
"""
self.authenticatedWith = []
self.loginAttempts = 0
self.user = None
self.nextService = None
self._pamDeferred = None
self.portal = self.transport.factory.portal
self.supportedAuthentications = []
for i in self.portal.listCredentialsInterfaces():
if i in self.interfaceToMethod:
self.supportedAuthentications.append(self.interfaceToMethod[i])
if not self.transport.isEncrypted('in'):
# don't let us transport password in plaintext
if 'password' in self.supportedAuthentications:
self.supportedAuthentications.remove('password')
if 'keyboard-interactive' in self.supportedAuthentications:
self.supportedAuthentications.remove('keyboard-interactive')
self._cancelLoginTimeout = self.clock.callLater(
self.loginTimeout,
self.timeoutAuthentication)
def serviceStopped(self):
"""
Called when the userauth service is stopped. Cancel the login timeout
if it's still going.
"""
if self._cancelLoginTimeout:
self._cancelLoginTimeout.cancel()
self._cancelLoginTimeout = None
def timeoutAuthentication(self):
"""
Called when the user has timed out on authentication. Disconnect
with a DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE message.
"""
self._cancelLoginTimeout = None
self.transport.sendDisconnect(
transport.DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE,
'you took too long')
def tryAuth(self, kind, user, data):
"""
Try to authenticate the user with the given method. Dispatches to an
auth_* method.
@param kind: the authentication method to try.
@type kind: C{str}
@param user: the username the client is authenticating with.
@type user: C{str}
@param data: authentication specific data sent by the client.
@type data: C{str}
@return: A Deferred called back if the method succeeded, or erred back
if it failed.
@rtype: C{defer.Deferred}
"""
log.msg('%s trying auth %s' % (user, kind))
if kind not in self.supportedAuthentications:
return defer.fail(
error.ConchError('unsupported authentication, failing'))
kind = kind.replace('-', '_')
f = getattr(self,'auth_%s'%kind, None)
if f:
ret = f(data)
if not ret:
return defer.fail(
error.ConchError('%s return None instead of a Deferred'
% kind))
else:
return ret
return defer.fail(error.ConchError('bad auth type: %s' % kind))
def ssh_USERAUTH_REQUEST(self, packet):
"""
The client has requested authentication. Payload::
string user
string next service
string method
<authentication specific data>
@type packet: C{str}
"""
user, nextService, method, rest = getNS(packet, 3)
if user != self.user or nextService != self.nextService:
self.authenticatedWith = [] # clear auth state
self.user = user
self.nextService = nextService
self.method = method
d = self.tryAuth(method, user, rest)
if not d:
self._ebBadAuth(
failure.Failure(error.ConchError('auth returned none')))
return
d.addCallback(self._cbFinishedAuth)
d.addErrback(self._ebMaybeBadAuth)
d.addErrback(self._ebBadAuth)
return d
def _cbFinishedAuth(self, (interface, avatar, logout)):
"""
The callback when the user has successfully been authenticated. For a
description of the arguments, see L{twisted.cred.portal.Portal.login}.
We start the service requested by the user.
"""
self.transport.avatar = avatar
self.transport.logoutFunction = logout
service = self.transport.factory.getService(self.transport,
self.nextService)
if not service:
raise error.ConchError('could not get next service: %s'
% self.nextService)
log.msg('%s authenticated with %s' % (self.user, self.method))
self.transport.sendPacket(MSG_USERAUTH_SUCCESS, '')
self.transport.setService(service())
def _ebMaybeBadAuth(self, reason):
"""
An intermediate errback. If the reason is
error.NotEnoughAuthentication, we send a MSG_USERAUTH_FAILURE, but
with the partial success indicator set.
@type reason: L{twisted.python.failure.Failure}
"""
reason.trap(error.NotEnoughAuthentication)
self.transport.sendPacket(MSG_USERAUTH_FAILURE,
NS(','.join(self.supportedAuthentications)) + '\xff')
def _ebBadAuth(self, reason):
"""
The final errback in the authentication chain. If the reason is
error.IgnoreAuthentication, we simply return; the authentication
method has sent its own response. Otherwise, send a failure message
and (if the method is not 'none') increment the number of login
attempts.
@type reason: L{twisted.python.failure.Failure}
"""
if reason.check(error.IgnoreAuthentication):
return
if self.method != 'none':
log.msg('%s failed auth %s' % (self.user, self.method))
if reason.check(UnauthorizedLogin):
log.msg('unauthorized login: %s' % reason.getErrorMessage())
elif reason.check(error.ConchError):
log.msg('reason: %s' % reason.getErrorMessage())
else:
log.msg(reason.getTraceback())
self.loginAttempts += 1
if self.loginAttempts > self.attemptsBeforeDisconnect:
self.transport.sendDisconnect(
transport.DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE,
'too many bad auths')
return
self.transport.sendPacket(
MSG_USERAUTH_FAILURE,
NS(','.join(self.supportedAuthentications)) + '\x00')
def auth_publickey(self, packet):
"""
Public key authentication. Payload::
byte has signature
string algorithm name
string key blob
[string signature] (if has signature is True)
Create a SSHPublicKey credential and verify it using our portal.
"""
hasSig = ord(packet[0])
algName, blob, rest = getNS(packet[1:], 2)
pubKey = keys.Key.fromString(blob)
signature = hasSig and getNS(rest)[0] or None
if hasSig:
b = (NS(self.transport.sessionID) + chr(MSG_USERAUTH_REQUEST) +
NS(self.user) + NS(self.nextService) + NS('publickey') +
chr(hasSig) + NS(pubKey.sshType()) + NS(blob))
c = credentials.SSHPrivateKey(self.user, algName, blob, b,
signature)
return self.portal.login(c, None, interfaces.IConchUser)
else:
c = credentials.SSHPrivateKey(self.user, algName, blob, None, None)
return self.portal.login(c, None,
interfaces.IConchUser).addErrback(self._ebCheckKey,
packet[1:])
def _ebCheckKey(self, reason, packet):
"""
Called back if the user did not send a signature. If reason is
error.ValidPublicKey then this key is valid for the user to
authenticate with. Send MSG_USERAUTH_PK_OK.
"""
reason.trap(error.ValidPublicKey)
# if we make it here, it means that the publickey is valid
self.transport.sendPacket(MSG_USERAUTH_PK_OK, packet)
return failure.Failure(error.IgnoreAuthentication())
def auth_password(self, packet):
"""
Password authentication. Payload::
string password
Make a UsernamePassword credential and verify it with our portal.
"""
password = getNS(packet[1:])[0]
c = credentials.UsernamePassword(self.user, password)
return self.portal.login(c, None, interfaces.IConchUser).addErrback(
self._ebPassword)
def _ebPassword(self, f):
"""
If the password is invalid, wait before sending the failure in order
to delay brute-force password guessing.
"""
d = defer.Deferred()
self.clock.callLater(self.passwordDelay, d.callback, f)
return d
def auth_keyboard_interactive(self, packet):
"""
Keyboard interactive authentication. No payload. We create a
PluggableAuthenticationModules credential and authenticate with our
portal.
"""
if self._pamDeferred is not None:
self.transport.sendDisconnect(
transport.DISCONNECT_PROTOCOL_ERROR,
"only one keyboard interactive attempt at a time")
return defer.fail(error.IgnoreAuthentication())
c = credentials.PluggableAuthenticationModules(self.user,
self._pamConv)
return self.portal.login(c, None, interfaces.IConchUser)
def _pamConv(self, items):
"""
Convert a list of PAM authentication questions into a
MSG_USERAUTH_INFO_REQUEST. Returns a Deferred that will be called
back when the user has responses to the questions.
@param items: a list of 2-tuples (message, kind). We only care about
kinds 1 (password) and 2 (text).
@type items: C{list}
@rtype: L{defer.Deferred}
"""
resp = []
for message, kind in items:
if kind == 1: # password
resp.append((message, 0))
elif kind == 2: # text
resp.append((message, 1))
elif kind in (3, 4):
return defer.fail(error.ConchError(
'cannot handle PAM 3 or 4 messages'))
else:
return defer.fail(error.ConchError(
'bad PAM auth kind %i' % kind))
packet = NS('') + NS('') + NS('')
packet += struct.pack('>L', len(resp))
for prompt, echo in resp:
packet += NS(prompt)
packet += chr(echo)
self.transport.sendPacket(MSG_USERAUTH_INFO_REQUEST, packet)
self._pamDeferred = defer.Deferred()
return self._pamDeferred
def ssh_USERAUTH_INFO_RESPONSE(self, packet):
"""
The user has responded with answers to PAM's authentication questions.
Parse the packet into a PAM response and callback self._pamDeferred.
Payload::
uint32 number of responses
string response 1
...
string response n
"""
d, self._pamDeferred = self._pamDeferred, None
try:
resp = []
numResps = struct.unpack('>L', packet[:4])[0]
packet = packet[4:]
while len(resp) < numResps:
response, packet = getNS(packet)
resp.append((response, 0))
if packet:
raise error.ConchError("%i bytes of extra data" % len(packet))
except:
d.errback(failure.Failure())
else:
d.callback(resp)
class SSHUserAuthClient(service.SSHService):
"""
A service implementing the client side of 'ssh-userauth'.
@ivar name: the name of this service: 'ssh-userauth'
@type name: C{str}
@ivar preferredOrder: a list of authentication methods we support, in
order of preference. The client will try authentication methods in
this order, making callbacks for information when necessary.
@type preferredOrder: C{list}
@ivar user: the name of the user to authenticate as
@type user: C{str}
@ivar instance: the service to start after authentication has finished
@type instance: L{service.SSHService}
@ivar authenticatedWith: a list of strings of authentication methods we've tried
@type authenticatedWith: C{list} of C{str}
@ivar triedPublicKeys: a list of public key objects that we've tried to
authenticate with
@type triedPublicKeys: C{list} of L{Key}
@ivar lastPublicKey: the last public key object we've tried to authenticate
with
@type lastPublicKey: L{Key}
"""
name = 'ssh-userauth'
preferredOrder = ['publickey', 'password', 'keyboard-interactive']
def __init__(self, user, instance):
self.user = user
self.instance = instance
def serviceStarted(self):
self.authenticatedWith = []
self.triedPublicKeys = []
self.lastPublicKey = None
self.askForAuth('none', '')
def askForAuth(self, kind, extraData):
"""
Send a MSG_USERAUTH_REQUEST.
@param kind: the authentication method to try.
@type kind: C{str}
@param extraData: method-specific data to go in the packet
@type extraData: C{str}
"""
self.lastAuth = kind
self.transport.sendPacket(MSG_USERAUTH_REQUEST, NS(self.user) +
NS(self.instance.name) + NS(kind) + extraData)
def tryAuth(self, kind):
"""
Dispatch to an authentication method.<|fim▁hole|> """
kind = kind.replace('-', '_')
log.msg('trying to auth with %s' % (kind,))
f = getattr(self,'auth_%s' % (kind,), None)
if f:
return f()
def _ebAuth(self, ignored, *args):
"""
Generic callback for a failed authentication attempt. Respond by
asking for the list of accepted methods (the 'none' method)
"""
self.askForAuth('none', '')
def ssh_USERAUTH_SUCCESS(self, packet):
"""
We received a MSG_USERAUTH_SUCCESS. The server has accepted our
authentication, so start the next service.
"""
self.transport.setService(self.instance)
def ssh_USERAUTH_FAILURE(self, packet):
"""
We received a MSG_USERAUTH_FAILURE. Payload::
string methods
byte partial success
If partial success is True, then the previous method succeeded but is
not sufficient for authentication. methods is a comma-separated list of
accepted authentication methods.
We sort the list of methods by their position in self.preferredOrder,
removing methods that have already succeeded. We then call
self.tryAuth with the most preferred method.
"""
canContinue, partial = getNS(packet)
partial = ord(partial)
if partial:
self.authenticatedWith.append(self.lastAuth)
def orderByPreference(meth):
if meth in self.preferredOrder:
return self.preferredOrder.index(meth)
else:
return -1
canContinue = util.dsu([meth for meth in canContinue.split(',')
if meth not in self.authenticatedWith],
orderByPreference)
log.msg('can continue with: %s' % canContinue)
return self._cbUserauthFailure(None, iter(canContinue))
def _cbUserauthFailure(self, result, iterator):
if result:
return
try:
method = iterator.next()
except StopIteration:
self.transport.sendDisconnect(
transport.DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE,
'no more authentication methods available')
else:
d = defer.maybeDeferred(self.tryAuth, method)
d.addCallback(self._cbUserauthFailure, iterator)
return d
def ssh_USERAUTH_PK_OK(self, packet):
"""
This message (number 60) can mean several different messages depending
on the current authentication type. We dispatch to individual methods
in order to handle this request.
"""
func = getattr(self, 'ssh_USERAUTH_PK_OK_%s' %
self.lastAuth.replace('-', '_'), None)
if func is not None:
return func(packet)
else:
self.askForAuth('none', '')
def ssh_USERAUTH_PK_OK_publickey(self, packet):
"""
This is MSG_USERAUTH_PK. Our public key is valid, so we create a
signature and try to authenticate with it.
"""
publicKey = self.lastPublicKey
b = (NS(self.transport.sessionID) + chr(MSG_USERAUTH_REQUEST) +
NS(self.user) + NS(self.instance.name) + NS('publickey') +
'\xff' + NS(publicKey.sshType()) + NS(publicKey.blob()))
d = self.signData(publicKey, b)
if not d:
self.askForAuth('none', '')
# this will fail, we'll move on
return
d.addCallback(self._cbSignedData)
d.addErrback(self._ebAuth)
def ssh_USERAUTH_PK_OK_password(self, packet):
"""
This is MSG_USERAUTH_PASSWD_CHANGEREQ. The password given has expired.
We ask for an old password and a new password, then send both back to
the server.
"""
prompt, language, rest = getNS(packet, 2)
self._oldPass = self._newPass = None
d = self.getPassword('Old Password: ')
d = d.addCallbacks(self._setOldPass, self._ebAuth)
d.addCallback(lambda ignored: self.getPassword(prompt))
d.addCallbacks(self._setNewPass, self._ebAuth)
def ssh_USERAUTH_PK_OK_keyboard_interactive(self, packet):
"""
This is MSG_USERAUTH_INFO_RESPONSE. The server has sent us the
questions it wants us to answer, so we ask the user and send the
responses.
"""
name, instruction, lang, data = getNS(packet, 3)
numPrompts = struct.unpack('!L', data[:4])[0]
data = data[4:]
prompts = []
for i in range(numPrompts):
prompt, data = getNS(data)
echo = bool(ord(data[0]))
data = data[1:]
prompts.append((prompt, echo))
d = self.getGenericAnswers(name, instruction, prompts)
d.addCallback(self._cbGenericAnswers)
d.addErrback(self._ebAuth)
def _cbSignedData(self, signedData):
"""
Called back out of self.signData with the signed data. Send the
authentication request with the signature.
@param signedData: the data signed by the user's private key.
@type signedData: C{str}
"""
publicKey = self.lastPublicKey
self.askForAuth('publickey', '\xff' + NS(publicKey.sshType()) +
NS(publicKey.blob()) + NS(signedData))
def _setOldPass(self, op):
"""
Called back when we are choosing a new password. Simply store the old
password for now.
@param op: the old password as entered by the user
@type op: C{str}
"""
self._oldPass = op
def _setNewPass(self, np):
"""
Called back when we are choosing a new password. Get the old password
and send the authentication message with both.
@param np: the new password as entered by the user
@type np: C{str}
"""
op = self._oldPass
self._oldPass = None
self.askForAuth('password', '\xff' + NS(op) + NS(np))
def _cbGenericAnswers(self, responses):
"""
Called back when we are finished answering keyboard-interactive
questions. Send the info back to the server in a
MSG_USERAUTH_INFO_RESPONSE.
@param responses: a list of C{str} responses
@type responses: C{list}
"""
data = struct.pack('!L', len(responses))
for r in responses:
data += NS(r.encode('UTF8'))
self.transport.sendPacket(MSG_USERAUTH_INFO_RESPONSE, data)
def auth_publickey(self):
"""
Try to authenticate with a public key. Ask the user for a public key;
if the user has one, send the request to the server and return True.
Otherwise, return False.
@rtype: C{bool}
"""
d = defer.maybeDeferred(self.getPublicKey)
d.addBoth(self._cbGetPublicKey)
return d
def _cbGetPublicKey(self, publicKey):
if isinstance(publicKey, str):
warnings.warn("Returning a string from "
"SSHUserAuthClient.getPublicKey() is deprecated "
"since Twisted 9.0. Return a keys.Key() instead.",
DeprecationWarning)
publicKey = keys.Key.fromString(publicKey)
if not isinstance(publicKey, keys.Key): # failure or None
publicKey = None
if publicKey is not None:
self.lastPublicKey = publicKey
self.triedPublicKeys.append(publicKey)
log.msg('using key of type %s' % publicKey.type())
self.askForAuth('publickey', '\x00' + NS(publicKey.sshType()) +
NS(publicKey.blob()))
return True
else:
return False
def auth_password(self):
"""
Try to authenticate with a password. Ask the user for a password.
If the user will return a password, return True. Otherwise, return
False.
@rtype: C{bool}
"""
d = self.getPassword()
if d:
d.addCallbacks(self._cbPassword, self._ebAuth)
return True
else: # returned None, don't do password auth
return False
def auth_keyboard_interactive(self):
"""
Try to authenticate with keyboard-interactive authentication. Send
the request to the server and return True.
@rtype: C{bool}
"""
log.msg('authing with keyboard-interactive')
self.askForAuth('keyboard-interactive', NS('') + NS(''))
return True
def _cbPassword(self, password):
"""
Called back when the user gives a password. Send the request to the
server.
@param password: the password the user entered
@type password: C{str}
"""
self.askForAuth('password', '\x00' + NS(password))
def signData(self, publicKey, signData):
"""
Sign the given data with the given public key.
By default, this will call getPrivateKey to get the private key,
then sign the data using Key.sign().
This method is factored out so that it can be overridden to use
alternate methods, such as a key agent.
@param publicKey: The public key object returned from L{getPublicKey}
@type publicKey: L{keys.Key}
@param signData: the data to be signed by the private key.
@type signData: C{str}
@return: a Deferred that's called back with the signature
@rtype: L{defer.Deferred}
"""
key = self.getPrivateKey()
if not key:
return
return key.addCallback(self._cbSignData, signData)
def _cbSignData(self, privateKey, signData):
"""
Called back when the private key is returned. Sign the data and
return the signature.
@param privateKey: the private key object
@type publicKey: L{keys.Key}
@param signData: the data to be signed by the private key.
@type signData: C{str}
@return: the signature
@rtype: C{str}
"""
if not isinstance(privateKey, keys.Key):
warnings.warn("Returning a PyCrypto key object from "
"SSHUserAuthClient.getPrivateKey() is deprecated "
"since Twisted 9.0. Return a keys.Key() instead.",
DeprecationWarning)
privateKey = keys.Key(privateKey)
return privateKey.sign(signData)
def getPublicKey(self):
"""
Return a public key for the user. If no more public keys are
available, return C{None}.
This implementation always returns C{None}. Override it in a
subclass to actually find and return a public key object.
@rtype: L{Key} or L{NoneType}
"""
return None
def getPrivateKey(self):
"""
Return a L{Deferred} that will be called back with the private key
object corresponding to the last public key from getPublicKey().
If the private key is not available, errback on the Deferred.
@rtype: L{Deferred} called back with L{Key}
"""
return defer.fail(NotImplementedError())
def getPassword(self, prompt = None):
"""
Return a L{Deferred} that will be called back with a password.
prompt is a string to display for the password, or None for a generic
'user@hostname's password: '.
@type prompt: C{str}/C{None}
@rtype: L{defer.Deferred}
"""
return defer.fail(NotImplementedError())
def getGenericAnswers(self, name, instruction, prompts):
"""
Returns a L{Deferred} with the responses to the prompts.
@param name: The name of the authentication currently in progress.
@param instruction: Describes what the authentication wants.
@param prompts: A list of (prompt, echo) pairs, where prompt is a
string to display and echo is a boolean indicating whether the
user's response should be echoed as they type it.
"""
return defer.fail(NotImplementedError())
MSG_USERAUTH_REQUEST = 50
MSG_USERAUTH_FAILURE = 51
MSG_USERAUTH_SUCCESS = 52
MSG_USERAUTH_BANNER = 53
MSG_USERAUTH_PASSWD_CHANGEREQ = 60
MSG_USERAUTH_INFO_REQUEST = 60
MSG_USERAUTH_INFO_RESPONSE = 61
MSG_USERAUTH_PK_OK = 60
messages = {}
for k, v in locals().items():
if k[:4]=='MSG_':
messages[v] = k # doesn't handle doubles
SSHUserAuthServer.protocolMessages = messages
SSHUserAuthClient.protocolMessages = messages
del messages
del v<|fim▁end|> |
@param kind: the authentication method
@type kind: C{str} |
<|file_name|>binder.py<|end_file_name|><|fim▁begin|>'''
Copyright 2013 George Caley
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import codecs
import os
import re
import sqlite3
# woo regular expressions
PREREQS_RE = re.compile(r"Pre-?req(?:uisites?)?:(.*?)(?:</p>|;)")
EXCLUSIONS_RE = re.compile(r"((?:Excluded|Exclusion|Exclusions|(?:and )?Excludes)[: ](.*?))(?:</p>|<br />)", re.IGNORECASE)
COREQS_RE = re.compile(r"Co-?requisite:(.*?)</p>", re.IGNORECASE)
NAME_RE = re.compile(r"<title>UNSW Handbook Course - (.*?) - [A-Z]{4}[0-9]{4}</title>", re.DOTALL)
DESC_RE = re.compile(r"<!-- Start Course Description -->(.*?)<!-- End Course description -->", re.DOTALL | re.IGNORECASE)
GENED_RE = re.compile(r"Available for General Education:")
OUTLINE_RE = re.compile(r"Course Outline:.*?<a .*?href=[\"'](.*?)[\"']")
UOC_RE = re.compile(r"Units of Credit:.*?([0-9]+)")
COURSE_RE = re.compile(r"[A-Z]{4}[0-9]{4}", re.IGNORECASE)
BR_RE = re.compile(r"<br ?/?>", re.IGNORECASE)
TAG_RE = re.compile(r"</?.*?>")
TYPE_PREREQUISITE = "prerequisite"
TYPE_COREQUISITE = "corequisite"
TYPE_EXCLUSION = "exclusion"
DATABASE_FILENAME = "courses.db"
COURSE_DIR = "courses"
if os.path.exists(DATABASE_FILENAME):
print "Deleting existing database"
os.unlink(DATABASE_FILENAME)
print "Creating new database"
conn = sqlite3.connect(DATABASE_FILENAME)
cur = conn.cursor()
print "Creating tables"
cur.execute("CREATE TABLE courses (code text primary key, name text, description text, prerequisites text, corequisites text, exclusions text, gened integer, outline text, uoc integer)")
cur.execute("CREATE TABLE relationships (source text, destination text, type text)")
print "Loading course list"
print
filenames = os.listdir(COURSE_DIR)
i = 0
for filename in filenames:
i += 1
code = filename.rstrip(".html")
print "Reading %s (%d/%d)" % (code, i, len(filenames))
# open with unicode support
f = codecs.open("%s/%s" % (COURSE_DIR, filename), encoding="utf-8", mode="r")
data = f.read()
f.close()
# strip 's and <strong> tags
data = data.replace(" ", " ")
data = data.replace("<strong>", "")
data = data.replace("</strong>", "")
# find name
match = re.search(NAME_RE, data)
if match:
name = match.group(1).strip().replace("\n", "")
print "Found name:", name
else:
name = None
print "Couldn't find name"
print "Fatal error!"
quit()
# find exclusions. all of them.
exclusions = ""
exclusions_list = []
while True:
match = re.search(EXCLUSIONS_RE, data)
if match:
exclusions = match.group(2).strip()
print "Found exclusions:", exclusions
data = data.replace(match.group(1), "")
exclusions_list = re.findall(COURSE_RE, exclusions)<|fim▁hole|> #print "Couldn't find exclusions"
break
# find corequisites
match = re.search(COREQS_RE, data)
if match:
coreqs = match.group(1).strip()
print "Found corequisites:", coreqs
data = data.replace(match.group(0), "")
coreqs_list = map(unicode.upper, re.findall(COURSE_RE, coreqs))
print "Corequisites list:", coreqs_list
else:
coreqs = None
coreqs_list = []
print "Couldn't find corequisites"
# find prerequisites
match = re.search(PREREQS_RE, data)
if match:
prereqs = match.group(1).strip()
print "Found prerequisites:", prereqs
data = data.replace(match.group(0), "")
prereqs_list = map(unicode.upper, re.findall(COURSE_RE, prereqs))
print "Prerequisites list:", prereqs_list
else:
prereqs = None
prereqs_list = []
print "Couldn't find prerequisites"
# find description
match = re.search(DESC_RE, data)
if match:
desc = match.group(1).strip()
# change <br>'s
#desc = re.sub(BR_RE, "\n", desc)
# strip tags
#desc = re.sub(TAG_RE, "", desc)
#print "Found description:", desc
print "Found description"
else:
desc = None
print "Couldn't find description"
# find general education statement
match = re.search(GENED_RE, data)
if match:
gened = 1
else:
gened = 0
# find course outline
match = re.search(OUTLINE_RE, data)
if match:
outline = match.group(1).strip()
print "Found course outline:", outline
else:
outline = None
print "Couldn't find course outline"
# find uoc
match = re.search(UOC_RE, data)
if match:
uoc = match.group(1).strip()
try:
uoc = int(uoc)
print "Found UoC:", uoc
except:
print "UoC was not an integer: '%s'" % uoc
uoc = None
else:
uoc = None
print "Couldn't find UoC"
print "Writing to database"
cur.execute("INSERT INTO courses (code, name, description, prerequisites, corequisites, exclusions, gened, outline, uoc) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)", (code, name, desc, prereqs, coreqs, exclusions, gened, outline, uoc))
for prereq in prereqs_list:
cur.execute("INSERT INTO relationships (source, destination, type) VALUES (?, ?, ?)", (code, prereq, TYPE_PREREQUISITE))
for coreq in coreqs_list:
cur.execute("INSERT INTO relationships (source, destination, type) VALUES (?, ?, ?)", (code, coreq, TYPE_COREQUISITE))
for exclusion in exclusions_list:
cur.execute("INSERT INTO relationships (source, destination, type) VALUES (?, ?, ?)", (code, exclusion, TYPE_EXCLUSION))
print
conn.commit()
conn.close()<|fim▁end|> | print "Exclusions list:", exclusions_list
else:
#exclusions = None
#exclusions_list = [] |
<|file_name|>handler.py<|end_file_name|><|fim▁begin|>__author__ = 'Marco Maio'
import time
class Handler():
def __init__(self, stocks_today=None, investments_by_name=None, investments_by_availability=None):
# input data assessment
if stocks_today is None:
raise ValueError('Stocks_today container not specified!')
elif investments_by_name is None:
raise ValueError('Investments_by_name container not specified!')
elif investments_by_availability is None:
raise ValueError('Investments_by_availability container not specified!')
self.__stocks_today = stocks_today
self.__investments_by_name = investments_by_name
self.__investments_by_availability = investments_by_availability
def get_amount_by_stock_name(self, stock_name):
if stock_name is None or len(stock_name) == 0:
raise ValueError('Stock name not specified!')
return self.__stocks_today[stock_name]["EUR"] *\
self.__stocks_today[stock_name]["Numbers of parts"]
def get_amount_total_investment(self):
tot = 0
for i in self.__stocks_today:
tot += self.get_amount_by_stock_name(i)
return tot
def get_total_amount_by_date(self, date=None, stock_name="", closest_availability_only=False):
if date is None or len(date) == 0:
raise ValueError('Date not specified!')
dates = [d for d in self.__investments_by_availability.keys() if len(d) > 0]
eligible_dates =[]
for d in dates:
if time.strptime(date, "%d/%m/%Y") >= time.strptime(d, "%d/%m/%Y"):
if not closest_availability_only or date.split('/')[2] == d.split('/')[2]:
eligible_dates.append(d)
if len(eligible_dates)== 0:
raise ValueError('No fund available by the ' + date)
tot = 0
stocks = set()
for ed in eligible_dates:
for k, v in self.__investments_by_availability[ed].items():
if stock_name in k:
stocks.add(k)
tot += self.__stocks_today[k]["EUR"] * v
return tot, stocks
<|fim▁hole|>
def get_paid_by_stock_name(self, stock_name=None):
if stock_name is None or len(stock_name) == 0:
raise ValueError('Stock name not specified!')
if stock_name not in self.__stocks_today:
raise ValueError('Please provide a valid stock name!')
tot = 0.0
for k, v in self.__investments_by_name[stock_name].items():
tot += v['Number of actions bought'] * v['Purchase value']
return tot
def get_total_gain(self):
tot_paid = 0.0
for stock_name in self.__investments_by_name:
tot_paid += self.get_paid_by_stock_name(stock_name)
tot = self.get_amount_total_investment()
gain = tot - tot_paid
percentage_gain = (tot/tot_paid - 1)*100
return gain, percentage_gain
def get_gain_by_stock_name(self, stock_name):
if stock_name is None or len(stock_name) == 0:
raise ValueError('Stock name not specified!')
if stock_name not in self.__stocks_today:
raise ValueError('Please provide a valid stock name!')
tot_paid = self.get_paid_by_stock_name(stock_name)
tot = self.get_amount_by_stock_name(stock_name)
gain = tot - tot_paid
percentage_gain = (tot/tot_paid - 1)*100
return gain, percentage_gain
def get_next_available_amount(self):
dates = [d for d in self.__investments_by_availability.keys() if len(d) > 0]
min_date = None
min_date_str = ""
for d in dates:
current_date = time.strptime(d, "%d/%m/%Y")
if min_date is None or min_date > current_date:
min_date = current_date
min_date_str = d
return min_date_str, self.get_total_amount_by_date(min_date_str)<|fim▁end|> | |
<|file_name|>virtualnetworkpeerings.go<|end_file_name|><|fim▁begin|>package network
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"net/http"
)
// VirtualNetworkPeeringsClient is the network Client
type VirtualNetworkPeeringsClient struct {
BaseClient
}
// NewVirtualNetworkPeeringsClient creates an instance of the VirtualNetworkPeeringsClient client.
func NewVirtualNetworkPeeringsClient(subscriptionID string) VirtualNetworkPeeringsClient {
return NewVirtualNetworkPeeringsClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewVirtualNetworkPeeringsClientWithBaseURI creates an instance of the VirtualNetworkPeeringsClient client.
func NewVirtualNetworkPeeringsClientWithBaseURI(baseURI string, subscriptionID string) VirtualNetworkPeeringsClient {
return VirtualNetworkPeeringsClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// CreateOrUpdate creates or updates a peering in the specified virtual network.
// Parameters:
// resourceGroupName - the name of the resource group.
// virtualNetworkName - the name of the virtual network.
// virtualNetworkPeeringName - the name of the peering.
// virtualNetworkPeeringParameters - parameters supplied to the create or update virtual network peering
// operation.
func (client VirtualNetworkPeeringsClient) CreateOrUpdate(ctx context.Context, resourceGroupName string, virtualNetworkName string, virtualNetworkPeeringName string, virtualNetworkPeeringParameters VirtualNetworkPeering) (result VirtualNetworkPeeringsCreateOrUpdateFuture, err error) {
req, err := client.CreateOrUpdatePreparer(ctx, resourceGroupName, virtualNetworkName, virtualNetworkPeeringName, virtualNetworkPeeringParameters)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "CreateOrUpdate", nil, "Failure preparing request")
return
}
result, err = client.CreateOrUpdateSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "CreateOrUpdate", result.Response(), "Failure sending request")
return
}
return
}
// CreateOrUpdatePreparer prepares the CreateOrUpdate request.
func (client VirtualNetworkPeeringsClient) CreateOrUpdatePreparer(ctx context.Context, resourceGroupName string, virtualNetworkName string, virtualNetworkPeeringName string, virtualNetworkPeeringParameters VirtualNetworkPeering) (*http.Request, error) {
pathParameters := map[string]interface{}{
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
"virtualNetworkName": autorest.Encode("path", virtualNetworkName),
"virtualNetworkPeeringName": autorest.Encode("path", virtualNetworkPeeringName),
}
const APIVersion = "2018-05-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPut(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}", pathParameters),
autorest.WithJSON(virtualNetworkPeeringParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// CreateOrUpdateSender sends the CreateOrUpdate request. The method will close the
// http.Response Body if it receives an error.
func (client VirtualNetworkPeeringsClient) CreateOrUpdateSender(req *http.Request) (future VirtualNetworkPeeringsCreateOrUpdateFuture, err error) {
var resp *http.Response
resp, err = autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
if err != nil {
return
}
err = autorest.Respond(resp, azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated))
if err != nil {
return
}
future.Future, err = azure.NewFutureFromResponse(resp)
return
}
// CreateOrUpdateResponder handles the response to the CreateOrUpdate request. The method always
// closes the http.Response Body.
func (client VirtualNetworkPeeringsClient) CreateOrUpdateResponder(resp *http.Response) (result VirtualNetworkPeering, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// Delete deletes the specified virtual network peering.
// Parameters:
// resourceGroupName - the name of the resource group.
// virtualNetworkName - the name of the virtual network.
// virtualNetworkPeeringName - the name of the virtual network peering.
func (client VirtualNetworkPeeringsClient) Delete(ctx context.Context, resourceGroupName string, virtualNetworkName string, virtualNetworkPeeringName string) (result VirtualNetworkPeeringsDeleteFuture, err error) {
req, err := client.DeletePreparer(ctx, resourceGroupName, virtualNetworkName, virtualNetworkPeeringName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "Delete", nil, "Failure preparing request")
return
}
result, err = client.DeleteSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "Delete", result.Response(), "Failure sending request")
return
}
return
}
// DeletePreparer prepares the Delete request.
func (client VirtualNetworkPeeringsClient) DeletePreparer(ctx context.Context, resourceGroupName string, virtualNetworkName string, virtualNetworkPeeringName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
"virtualNetworkName": autorest.Encode("path", virtualNetworkName),
"virtualNetworkPeeringName": autorest.Encode("path", virtualNetworkPeeringName),
}
const APIVersion = "2018-05-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsDelete(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client VirtualNetworkPeeringsClient) DeleteSender(req *http.Request) (future VirtualNetworkPeeringsDeleteFuture, err error) {
var resp *http.Response
resp, err = autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
if err != nil {
return
}
err = autorest.Respond(resp, azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent))
if err != nil {
return<|fim▁hole|> }
future.Future, err = azure.NewFutureFromResponse(resp)
return
}
// DeleteResponder handles the response to the Delete request. The method always
// closes the http.Response Body.
func (client VirtualNetworkPeeringsClient) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent),
autorest.ByClosing())
result.Response = resp
return
}
// Get gets the specified virtual network peering.
// Parameters:
// resourceGroupName - the name of the resource group.
// virtualNetworkName - the name of the virtual network.
// virtualNetworkPeeringName - the name of the virtual network peering.
func (client VirtualNetworkPeeringsClient) Get(ctx context.Context, resourceGroupName string, virtualNetworkName string, virtualNetworkPeeringName string) (result VirtualNetworkPeering, err error) {
req, err := client.GetPreparer(ctx, resourceGroupName, virtualNetworkName, virtualNetworkPeeringName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "Get", nil, "Failure preparing request")
return
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "Get", resp, "Failure sending request")
return
}
result, err = client.GetResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "Get", resp, "Failure responding to request")
}
return
}
// GetPreparer prepares the Get request.
func (client VirtualNetworkPeeringsClient) GetPreparer(ctx context.Context, resourceGroupName string, virtualNetworkName string, virtualNetworkPeeringName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
"virtualNetworkName": autorest.Encode("path", virtualNetworkName),
"virtualNetworkPeeringName": autorest.Encode("path", virtualNetworkPeeringName),
}
const APIVersion = "2018-05-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client VirtualNetworkPeeringsClient) GetSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client VirtualNetworkPeeringsClient) GetResponder(resp *http.Response) (result VirtualNetworkPeering, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// List gets all virtual network peerings in a virtual network.
// Parameters:
// resourceGroupName - the name of the resource group.
// virtualNetworkName - the name of the virtual network.
func (client VirtualNetworkPeeringsClient) List(ctx context.Context, resourceGroupName string, virtualNetworkName string) (result VirtualNetworkPeeringListResultPage, err error) {
result.fn = client.listNextResults
req, err := client.ListPreparer(ctx, resourceGroupName, virtualNetworkName)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "List", nil, "Failure preparing request")
return
}
resp, err := client.ListSender(req)
if err != nil {
result.vnplr.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "List", resp, "Failure sending request")
return
}
result.vnplr, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "List", resp, "Failure responding to request")
}
return
}
// ListPreparer prepares the List request.
func (client VirtualNetworkPeeringsClient) ListPreparer(ctx context.Context, resourceGroupName string, virtualNetworkName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
"virtualNetworkName": autorest.Encode("path", virtualNetworkName),
}
const APIVersion = "2018-05-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client VirtualNetworkPeeringsClient) ListSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// ListResponder handles the response to the List request. The method always
// closes the http.Response Body.
func (client VirtualNetworkPeeringsClient) ListResponder(resp *http.Response) (result VirtualNetworkPeeringListResult, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// listNextResults retrieves the next set of results, if any.
func (client VirtualNetworkPeeringsClient) listNextResults(lastResults VirtualNetworkPeeringListResult) (result VirtualNetworkPeeringListResult, err error) {
req, err := lastResults.virtualNetworkPeeringListResultPreparer()
if err != nil {
return result, autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "listNextResults", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "listNextResults", resp, "Failure sending next results request")
}
result, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "network.VirtualNetworkPeeringsClient", "listNextResults", resp, "Failure responding to next results request")
}
return
}
// ListComplete enumerates all values, automatically crossing page boundaries as required.
func (client VirtualNetworkPeeringsClient) ListComplete(ctx context.Context, resourceGroupName string, virtualNetworkName string) (result VirtualNetworkPeeringListResultIterator, err error) {
result.page, err = client.List(ctx, resourceGroupName, virtualNetworkName)
return
}<|fim▁end|> | |
<|file_name|>issue-3991.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct HasNested {<|fim▁hole|> fn method_push_local(&mut self) {
self.nest[0].push(0);
}
}
pub fn main() {}<|fim▁end|> | nest: Vec<Vec<int> > ,
}
impl HasNested { |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django import http
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.core.exceptions import ObjectDoesNotExist
def shortcut(request, content_type_id, object_id):
"Redirect to an object's page based on a content-type ID and an object ID."
# Look up the object, making sure it's got a get_absolute_url() function.
try:
content_type = ContentType.objects.get(pk=content_type_id)
obj = content_type.get_object_for_this_type(pk=object_id)
except ObjectDoesNotExist:
raise http.Http404("Content type %s object %s doesn't exist" % (content_type_id, object_id))
try:
absurl = obj.get_absolute_url()
except AttributeError:
raise http.Http404("%s objects don't have get_absolute_url() methods" % content_type.name)
# Try to figure out the object's domain, so we can do a cross-site redirect
# if necessary.
# If the object actually defines a domain, we're done.
if absurl.startswith('http://') or absurl.startswith('https://'):
return http.HttpResponseRedirect(absurl)
# Otherwise, we need to introspect the object's relationships for a<|fim▁hole|>
# First, look for a many-to-many relationship to Site.
for field in opts.many_to_many:
if field.rel.to is Site:
try:
# Caveat: In the case of multiple related Sites, this just
# selects the *first* one, which is arbitrary.
object_domain = getattr(obj, field.name).all()[0].domain
except IndexError:
pass
if object_domain is not None:
break
# Next, look for a many-to-one relationship to Site.
if object_domain is None:
for field in obj._meta.fields:
if field.rel and field.rel.to is Site:
try:
object_domain = getattr(obj, field.name).domain
except Site.DoesNotExist:
pass
if object_domain is not None:
break
# Fall back to the current site (if possible).
if object_domain is None:
try:
object_domain = Site.objects.get_current().domain
except Site.DoesNotExist:
pass
# If all that malarkey found an object domain, use it. Otherwise, fall back
# to whatever get_absolute_url() returned.
if object_domain is not None:
protocol = request.is_secure() and 'https' or 'http'
return http.HttpResponseRedirect('%s://%s%s' % (protocol, object_domain, absurl))
else:
return http.HttpResponseRedirect(absurl)<|fim▁end|> | # relation to the Site object
object_domain = None
opts = obj._meta |
<|file_name|>gaia_auth.py<|end_file_name|><|fim▁begin|># Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import getpass
import os
import urllib
DEFAULT_GAIA_URL = "https://www.google.com:443/accounts/ClientLogin"
class GaiaAuthenticator:
def __init__(self, service, url = DEFAULT_GAIA_URL):
self._service = service
self._url = url
## Logs in to gaia and returns auth token.
def authenticate(self, email, passwd):<|fim▁hole|> 'PersistentCookie': 'true',
'accountType': 'GOOGLE'})
f = urllib.urlopen(self._url, params);
result = f.read()
for line in result.splitlines():
if line.startswith('Auth='):
auth_string = line[5:]
return auth_string
raise Exception("Gaia didn't return auth token: " + result)<|fim▁end|> | params = urllib.urlencode({'Email': email, 'Passwd': passwd,
'source': 'chromoting',
'service': self._service, |
<|file_name|>zoom-normalSpec.js<|end_file_name|><|fim▁begin|>describe('zoom normal animation', function() {
var prefixes = {
'-webkit-transform': true,
'-moz-transform': true,
'-o-transform': true,
'transform': true
};
var transform;
beforeEach(module('ngAnimate'));
beforeEach(module('ngAnimateMock'));
beforeEach(module('fx.animations'));
it("should zoom-normal in", function(done) {
inject(function($animate, $compile, $document, $rootScope, $rootElement, $window, $timeout) {
var element = $compile('<div class="fx-zoom-normal">zoom-normal</div>')($rootScope);
$rootElement.append(element);
angular.element($document[0].body).append($rootElement);
$rootScope.$digest();
$animate.enabled(true);
$animate.enter(element, $rootElement);
$rootScope.$digest();
$timeout.flush();
$window.setTimeout(function(){
angular.forEach(prefixes, function(bool, prefix){
if(element.css(prefix)){
transform = prefix;
}
});
expect(element.css('opacity')).to.be('1');
expect(element.css(transform)).to.be('matrix(1, 0, 0, 1, 0, 0)');
done();
},500);
});
});
it('should zoom-normal out', function(done){
inject(function($animate, $compile, $document, $rootScope, $rootElement, $window, $timeout) {
var element = $compile('<div class="fx-zoom-normal">zoom-normal</div>')($rootScope);
$rootElement.append(element);
angular.element($document[0].body).append($rootElement);
$rootScope.$digest();
$animate.enabled(true);
$animate.leave(element);
$rootScope.$digest();
$timeout.flush();
$window.setTimeout(function(){
expect(element.css('opacity')).to.be('0');
done();
},500);
});
});
it('should zoom-normal move', function(done){
inject(function($animate, $compile, $document, $rootScope, $rootElement, $window, $timeout) {
var element = $compile('<div class="fx-zoom-normal">zoom-normal</div>')($rootScope);
$rootElement.append(element);
angular.element($document[0].body).append($rootElement);
$rootScope.$digest();
$animate.enabled(true);
$animate.move(element, $rootElement);
$rootScope.$digest();
$timeout.flush();
$window.setTimeout(function(){
angular.forEach(prefixes, function(bool, prefix){
if(element.css(prefix)){
transform = prefix;
}
});
expect(element.css('opacity')).to.be('1');
expect(element.css(transform)).to.be('matrix(1, 0, 0, 1, 0, 0)');
done();
},500);
});
});
xit('should zoom-normal removeClass', function(done){
inject(function($animate, $compile, $document, $rootScope, $rootElement, $window, $timeout) {
var element = $compile('<div class="fx-zoom-normal ng-hide">zoom-normal</div>')($rootScope);
$rootElement.append(element);
angular.element($document[0].body).append($rootElement);
$rootScope.$digest();
$animate.enabled(true);
$animate.removeClass(element, 'ng-hide');
$rootScope.$digest();
$window.setTimeout(function(){
angular.forEach(prefixes, function(bool, prefix){
if(element.css(prefix)){
transform = prefix;
}
});
expect(element.css('opacity')).to.be('1');
expect(element.css(transform)).to.be('matrix(1, 0, 0, 1, 0, 0)');<|fim▁hole|> });
});
xit('should zoom-normal addClass', function(done){
inject(function($animate, $compile, $document, $rootScope, $rootElement, $window, $timeout) {
var element = $compile('<div class="fx-zoom-normal">zoom-normal</div>')($rootScope);
$rootElement.append(element);
angular.element($document[0].body).append($rootElement);
$rootScope.$digest();
$animate.enabled(true);
$animate.addClass(element, 'ng-hide');
$rootScope.$digest();
$window.setTimeout(function(){
expect(element.css('opacity')).to.be('0');
done();
},500);
});
});
});<|fim▁end|> | done();
},500); |
<|file_name|>wae.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// THIS CODE IS GENERATED - DO NOT MODIFY
// See angular/tools/gulp-tasks/cldr/extract.js
const u = undefined;
<|fim▁hole|> return 5;
}
export default [
'wae',
[['AM', 'PM'], u, u],
u,
[
['S', 'M', 'Z', 'M', 'F', 'F', 'S'], ['Sun', 'Män', 'Ziš', 'Mit', 'Fró', 'Fri', 'Sam'],
['Sunntag', 'Mäntag', 'Zištag', 'Mittwuč', 'Fróntag', 'Fritag', 'Samštag'],
['Sun', 'Män', 'Ziš', 'Mit', 'Fró', 'Fri', 'Sam']
],
u,
[
['J', 'H', 'M', 'A', 'M', 'B', 'H', 'Ö', 'H', 'W', 'W', 'C'],
['Jen', 'Hor', 'Mär', 'Abr', 'Mei', 'Brá', 'Hei', 'Öig', 'Her', 'Wím', 'Win', 'Chr'],
[
'Jenner', 'Hornig', 'Märze', 'Abrille', 'Meije', 'Bráčet', 'Heiwet', 'Öigšte', 'Herbštmánet',
'Wímánet', 'Wintermánet', 'Chrištmánet'
]
],
u,
[['v. Chr.', 'n. Chr'], u, u],
1,
[6, 0],
['y-MM-dd', 'd. MMM y', 'd. MMMM y', 'EEEE, d. MMMM y'],
['HH:mm', 'HH:mm:ss', 'HH:mm:ss z', 'HH:mm:ss zzzz'],
['{1} {0}', u, u, u],
[',', '’', ';', '%', '+', '-', 'E', '×', '‰', '∞', 'NaN', ':'],
['#,##0.###', '#,##0%', '¤ #,##0.00', '#E0'],
'CHF',
'CHF',
'CHF',
{},
'ltr',
plural
];<|fim▁end|> | function plural(n: number): number {
if (n === 1) return 1; |
<|file_name|>tree-view.e2e-spec.ts<|end_file_name|><|fim▁begin|>/// <reference path="../../matchers/custom-matchers.d.ts"/>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {customMatchers} from '../../matchers/custom-matchers';
import {LoginPage} from '../../login/login.po';
import {TreeViewPage} from './tree-view.po';
import {loadTestData, deleteTestData} from '../../utils/e2e_util';
import {MetronAlertsPage} from '../alerts-list.po';
describe('metron-alerts tree view', function () {
let page: TreeViewPage;
let listPage: MetronAlertsPage;
let loginPage: LoginPage;
<|fim▁hole|> loginPage = new LoginPage();
page = new TreeViewPage();
listPage = new MetronAlertsPage();
loginPage.login();
page.navigateToAlertsList();
});
afterAll(() => {
loginPage.logout();
deleteTestData();
});
beforeEach(() => {
jasmine.addMatchers(customMatchers);
});
it('should have all group by elements', () => {
let groupByItems = {
'source:type': '1',
'ip_dst_addr': '8',
'enrichm...:country': '3',
'ip_src_addr': '2'
};
expect(page.getGroupByCount()).toEqualBcoz(Object.keys(groupByItems).length, '4 Group By Elements should be present');
expect(page.getGroupByItemNames()).toEqualBcoz(Object.keys(groupByItems), 'Group By Elements names should be present');
expect(page.getGroupByItemCounts()).toEqualBcoz(Object.keys(groupByItems).map(key => groupByItems[key]),
'4 Group By Elements values should be present');
});
it('drag and drop should change group order', () => {
let before = {
'firstDashRow': ['0', 'alerts_ui_e2e', 'ALERTS', '169'],
'firstSubGroup': '0 US (22)',
'secondSubGroup': '0 RU (44)',
'thirdSubGroup': '0 FR (25)'
};
let after = {
'firstDashRow': ['0', 'US', 'ALERTS', '22'],
'secondDashRow': ['0', 'RU', 'ALERTS', '44'],
'thirdDashRow': ['0', 'FR', 'ALERTS', '25'],
'firstDashSubGroup': '0 alerts_ui_e2e (22)',
'secondDashSubGroup': '0 alerts_ui_e2e (44)',
'thirdDashSubGroup': '0 alerts_ui_e2e (25)'
};
page.selectGroup('source:type');
page.selectGroup('enrichments:geo:ip_dst_addr:country');
page.expandDashGroup('alerts_ui_e2e');
expect(page.getDashGroupValues('alerts_ui_e2e')).toEqualBcoz(before.firstDashRow, 'First Dash Row should be correct');
expect(page.getSubGroupValues('alerts_ui_e2e', 'US')).toEqualBcoz(before.firstSubGroup,
'Dash Group Values should be correct for US');
expect(page.getSubGroupValues('alerts_ui_e2e', 'RU')).toEqualBcoz(before.secondSubGroup,
'Dash Group Values should be present for RU');
expect(page.getSubGroupValues('alerts_ui_e2e', 'FR')).toEqualBcoz(before.thirdSubGroup,
'Dash Group Values should be present for FR');
page.dragGroup('source:type', 'ip_src_addr');
//page.selectGroup('source:type');
expect(page.getDashGroupValues('US')).toEqualBcoz(after.firstDashRow, 'First Dash Row after ' +
'reorder should be correct');
expect(page.getDashGroupValues('RU')).toEqualBcoz(after.secondDashRow, 'Second Dash Row after ' +
'reorder should be correct');
expect(page.getDashGroupValues('FR')).toEqualBcoz(after.thirdDashRow, 'Third Dash Row after ' +
'reorder should be correct');
page.expandDashGroup('US');
expect(page.getSubGroupValues('US', 'alerts_ui_e2e')).toEqualBcoz(after.firstDashSubGroup,
'First Dash Group Values should be present for alerts_ui_e2e');
page.expandDashGroup('RU');
expect(page.getSubGroupValues('RU', 'alerts_ui_e2e')).toEqualBcoz(after.secondDashSubGroup,
'Second Dash Group Values should be present for alerts_ui_e2e');
page.expandDashGroup('FR');
expect(page.getSubGroupValues('FR', 'alerts_ui_e2e')).toEqualBcoz(after.thirdDashSubGroup,
'Third Dash Group Values should be present for alerts_ui_e2e');
page.dragGroup('source:type', 'ip_dst_addr');
page.unGroup();
});
it('should have group details for single group by', () => {
let dashRowValues = ['0', 'alerts_ui_e2e', 'ALERTS', '169'];
let row1_page1 = ['-', 'dcda4423-7...0962fafc47', '2017-09-13 17:59:32', 'alerts_ui_e2e',
'192.168.138.158', 'US', '72.34.49.86', 'comarksecurity.com', 'NEW', '', ''];
let row1_page2 = ['-', '07b29c29-9...ff19eaa888', '2017-09-13 17:59:37', 'alerts_ui_e2e',
'192.168.138.158', 'FR', '62.75.195.236', '62.75.195.236', 'NEW', '', ''];
page.selectGroup('source:type');
expect(page.getActiveGroups()).toEqualBcoz(['source:type'], 'only source type group should be selected');
expect(page.getDashGroupValues('alerts_ui_e2e')).toEqualBcoz(dashRowValues, 'Dash Group Values should be present');
page.expandDashGroup('alerts_ui_e2e');
expect(page.getDashGroupTableValuesForRow('alerts_ui_e2e', 0)).toEqualBcoz(row1_page1, 'Dash Group Values should be present');
page.clickOnNextPage('alerts_ui_e2e');
expect(page.getTableValuesByRowId('alerts_ui_e2e', 0, 'FR')).toEqualBcoz(row1_page2, 'Dash Group Values should be present');
page.unGroup();
expect(page.getActiveGroups()).toEqualBcoz([], 'no groups should be selected');
});
it('should have group details for multiple group by', () => {
let usGroupIds = ['9a969c64-b...001cb011a3','a651f7c3-1...a97d4966c9','afc36901-3...d931231ab2','d860ac35-1...f9e282d571','04a5c3d0-9...af17c06fbc'];
let frGroupIds = ['07b29c29-9...ff19eaa888','7cd91565-1...de5be54a6e','ca5bde58-a...f3a88d2df4','5d6faf83-8...b88a407647','e2883424-f...79bb8b0606'];
page.selectGroup('source:type');
page.selectGroup('ip_dst_addr');
page.selectGroup('enrichments:geo:ip_dst_addr:country');
expect(page.getActiveGroups()).toEqualBcoz(['source:type', 'ip_dst_addr', 'enrichments:geo:ip_dst_addr:country'], '3 groups should be selected');
expect(page.getDashGroupValues('alerts_ui_e2e')).toEqualBcoz(['0', 'alerts_ui_e2e', 'ALERTS', '169'],
'Top Level Group Values should be present for alerts_ui_e2e');
page.expandDashGroup('alerts_ui_e2e');
expect(page.getSubGroupValuesByPosition('alerts_ui_e2e', '204.152.254.221', 0)).toEqualBcoz('0 204.152.254.221 (13)',
'Second Level Group Values should be present for 204.152.254.221');
page.expandSubGroupByPosition('alerts_ui_e2e', '204.152.254.221', 0);
expect(page.getSubGroupValuesByPosition('alerts_ui_e2e', 'US', 0)).toEqualBcoz('0 US (13)',
'Third Level Group Values should be present for US');
page.expandSubGroup('alerts_ui_e2e', 'US');
expect(page.getSubGroupValuesByPosition('alerts_ui_e2e', 'US', 0)).toEqualBcoz('0 US (13)',
'Third Level Group Values should not change when expanded for US');
expect(page.getCellValuesFromTable('alerts_ui_e2e', 'id', '04a5c3d0-9...af17c06fbc')).toEqual(usGroupIds, 'rows should be present for US');
page.expandSubGroup('alerts_ui_e2e', '62.75.195.236');
expect(page.getSubGroupValuesByPosition('alerts_ui_e2e', 'FR', 1)).toEqualBcoz('0 FR (23)',
'Third Level Group Values should be present for FR');
page.expandSubGroupByPosition('alerts_ui_e2e', 'FR', 1);
expect(page.getSubGroupValuesByPosition('alerts_ui_e2e', 'FR', 1)).toEqualBcoz('0 FR (23)',
'Third Level Group Values should not change when expanded for FR');
expect(page.getCellValuesFromTable('alerts_ui_e2e', 'id', 'e2883424-f...79bb8b0606')).toEqual(usGroupIds.concat(frGroupIds), 'rows should be present for FR');
page.unGroup();
expect(page.getActiveGroups()).toEqualBcoz([], 'no groups should be selected');
});
it('should have sort working for group details for multiple sub groups', () => {
let usTSCol = ['2017-09-13 17:59:32', '2017-09-13 17:59:42', '2017-09-13 17:59:53', '2017-09-13 18:00:02', '2017-09-13 18:00:14'];
let ruTSCol = ['2017-09-13 17:59:33', '2017-09-13 17:59:48', '2017-09-13 17:59:51', '2017-09-13 17:59:54', '2017-09-13 17:59:57'];
let frTSCol = ['2017-09-13 17:59:37', '2017-09-13 17:59:46', '2017-09-13 18:00:31', '2017-09-13 18:00:33', '2017-09-13 18:00:37'];
let usSortedTSCol = ['2017-09-13 18:02:19', '2017-09-13 18:02:16', '2017-09-13 18:02:09', '2017-09-13 18:01:58', '2017-09-13 18:01:52'];
let ruSortedTSCol = ['2017-09-14 06:29:40', '2017-09-14 06:29:40', '2017-09-14 06:29:40', '2017-09-14 06:29:40', '2017-09-13 18:02:13'];
let frSortedTSCol = ['2017-09-14 06:29:40', '2017-09-14 04:29:40', '2017-09-13 18:02:20', '2017-09-13 18:02:05', '2017-09-13 18:02:04'];
page.selectGroup('source:type');
page.selectGroup('enrichments:geo:ip_dst_addr:country');
page.expandDashGroup('alerts_ui_e2e');
page.expandSubGroup('alerts_ui_e2e', 'US');
page.expandSubGroup('alerts_ui_e2e', 'RU');
page.expandSubGroup('alerts_ui_e2e', 'FR');
let unsortedTS = [...usTSCol, ...ruTSCol, ...frTSCol];
let sortedTS = [...usSortedTSCol, ...ruSortedTSCol, ...frSortedTSCol];
page.sortSubGroup('alerts_ui_e2e', 'timestamp');
expect(page.getCellValuesFromTable('alerts_ui_e2e', 'timestamp', '2017-09-13 18:00:37')).toEqual(unsortedTS,
'timestamp should be sorted asc');
page.sortSubGroup('alerts_ui_e2e', 'timestamp');
expect(page.getCellValuesFromTable('alerts_ui_e2e', 'timestamp', '2017-09-13 18:02:04')).toEqual(sortedTS,
'timestamp should be sorted dsc');
page.unGroup();
expect(page.getActiveGroups()).toEqualBcoz([], 'no groups should be selected');
});
it('should have search working for group details for multiple sub groups', () => {
page.selectGroup('source:type');
page.selectGroup('enrichments:geo:ip_dst_addr:country');
page.expandDashGroup('alerts_ui_e2e');
expect(page.getNumOfSubGroups('alerts_ui_e2e')).toEqual(3, 'three sub groups should be present');
listPage.setSearchText('enrichments:geo:ip_dst_addr:country:FR');
expect(page.getNumOfSubGroups('alerts_ui_e2e')).toEqual(1, 'one sub groups should be present');
page.expandSubGroup('alerts_ui_e2e', 'FR');
let expected = ['FR', 'FR', 'FR', 'FR', 'FR'];
expect(page.getCellValuesFromTable('alerts_ui_e2e', 'enrichments:geo:ip_dst_addr:country', 'FR')).toEqual(expected,
'id should be sorted');
page.unGroup();
expect(page.getActiveGroups()).toEqualBcoz([], 'no groups should be selected');
});
});<|fim▁end|> | beforeAll(() => {
loadTestData(); |
<|file_name|>replicationControllersMock.js<|end_file_name|><|fim▁begin|>(function() {
'use strict';
angular.module('replicationControllers', [])
.service('replicationControllerService', ['$q', ReplicationControllerDataService]);
/**
* Replication Controller DataService
* Mock async data service.
*
* @returns {{loadAll: Function}}
* @constructor
*/
function ReplicationControllerDataService($q) {
var replicationControllers = {
"kind": "List",
"apiVersion": "v1",
"metadata": {},
"items": [
{
"kind": "ReplicationController",
"apiVersion": "v1",
"metadata": {
"name": "redis-master",
"namespace": "default",
"selfLink": "/api/v1/namespaces/default/replicationcontrollers/redis-master",
"uid": "f12969e0-ff77-11e4-8f2d-080027213276",
"resourceVersion": "28",
"creationTimestamp": "2015-05-21T05:12:14Z",
"labels": {
"name": "redis-master"
}
},
"spec": {
"replicas": 1,
"selector": {
"name": "redis-master"
},
"template": {
"metadata": {
"creationTimestamp": null,
"labels": {
"name": "redis-master"
}
},
"spec": {
"containers": [
{
"name": "master",
"image": "redis",
"ports": [
{
"containerPort": 6379,
"protocol": "TCP"
}
],
"resources": {},
"terminationMessagePath": "/dev/termination-log",
"imagePullPolicy": "IfNotPresent",
"capabilities": {},
"securityContext": {
"capabilities": {},
"privileged": false
}
}
],
"restartPolicy": "Always",
"dnsPolicy": "ClusterFirst",
"serviceAccount": ""
}<|fim▁hole|> "replicas": 1
}
}
]};
// Uses promises
return {
loadAll: function() {
// Simulate async call
return $q.when(replicationControllers);
}
};
}
})();<|fim▁end|> | }
},
"status": { |
<|file_name|>unbind_client_cluster.py<|end_file_name|><|fim▁begin|># -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013,2014,2015,2016 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,<|fim▁hole|># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains the logic for `aq unbind client --cluster`."""
from aquilon.aqdb.model import Cluster
from aquilon.worker.broker import BrokerCommand # pylint: disable=W0611
from aquilon.worker.commands.unbind_client_hostname import \
CommandUnbindClientHostname
class CommandUnbindClientCluster(CommandUnbindClientHostname):
required_parameters = ["cluster", "service"]
def get_dbobj(self, session, cluster=None, **_):
return Cluster.get_unique(session, cluster, compel=True)<|fim▁end|> | |
<|file_name|>cache.rs<|end_file_name|><|fim▁begin|>use api::{ErrorCode, CommandHandle, WalletHandle, PoolHandle};
use commands::{Command, CommandExecutor};
use commands::cache::CacheCommand;
use errors::prelude::*;
use utils::ctypes;
use domain::cache::{GetCacheOptions, PurgeOptions};
use domain::anoncreds::schema::SchemaId;
use domain::anoncreds::credential_definition::CredentialDefinitionId;
use utils::validation::Validatable;
use libc::c_char;
/// Gets credential definition json data for specified credential definition id.
/// If data is present inside of cache, cached data is returned.
/// Otherwise data is fetched from the ledger and stored inside of cache for future use.
///
/// EXPERIMENTAL
///
/// #Params
/// command_handle: command handle to map callback to caller context.
/// pool_handle: pool handle (created by open_pool_ledger).
/// wallet_handle: wallet handle (created by open_wallet).
/// submitter_did: DID of the submitter stored in secured Wallet.
/// id: identifier of credential definition.
/// options_json:
/// {
/// forceUpdate: (optional, false by default) Force update of record in cache from the ledger,
/// }
/// cb: Callback that takes command result as parameter.
#[no_mangle]
pub extern fn indy_get_cred_def(command_handle: CommandHandle,
pool_handle: PoolHandle,
wallet_handle: WalletHandle,
submitter_did: *const c_char,
id: *const c_char,
options_json: *const c_char,
cb: Option<extern fn(command_handle_: CommandHandle,
err: ErrorCode,
cred_def_json: *const c_char)>) -> ErrorCode {
trace!("indy_get_cred_def: >>> pool_handle: {:?}, wallet_handle: {:?}, submitter_did: {:?}, id: {:?}, options_json: {:?}",
pool_handle, wallet_handle, submitter_did, id, options_json);
check_useful_c_str!(submitter_did, ErrorCode::CommonInvalidParam4);
check_useful_validatable_string!(id, ErrorCode::CommonInvalidParam5, CredentialDefinitionId);
check_useful_json!(options_json, ErrorCode::CommonInvalidParam6, GetCacheOptions);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam7);
trace!("indy_get_cred_def: entities >>> pool_handle: {:?}, wallet_handle: {:?}, submitter_did: {:?}, id: {:?}, options_json: {:?}",
pool_handle, wallet_handle, submitter_did, id, options_json);
let result = CommandExecutor::instance()
.send(Command::Cache(CacheCommand::GetCredDef(
pool_handle,
wallet_handle,
submitter_did,
id,
options_json,
boxed_callback_string!("indy_get_cred_def", cb, command_handle)
)));
let res = prepare_result!(result);
trace!("indy_get_schema: <<< res: {:?}", res);
res
}
/// Gets schema json data for specified schema id.
/// If data is present inside of cache, cached data is returned.
/// Otherwise data is fetched from the ledger and stored inside of cache for future use.
///
/// EXPERIMENTAL
///
/// #Params
/// command_handle: command handle to map callback to caller context.
/// pool_handle: pool handle (created by open_pool_ledger).
/// wallet_handle: wallet handle (created by open_wallet).
/// submitter_did: DID of the submitter stored in secured Wallet.
/// id: identifier of schema.
/// options_json:
/// {
/// noCache: (bool, optional, false by default) Skip usage of cache,
/// noUpdate: (bool, optional, false by default) Use only cached data, do not try to update.
/// noStore: (bool, optional, false by default) Skip storing fresh data if updated,
/// minFresh: (int, optional, -1 by default) Return cached data if not older than this many seconds. -1 means do not check age.
/// }
/// cb: Callback that takes command result as parameter.
#[no_mangle]
pub extern fn indy_get_schema(command_handle: CommandHandle,
pool_handle: PoolHandle,
wallet_handle: WalletHandle,
submitter_did: *const c_char,
id: *const c_char,
options_json: *const c_char,
cb: Option<extern fn(command_handle_: CommandHandle,
err: ErrorCode,
schema_json: *const c_char)>) -> ErrorCode {
trace!("indy_get_schema: >>> pool_handle: {:?}, wallet_handle: {:?}, submitter_did: {:?}, id: {:?}, options_json: {:?}",
pool_handle, wallet_handle, submitter_did, id, options_json);
check_useful_c_str!(submitter_did, ErrorCode::CommonInvalidParam4);
check_useful_validatable_string!(id, ErrorCode::CommonInvalidParam5, SchemaId);
check_useful_json!(options_json, ErrorCode::CommonInvalidParam6, GetCacheOptions);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam7);
trace!("indy_get_schema: entities >>> pool_handle: {:?}, wallet_handle: {:?}, submitter_did: {:?}, id: {:?}, options_json: {:?}",
pool_handle, wallet_handle, submitter_did, id, options_json);
let result = CommandExecutor::instance()
.send(Command::Cache(CacheCommand::GetSchema(
pool_handle,
wallet_handle,
submitter_did,
id,
options_json,
boxed_callback_string!("indy_get_schema", cb, command_handle)
)));
let res = prepare_result!(result);
trace!("indy_get_schema: <<< res: {:?}", res);
res
}
/// Purge credential definition cache.
///
/// EXPERIMENTAL
///
/// #Params
/// command_handle: command handle to map callback to caller context.
/// wallet_handle: wallet handle (created by open_wallet).
/// options_json:
/// {
/// minFresh: (int, optional, -1 by default) Purge cached data if older than this many seconds. -1 means purge all.
/// }
/// cb: Callback that takes command result as parameter.
#[no_mangle]
pub extern fn indy_purge_cred_def_cache(command_handle: CommandHandle,
wallet_handle: WalletHandle,
options_json: *const c_char,
cb: Option<extern fn(command_handle_: CommandHandle,
err: ErrorCode)>) -> ErrorCode {
trace!("indy_purge_cred_def_cache: >>> wallet_handle: {:?}, options_json: {:?}",
wallet_handle, options_json);
check_useful_json!(options_json, ErrorCode::CommonInvalidParam3, PurgeOptions);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam4);
trace!("indy_purge_cred_def_cache: entities >>> wallet_handle: {:?}, options_json: {:?}",
wallet_handle, options_json);
let result = CommandExecutor::instance()
.send(Command::Cache(CacheCommand::PurgeCredDefCache(
wallet_handle,
options_json,
Box::new(move |result| {
let err = prepare_result!(result);
trace!("indy_purge_cred_def_cache:");
cb(command_handle, err)
})
)));
let res = prepare_result!(result);
trace!("indy_purge_cred_def_cache: <<< res: {:?}", res);
res
}
/// Purge schema cache.
///
/// EXPERIMENTAL
///
/// #Params
/// command_handle: command handle to map callback to caller context.
/// wallet_handle: wallet handle (created by open_wallet).
/// options_json:
/// {
/// minFresh: (int, optional, -1 by default) Purge cached data if older than this many seconds. -1 means purge all.
/// }
/// cb: Callback that takes command result as parameter.<|fim▁hole|> options_json: *const c_char,
cb: Option<extern fn(command_handle_: CommandHandle,
err: ErrorCode)>) -> ErrorCode {
trace!("indy_purge_schema_cache: >>> wallet_handle: {:?}, options_json: {:?}",
wallet_handle, options_json);
check_useful_json!(options_json, ErrorCode::CommonInvalidParam3, PurgeOptions);
check_useful_c_callback!(cb, ErrorCode::CommonInvalidParam4);
trace!("indy_purge_schema_cache: entities >>> wallet_handle: {:?}, options_json: {:?}",
wallet_handle, options_json);
let result = CommandExecutor::instance()
.send(Command::Cache(CacheCommand::PurgeSchemaCache(
wallet_handle,
options_json,
Box::new(move |result| {
let err = prepare_result!(result);
trace!("indy_purge_schema_cache:");
cb(command_handle, err)
})
)));
let res = prepare_result!(result);
trace!("indy_purge_schema_cache: <<< res: {:?}", res);
res
}<|fim▁end|> | #[no_mangle]
pub extern fn indy_purge_schema_cache(command_handle: CommandHandle,
wallet_handle: WalletHandle, |
<|file_name|>validation-messages.ts<|end_file_name|><|fim▁begin|>import { Expression } from 'aurelia-binding';
import { ValidationMessageParser } from './validation-message-parser';
export interface ValidationMessages {
[key: string]: string;
}
/**
* Dictionary of validation messages. [messageKey]: messageExpression
*/
export const validationMessages: ValidationMessages = {
/**
* The default validation message. Used with rules that have no standard message.
*/
default: `\${$displayName} is invalid.`,
required: `\${$displayName} is required.`,
matches: `\${$displayName} is not correctly formatted.`,
email: `\${$displayName} is not a valid email.`,
<|fim▁hole|> minItems: `\${$displayName} must contain at least \${$config.count} item\${$config.count === 1 ? '' : 's'}.`,
maxItems: `\${$displayName} cannot contain more than \${$config.count} item\${$config.count === 1 ? '' : 's'}.`,
min: `\${$displayName} must be at least \${$config.constraint}.`,
max: `\${$displayName} must be at most \${$config.constraint}.`,
range: `\${$displayName} must be between or equal to \${$config.min} and \${$config.max}.`,
between: `\${$displayName} must be between but not equal to \${$config.min} and \${$config.max}.`,
equals: `\${$displayName} must be \${$config.expectedValue}.`,
};
/**
* Retrieves validation messages and property display names.
*/
export class ValidationMessageProvider {
public static inject = [ValidationMessageParser];
constructor(public parser: ValidationMessageParser) { }
/**
* Returns a message binding expression that corresponds to the key.
* @param key The message key.
*/
public getMessage(key: string): Expression {
let message: string;
if (key in validationMessages) {
message = validationMessages[key];
} else {
message = validationMessages['default'];
}
return this.parser.parse(message);
}
/**
* Formulates a property display name using the property name and the configured
* displayName (if provided).
* Override this with your own custom logic.
* @param propertyName The property name.
*/
public getDisplayName(propertyName: string | number, displayName?: string | null | (() => string)): string {
if (displayName !== null && displayName !== undefined) {
return (displayName instanceof Function) ? displayName() : displayName as string;
}
// split on upper-case letters.
const words = propertyName.toString().split(/(?=[A-Z])/).join(' ');
// capitalize first letter.
return words.charAt(0).toUpperCase() + words.slice(1);
}
}<|fim▁end|> | minLength: `\${$displayName} must be at least \${$config.length} character\${$config.length === 1 ? '' : 's'}.`,
maxLength: `\${$displayName} cannot be longer than \${$config.length} character\${$config.length === 1 ? '' : 's'}.`,
|
<|file_name|>ns3_module_nix_vector_routing.py<|end_file_name|><|fim▁begin|>from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
def register_types(module):
root_module = module.get_root()
## ipv4-nix-vector-routing.h: ns3::Ipv4NixVectorRouting [class]
module.add_class('Ipv4NixVectorRouting', parent=root_module['ns3::Ipv4RoutingProtocol'])
typehandlers.add_type_alias('std::map< ns3::Ipv4Address, ns3::Ptr< ns3::NixVector >, std::less< ns3::Ipv4Address >, std::allocator< std::pair< ns3::Ipv4Address const, ns3::Ptr< ns3::NixVector > > > >', 'ns3::NixMap_t')
typehandlers.add_type_alias('std::map< ns3::Ipv4Address, ns3::Ptr< ns3::NixVector >, std::less< ns3::Ipv4Address >, std::allocator< std::pair< ns3::Ipv4Address const, ns3::Ptr< ns3::NixVector > > > >*', 'ns3::NixMap_t*')
typehandlers.add_type_alias('std::map< ns3::Ipv4Address, ns3::Ptr< ns3::NixVector >, std::less< ns3::Ipv4Address >, std::allocator< std::pair< ns3::Ipv4Address const, ns3::Ptr< ns3::NixVector > > > >&', 'ns3::NixMap_t&')
typehandlers.add_type_alias('std::map< ns3::Ipv4Address, ns3::Ptr< ns3::Ipv4Route >, std::less< ns3::Ipv4Address >, std::allocator< std::pair< ns3::Ipv4Address const, ns3::Ptr< ns3::Ipv4Route > > > >', 'ns3::Ipv4RouteMap_t')
typehandlers.add_type_alias('std::map< ns3::Ipv4Address, ns3::Ptr< ns3::Ipv4Route >, std::less< ns3::Ipv4Address >, std::allocator< std::pair< ns3::Ipv4Address const, ns3::Ptr< ns3::Ipv4Route > > > >*', 'ns3::Ipv4RouteMap_t*')
typehandlers.add_type_alias('std::map< ns3::Ipv4Address, ns3::Ptr< ns3::Ipv4Route >, std::less< ns3::Ipv4Address >, std::allocator< std::pair< ns3::Ipv4Address const, ns3::Ptr< ns3::Ipv4Route > > > >&', 'ns3::Ipv4RouteMap_t&')
## Register a nested module for the namespace Config
nested_module = module.add_cpp_namespace('Config')
register_types_ns3_Config(nested_module)
## Register a nested module for the namespace FatalImpl
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
## Register a nested module for the namespace addressUtils
nested_module = module.add_cpp_namespace('addressUtils')
register_types_ns3_addressUtils(nested_module)
## Register a nested module for the namespace aodv
nested_module = module.add_cpp_namespace('aodv')
register_types_ns3_aodv(nested_module)
## Register a nested module for the namespace dot11s
nested_module = module.add_cpp_namespace('dot11s')
register_types_ns3_dot11s(nested_module)
## Register a nested module for the namespace dsdv
nested_module = module.add_cpp_namespace('dsdv')
register_types_ns3_dsdv(nested_module)
## Register a nested module for the namespace flame
nested_module = module.add_cpp_namespace('flame')
register_types_ns3_flame(nested_module)
## Register a nested module for the namespace internal
nested_module = module.add_cpp_namespace('internal')
register_types_ns3_internal(nested_module)
## Register a nested module for the namespace olsr
nested_module = module.add_cpp_namespace('olsr')
register_types_ns3_olsr(nested_module)
def register_types_ns3_Config(module):
root_module = module.get_root()
def register_types_ns3_FatalImpl(module):
root_module = module.get_root()
def register_types_ns3_addressUtils(module):
root_module = module.get_root()
def register_types_ns3_aodv(module):
root_module = module.get_root()
def register_types_ns3_dot11s(module):
root_module = module.get_root()
def register_types_ns3_dsdv(module):
root_module = module.get_root()
def register_types_ns3_flame(module):
root_module = module.get_root()
def register_types_ns3_internal(module):
root_module = module.get_root()
def register_types_ns3_olsr(module):
root_module = module.get_root()
def register_methods(root_module):
register_Ns3Ipv4NixVectorRouting_methods(root_module, root_module['ns3::Ipv4NixVectorRouting'])
return
def register_Ns3Ipv4NixVectorRouting_methods(root_module, cls):
## ipv4-nix-vector-routing.h: ns3::Ipv4NixVectorRouting::Ipv4NixVectorRouting(ns3::Ipv4NixVectorRouting const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4NixVectorRouting const &', 'arg0')])
## ipv4-nix-vector-routing.h: ns3::Ipv4NixVectorRouting::Ipv4NixVectorRouting() [constructor]
cls.add_constructor([])
## ipv4-nix-vector-routing.h: void ns3::Ipv4NixVectorRouting::FlushGlobalNixRoutingCache() [member function]
cls.add_method('FlushGlobalNixRoutingCache',
'void',
[])
## ipv4-nix-vector-routing.h: static ns3::TypeId ns3::Ipv4NixVectorRouting::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ipv4-nix-vector-routing.h: void ns3::Ipv4NixVectorRouting::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')])
## ipv4-nix-vector-routing.h: void ns3::Ipv4NixVectorRouting::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='private', is_virtual=True)
## ipv4-nix-vector-routing.h: void ns3::Ipv4NixVectorRouting::NotifyAddAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
cls.add_method('NotifyAddAddress',
'void',
[param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
visibility='private', is_virtual=True)
## ipv4-nix-vector-routing.h: void ns3::Ipv4NixVectorRouting::NotifyInterfaceDown(uint32_t interface) [member function]
cls.add_method('NotifyInterfaceDown',
'void',
[param('uint32_t', 'interface')],
visibility='private', is_virtual=True)
## ipv4-nix-vector-routing.h: void ns3::Ipv4NixVectorRouting::NotifyInterfaceUp(uint32_t interface) [member function]
cls.add_method('NotifyInterfaceUp',
'void',
[param('uint32_t', 'interface')],
visibility='private', is_virtual=True)
## ipv4-nix-vector-routing.h: void ns3::Ipv4NixVectorRouting::NotifyRemoveAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
cls.add_method('NotifyRemoveAddress',
'void',
[param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
visibility='private', is_virtual=True)
## ipv4-nix-vector-routing.h: void ns3::Ipv4NixVectorRouting::PrintRoutingTable(ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
cls.add_method('PrintRoutingTable',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
is_const=True, visibility='private', is_virtual=True)
## ipv4-nix-vector-routing.h: bool ns3::Ipv4NixVectorRouting::RouteInput(ns3::Ptr<ns3::Packet const> p, ns3::Ipv4Header const & header, ns3::Ptr<const ns3::NetDevice> idev, ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ucb, ns3::Callback<void,ns3::Ptr<ns3::Ipv4MulticastRoute>,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> mcb, ns3::Callback<void,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,unsigned int,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> lcb, ns3::Callback<void, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ecb) [member function]
cls.add_method('RouteInput',
'bool',
[param('ns3::Ptr< ns3::Packet const >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice const >', 'idev'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ucb'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'mcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'lcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ecb')],
visibility='private', is_virtual=True)
## ipv4-nix-vector-routing.h: ns3::Ptr<ns3::Ipv4Route> ns3::Ipv4NixVectorRouting::RouteOutput(ns3::Ptr<ns3::Packet> p, ns3::Ipv4Header const & header, ns3::Ptr<ns3::NetDevice> oif, ns3::Socket::SocketErrno & sockerr) [member function]
cls.add_method('RouteOutput',
'ns3::Ptr< ns3::Ipv4Route >',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice >', 'oif'), param('ns3::Socket::SocketErrno &', 'sockerr')], <|fim▁hole|> [param('ns3::Ptr< ns3::Ipv4 >', 'ipv4')],
visibility='private', is_virtual=True)
return
def register_functions(root_module):
module = root_module
register_functions_ns3_Config(module.get_submodule('Config'), root_module)
register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
register_functions_ns3_addressUtils(module.get_submodule('addressUtils'), root_module)
register_functions_ns3_aodv(module.get_submodule('aodv'), root_module)
register_functions_ns3_dot11s(module.get_submodule('dot11s'), root_module)
register_functions_ns3_dsdv(module.get_submodule('dsdv'), root_module)
register_functions_ns3_flame(module.get_submodule('flame'), root_module)
register_functions_ns3_internal(module.get_submodule('internal'), root_module)
register_functions_ns3_olsr(module.get_submodule('olsr'), root_module)
return
def register_functions_ns3_Config(module, root_module):
return
def register_functions_ns3_FatalImpl(module, root_module):
return
def register_functions_ns3_addressUtils(module, root_module):
return
def register_functions_ns3_aodv(module, root_module):
return
def register_functions_ns3_dot11s(module, root_module):
return
def register_functions_ns3_dsdv(module, root_module):
return
def register_functions_ns3_flame(module, root_module):
return
def register_functions_ns3_internal(module, root_module):
return
def register_functions_ns3_olsr(module, root_module):
return<|fim▁end|> | visibility='private', is_virtual=True)
## ipv4-nix-vector-routing.h: void ns3::Ipv4NixVectorRouting::SetIpv4(ns3::Ptr<ns3::Ipv4> ipv4) [member function]
cls.add_method('SetIpv4',
'void', |
<|file_name|>common.py<|end_file_name|><|fim▁begin|>import inspect
import json
import os
import random
import subprocess
import time
import requests
import ast
import paramiko
import rancher
from rancher import ApiError
from lib.aws import AmazonWebServices
DEFAULT_TIMEOUT = 120
DEFAULT_MULTI_CLUSTER_APP_TIMEOUT = 300
CATTLE_TEST_URL = os.environ.get('CATTLE_TEST_URL', "http://localhost:80")
CATTLE_API_URL = CATTLE_TEST_URL + "/v3"
ADMIN_TOKEN = os.environ.get('ADMIN_TOKEN', "None")
kube_fname = os.path.join(os.path.dirname(os.path.realpath(__file__)),
"k8s_kube_config")
MACHINE_TIMEOUT = float(os.environ.get('RANCHER_MACHINE_TIMEOUT', "1200"))
TEST_IMAGE = os.environ.get('RANCHER_TEST_IMAGE', "sangeetha/mytestcontainer")
CLUSTER_NAME = os.environ.get("RANCHER_CLUSTER_NAME", "")
CLUSTER_NAME_2 = os.environ.get("RANCHER_CLUSTER_NAME_2", "")
RANCHER_CLEANUP_CLUSTER = \
ast.literal_eval(os.environ.get('RANCHER_CLEANUP_CLUSTER', "True"))
env_file = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
"rancher_env.config")
def random_str():
return 'random-{0}-{1}'.format(random_num(), int(time.time()))
def random_num():
return random.randint(0, 1000000)
def random_int(start, end):
return random.randint(start, end)
def random_test_name(name="test"):
return name + "-" + str(random_int(10000, 99999))
def get_admin_client():
return rancher.Client(url=CATTLE_API_URL, token=ADMIN_TOKEN, verify=False)
def get_client_for_token(token):
return rancher.Client(url=CATTLE_API_URL, token=token, verify=False)
def get_project_client_for_token(project, token):
p_url = project.links['self'] + '/schemas'
p_client = rancher.Client(url=p_url, token=token, verify=False)
return p_client
def get_cluster_client_for_token(cluster, token):
c_url = cluster.links['self'] + '/schemas'
c_client = rancher.Client(url=c_url, token=token, verify=False)
return c_client
def up(cluster, token):
c_url = cluster.links['self'] + '/schemas'
c_client = rancher.Client(url=c_url, token=token, verify=False)
return c_client
def wait_state(client, obj, state, timeout=DEFAULT_TIMEOUT):
wait_for(lambda: client.reload(obj).state == state, timeout)
return client.reload(obj)
def wait_for_condition(client, resource, check_function, fail_handler=None,
timeout=DEFAULT_TIMEOUT):
start = time.time()
resource = client.reload(resource)
while not check_function(resource):
if time.time() - start > timeout:
exceptionMsg = 'Timeout waiting for ' + resource.baseType + \
' to satisfy condition: ' + \
inspect.getsource(check_function)
if fail_handler:
exceptionMsg = exceptionMsg + fail_handler(resource)
raise Exception(exceptionMsg)
time.sleep(.5)
resource = client.reload(resource)
return resource
def wait_for(callback, timeout=DEFAULT_TIMEOUT, timeout_message=None):
start = time.time()
ret = callback()
while ret is None or ret is False:
time.sleep(.5)
if time.time() - start > timeout:
if timeout_message:
raise Exception(timeout_message)
else:
raise Exception('Timeout waiting for condition')
ret = callback()
return ret
def random_name():
return "test" + "-" + str(random_int(10000, 99999))
def create_project_and_ns(token, cluster, project_name=None, ns_name=None):
client = get_client_for_token(token)
p = create_project(client, cluster, project_name)
c_client = get_cluster_client_for_token(cluster, token)
ns = create_ns(c_client, cluster, p, ns_name)
return p, ns
def create_project(client, cluster, project_name=None):
if project_name is None:
project_name = random_name()
p = client.create_project(name=project_name,
clusterId=cluster.id)
time.sleep(5)
p = wait_until_available(client, p)
assert p.state == 'active'
return p
def create_project_with_pspt(client, cluster, pspt):
p = client.create_project(name=random_name(),
clusterId=cluster.id)
p = wait_until_available(client, p)
assert p.state == 'active'
return set_pspt_for_project(p, client, pspt)
def set_pspt_for_project(project, client, pspt):
project.setpodsecuritypolicytemplate(podSecurityPolicyTemplateId=pspt.id)
project = wait_until_available(client, project)
assert project.state == 'active'
return project
def create_ns(client, cluster, project, ns_name=None):
if ns_name is None:
ns_name = random_name()
ns = client.create_namespace(name=ns_name,
clusterId=cluster.id,
projectId=project.id)
wait_for_ns_to_become_active(client, ns)
ns = client.reload(ns)
assert ns.state == 'active'
return ns
def assign_members_to_cluster(client, user, cluster, role_template_id):
crtb = client.create_cluster_role_template_binding(
clusterId=cluster.id,
roleTemplateId=role_template_id,
subjectKind="User",
userId=user.id)
return crtb
def assign_members_to_project(client, user, project, role_template_id):
prtb = client.create_project_role_template_binding(
projectId=project.id,
roleTemplateId=role_template_id,
subjectKind="User",
userId=user.id)
return prtb
def change_member_role_in_cluster(client, user, crtb, role_template_id):
crtb = client.update(
crtb,
roleTemplateId=role_template_id,
userId=user.id)
return crtb
def change_member_role_in_project(client, user, prtb, role_template_id):
prtb = client.update(
prtb,
roleTemplateId=role_template_id,
userId=user.id)
return prtb
def create_kubeconfig(cluster):
generateKubeConfigOutput = cluster.generateKubeconfig()
print(generateKubeConfigOutput.config)
file = open(kube_fname, "w")
file.write(generateKubeConfigOutput.config)
file.close()
def validate_psp_error_worklaod(p_client, workload, error_message):
workload = wait_for_wl_transitioning(p_client, workload)
assert workload.state == "updating"
assert workload.transitioning == "error"
print(workload.transitioningMessage)
assert error_message in workload.transitioningMessage
def validate_workload(p_client, workload, type, ns_name, pod_count=1,
wait_for_cron_pods=60):
workload = wait_for_wl_to_active(p_client, workload)
assert workload.state == "active"
# For cronjob, wait for the first pod to get created after
# scheduled wait time
if type == "cronJob":
time.sleep(wait_for_cron_pods)
pods = p_client.list_pod(workloadId=workload.id).data
assert len(pods) == pod_count
for pod in pods:
wait_for_pod_to_running(p_client, pod)
wl_result = execute_kubectl_cmd(
"get " + type + " " + workload.name + " -n " + ns_name)
if type == "deployment" or type == "statefulSet":
assert wl_result["status"]["readyReplicas"] == pod_count
if type == "daemonSet":
assert wl_result["status"]["currentNumberScheduled"] == pod_count
if type == "cronJob":
assert len(wl_result["status"]["active"]) >= pod_count
return
for key, value in workload.workloadLabels.items():
label = key + "=" + value
get_pods = "get pods -l" + label + " -n " + ns_name
pods_result = execute_kubectl_cmd(get_pods)
assert len(pods_result["items"]) == pod_count
for pod in pods_result["items"]:
assert pod["status"]["phase"] == "Running"
return pods_result["items"]
def validate_workload_with_sidekicks(p_client, workload, type, ns_name,
pod_count=1):
workload = wait_for_wl_to_active(p_client, workload)
assert workload.state == "active"
pods = wait_for_pods_in_workload(p_client, workload, pod_count)
assert len(pods) == pod_count
for pod in pods:
wait_for_pod_to_running(p_client, pod)
wl_result = execute_kubectl_cmd(
"get " + type + " " + workload.name + " -n " + ns_name)
assert wl_result["status"]["readyReplicas"] == pod_count
for key, value in workload.workloadLabels.items():
label = key + "=" + value
get_pods = "get pods -l" + label + " -n " + ns_name
execute_kubectl_cmd(get_pods)
pods_result = execute_kubectl_cmd(get_pods)
assert len(pods_result["items"]) == pod_count
for pod in pods_result["items"]:
assert pod["status"]["phase"] == "Running"
assert len(pod["status"]["containerStatuses"]) == 2
assert "running" in pod["status"]["containerStatuses"][0]["state"]
assert "running" in pod["status"]["containerStatuses"][1]["state"]
def validate_workload_paused(p_client, workload, expectedstatus):
workloadStatus = p_client.list_workload(uuid=workload.uuid).data[0].paused
assert workloadStatus == expectedstatus
def validate_pod_images(expectedimage, workload, ns_name):
for key, value in workload.workloadLabels.items():
label = key + "=" + value
get_pods = "get pods -l" + label + " -n " + ns_name
pods = execute_kubectl_cmd(get_pods)
for pod in pods["items"]:
assert pod["spec"]["containers"][0]["image"] == expectedimage
def validate_pods_are_running_by_id(expectedpods, workload, ns_name):
for key, value in workload.workloadLabels.items():
label = key + "=" + value
get_pods = "get pods -l" + label + " -n " + ns_name
pods = execute_kubectl_cmd(get_pods)
curpodnames = []
for pod in pods["items"]:
curpodnames.append(pod["metadata"]["name"])
for expectedpod in expectedpods["items"]:
assert expectedpod["metadata"]["name"] in curpodnames
def validate_workload_image(client, workload, expectedImage, ns):
workload = client.list_workload(uuid=workload.uuid).data[0]
assert workload.containers[0].image == expectedImage
validate_pod_images(expectedImage, workload, ns.name)
def execute_kubectl_cmd(cmd, json_out=True, stderr=False):
command = 'kubectl --kubeconfig {0} {1}'.format(
kube_fname, cmd)
if json_out:
command += ' -o json'
if stderr:
result = run_command_with_stderr(command)
else:
result = run_command(command)
if json_out:
result = json.loads(result)
print(result)
return result
def run_command(command):
return subprocess.check_output(command, shell=True, text=True)
def run_command_with_stderr(command):
try:
output = subprocess.check_output(command, shell=True,
stderr=subprocess.PIPE)
returncode = 0
except subprocess.CalledProcessError as e:
output = e.output
returncode = e.returncode
print(returncode)
return output
def wait_for_wl_to_active(client, workload, timeout=DEFAULT_TIMEOUT):
start = time.time()
workloads = client.list_workload(uuid=workload.uuid).data
assert len(workloads) == 1
wl = workloads[0]
while wl.state != "active":
if time.time() - start > timeout:
raise AssertionError(
"Timed out waiting for state to get to active")
time.sleep(.5)
workloads = client.list_workload(uuid=workload.uuid).data
assert len(workloads) == 1
wl = workloads[0]
return wl
def wait_for_ingress_to_active(client, ingress, timeout=DEFAULT_TIMEOUT):
start = time.time()
ingresses = client.list_ingress(uuid=ingress.uuid).data
assert len(ingresses) == 1
wl = ingresses[0]
while wl.state != "active":<|fim▁hole|> "Timed out waiting for state to get to active")
time.sleep(.5)
ingresses = client.list_ingress(uuid=ingress.uuid).data
assert len(ingresses) == 1
wl = ingresses[0]
return wl
def wait_for_wl_transitioning(client, workload, timeout=DEFAULT_TIMEOUT,
state="error"):
start = time.time()
workloads = client.list_workload(uuid=workload.uuid).data
assert len(workloads) == 1
wl = workloads[0]
while wl.transitioning != state:
if time.time() - start > timeout:
raise AssertionError(
"Timed out waiting for state to get to active")
time.sleep(.5)
workloads = client.list_workload(uuid=workload.uuid).data
assert len(workloads) == 1
wl = workloads[0]
return wl
def wait_for_pod_to_running(client, pod, timeout=DEFAULT_TIMEOUT):
start = time.time()
pods = client.list_pod(uuid=pod.uuid).data
assert len(pods) == 1
p = pods[0]
while p.state != "running":
if time.time() - start > timeout:
raise AssertionError(
"Timed out waiting for state to get to active")
time.sleep(.5)
pods = client.list_pod(uuid=pod.uuid).data
assert len(pods) == 1
p = pods[0]
return p
def get_schedulable_nodes(cluster):
client = get_admin_client()
nodes = client.list_node(clusterId=cluster.id).data
schedulable_nodes = []
for node in nodes:
if node.worker:
schedulable_nodes.append(node)
return schedulable_nodes
def get_role_nodes(cluster, role):
etcd_nodes = []
control_nodes = []
worker_nodes = []
node_list = []
client = get_admin_client()
nodes = client.list_node(clusterId=cluster.id).data
for node in nodes:
if node.etcd:
etcd_nodes.append(node)
if node.controlPlane:
control_nodes.append(node)
if node.worker:
worker_nodes.append(node)
if role == "etcd":
node_list = etcd_nodes
if role == "control":
node_list = control_nodes
if role == "worker":
node_list = worker_nodes
return node_list
def validate_ingress(p_client, cluster, workloads, host, path,
insecure_redirect=False):
time.sleep(10)
curl_args = " "
if (insecure_redirect):
curl_args = " -L --insecure "
if len(host) > 0:
curl_args += " --header 'Host: " + host + "'"
nodes = get_schedulable_nodes(cluster)
target_name_list = get_target_names(p_client, workloads)
for node in nodes:
host_ip = node.externalIpAddress
cmd = curl_args + " http://" + host_ip + path
validate_http_response(cmd, target_name_list)
def validate_ingress_using_endpoint(p_client, ingress, workloads,
timeout=300):
target_name_list = get_target_names(p_client, workloads)
start = time.time()
fqdn_available = False
url = None
while not fqdn_available:
if time.time() - start > timeout:
raise AssertionError(
"Timed out waiting for endpoint to be available")
time.sleep(.5)
ingress_list = p_client.list_ingress(uuid=ingress.uuid).data
assert len(ingress_list) == 1
ingress = ingress_list[0]
if hasattr(ingress, 'publicEndpoints'):
for public_endpoint in ingress.publicEndpoints:
if public_endpoint["hostname"].startswith(ingress.name):
fqdn_available = True
url = \
public_endpoint["protocol"].lower() + "://" + \
public_endpoint["hostname"]
if "path" in public_endpoint.keys():
url += public_endpoint["path"]
time.sleep(10)
validate_http_response(url, target_name_list)
def get_target_names(p_client, workloads):
pods = []
for workload in workloads:
pod_list = p_client.list_pod(workloadId=workload.id).data
pods.extend(pod_list)
target_name_list = []
for pod in pods:
target_name_list.append(pod.name)
print("target name list:" + str(target_name_list))
return target_name_list
def get_endpoint_url_for_workload(p_client, workload, timeout=600):
fqdn_available = False
url = ""
start = time.time()
while not fqdn_available:
if time.time() - start > timeout:
raise AssertionError(
"Timed out waiting for endpoint to be available")
time.sleep(.5)
workload_list = p_client.list_workload(uuid=workload.uuid).data
assert len(workload_list) == 1
workload = workload_list[0]
if hasattr(workload, 'publicEndpoints'):
assert len(workload.publicEndpoints) > 0
url = "http://"
url = url + workload.publicEndpoints[0]["addresses"][0] + ":"
url = url + str(workload.publicEndpoints[0]["port"])
fqdn_available = True
return url
def wait_until_lb_is_active(url, timeout=300):
start = time.time()
while check_for_no_access(url):
time.sleep(.5)
print("No access yet")
if time.time() - start > timeout:
raise Exception('Timed out waiting for LB to become active')
return
def check_for_no_access(url, verify=False):
try:
requests.get(url, verify=verify)
return False
except requests.ConnectionError:
print("Connection Error - " + url)
return True
def wait_until_active(url, timeout=120):
start = time.time()
while check_for_no_access(url):
time.sleep(.5)
print("No access yet")
if time.time() - start > timeout:
raise Exception('Timed out waiting for url '
'to become active')
return
def validate_http_response(cmd, target_name_list, client_pod=None):
if client_pod is None and cmd.startswith("http://"):
wait_until_active(cmd, 60)
target_hit_list = target_name_list[:]
count = 5 * len(target_name_list)
for i in range(1, count):
if len(target_hit_list) == 0:
break
if client_pod is None:
curl_cmd = "curl " + cmd
result = run_command(curl_cmd)
else:
wget_cmd = "wget -qO- " + cmd
result = kubectl_pod_exec(client_pod, wget_cmd)
result = result.decode()
result = result.rstrip()
print("cmd: \t" + cmd)
print("result: \t" + result)
assert result in target_name_list
if result in target_hit_list:
target_hit_list.remove(result)
print("After removing all, the rest is: ", target_hit_list)
assert len(target_hit_list) == 0
def validate_cluster(client, cluster, intermediate_state="provisioning",
check_intermediate_state=True, skipIngresscheck=True,
nodes_not_in_active_state=[], k8s_version=""):
cluster = validate_cluster_state(
client, cluster,
check_intermediate_state=check_intermediate_state,
intermediate_state=intermediate_state,
nodes_not_in_active_state=nodes_not_in_active_state)
# Create Daemon set workload and have an Ingress with Workload
# rule pointing to this daemonset
create_kubeconfig(cluster)
if k8s_version != "":
check_cluster_version(cluster, k8s_version)
if hasattr(cluster, 'rancherKubernetesEngineConfig'):
check_cluster_state(len(get_role_nodes(cluster, "etcd")))
project, ns = create_project_and_ns(ADMIN_TOKEN, cluster)
p_client = get_project_client_for_token(project, ADMIN_TOKEN)
con = [{"name": "test1",
"image": TEST_IMAGE}]
name = random_test_name("default")
workload = p_client.create_workload(name=name,
containers=con,
namespaceId=ns.id,
daemonSetConfig={})
validate_workload(p_client, workload, "daemonSet", ns.name,
len(get_schedulable_nodes(cluster)))
if not skipIngresscheck:
host = "test" + str(random_int(10000, 99999)) + ".com"
path = "/name.html"
rule = {"host": host,
"paths":
[{"workloadIds": [workload.id], "targetPort": "80"}]}
ingress = p_client.create_ingress(name=name,
namespaceId=ns.id,
rules=[rule])
wait_for_ingress_to_active(p_client, ingress)
validate_ingress(p_client, cluster, [workload], host, path)
return cluster
def check_cluster_version(cluster, version):
cluster_k8s_version = \
cluster.appliedSpec["rancherKubernetesEngineConfig"][
"kubernetesVersion"]
assert cluster_k8s_version == version, \
"cluster_k8s_version: " + cluster_k8s_version + \
" Expected: " + version
expected_k8s_version = version[:version.find("-")]
k8s_version = execute_kubectl_cmd("version")
kubectl_k8s_version = k8s_version["serverVersion"]["gitVersion"]
assert kubectl_k8s_version == expected_k8s_version, \
"kubectl version: " + kubectl_k8s_version + \
" Expected: " + expected_k8s_version
def check_cluster_state(etcd_count):
css_resp = execute_kubectl_cmd("get cs")
css = css_resp["items"]
components = ["scheduler", "controller-manager"]
for i in range(0, etcd_count):
components.append("etcd-" + str(i))
print("components to check - " + str(components))
for cs in css:
component_name = cs["metadata"]["name"]
assert component_name in components
components.remove(component_name)
assert cs["conditions"][0]["status"] == "True"
assert cs["conditions"][0]["type"] == "Healthy"
assert len(components) == 0
def validate_dns_record(pod, record, expected):
# requires pod with `dig` available - TEST_IMAGE
host = '{0}.{1}.svc.cluster.local'.format(
record["name"], record["namespaceId"])
validate_dns_entry(pod, host, expected)
def validate_dns_entry(pod, host, expected):
# requires pod with `dig` available - TEST_IMAGE
cmd = 'ping -c 1 -W 1 {0}'.format(host)
ping_output = kubectl_pod_exec(pod, cmd)
ping_validation_pass = False
for expected_value in expected:
if expected_value in str(ping_output):
ping_validation_pass = True
break
assert ping_validation_pass is True
assert " 0% packet loss" in str(ping_output)
dig_cmd = 'dig {0} +short'.format(host)
dig_output = kubectl_pod_exec(pod, dig_cmd)
for expected_value in expected:
assert expected_value in str(dig_output)
def wait_for_nodes_to_become_active(client, cluster, exception_list=[],
retry_count=0):
nodes = client.list_node(clusterId=cluster.id).data
node_auto_deleted = False
for node in nodes:
if node.requestedHostname not in exception_list:
node = wait_for_node_status(client, node, "active")
if node is None:
print("Need to re-evalauate new node list")
node_auto_deleted = True
retry_count += 1
print("Retry Count:" + str(retry_count))
if node_auto_deleted and retry_count < 5:
wait_for_nodes_to_become_active(client, cluster, exception_list,
retry_count)
def wait_for_node_status(client, node, state):
uuid = node.uuid
start = time.time()
nodes = client.list_node(uuid=uuid).data
node_count = len(nodes)
# Handle the case of nodes getting auto deleted when they are part of
# nodepools
if node_count == 1:
node_status = nodes[0].state
else:
print("Node does not exist anymore -" + uuid)
return None
while node_status != state:
if time.time() - start > MACHINE_TIMEOUT:
raise AssertionError(
"Timed out waiting for state to get to active")
time.sleep(5)
nodes = client.list_node(uuid=uuid).data
node_count = len(nodes)
if node_count == 1:
node_status = nodes[0].state
else:
print("Node does not exist anymore -" + uuid)
return None
return node
def wait_for_node_to_be_deleted(client, node, timeout=300):
uuid = node.uuid
start = time.time()
nodes = client.list_node(uuid=uuid).data
node_count = len(nodes)
while node_count != 0:
if time.time() - start > timeout:
raise AssertionError(
"Timed out waiting for state to get to active")
time.sleep(.5)
nodes = client.list_node(uuid=uuid).data
node_count = len(nodes)
def wait_for_cluster_node_count(client, cluster, expected_node_count,
timeout=300):
start = time.time()
nodes = client.list_node(clusterId=cluster.id).data
node_count = len(nodes)
while node_count != expected_node_count:
if time.time() - start > timeout:
raise AssertionError(
"Timed out waiting for state to get to active")
time.sleep(.5)
nodes = client.list_node(clusterId=cluster.id).data
node_count = len(nodes)
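# get_custom_host_registration_cmd builds the registration command for a custom
# node: it reuses (or creates) the cluster registration token, appends one
# --<role> flag per requested role and adds the node's public/internal address.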
def get_custom_host_registration_cmd(client, cluster, roles, node):
allowed_roles = ["etcd", "worker", "controlplane"]
cluster_tokens = client.list_cluster_registration_token(
clusterId=cluster.id).data
if len(cluster_tokens) > 0:
cluster_token = cluster_tokens[0]
else:
cluster_token = create_custom_host_registration_token(client, cluster)
cmd = cluster_token.nodeCommand
for role in roles:
assert role in allowed_roles
cmd += " --" + role
additional_options = " --address " + node.public_ip_address + \
" --internal-address " + node.private_ip_address
cmd += additional_options
return cmd
def create_custom_host_registration_token(client, cluster):
cluster_token = client.create_cluster_registration_token(
clusterId=cluster.id)
cluster_token = client.wait_success(cluster_token)
assert cluster_token.state == 'active'
return cluster_token
def get_cluster_type(client, cluster):
cluster_configs = [
"amazonElasticContainerServiceConfig",
"azureKubernetesServiceConfig",
"googleKubernetesEngineConfig",
"rancherKubernetesEngineConfig"
]
if "rancherKubernetesEngineConfig" in cluster:
nodes = client.list_node(clusterId=cluster.id).data
if len(nodes) > 0:
if nodes[0].nodeTemplateId is None:
return "Custom"
for cluster_config in cluster_configs:
if cluster_config in cluster:
return cluster_config
return "Imported"
def delete_cluster(client, cluster):
nodes = client.list_node(clusterId=cluster.id).data
# Delete Cluster
client.delete(cluster)
# Delete nodes(in cluster) from AWS for Imported and Custom Cluster
if (len(nodes) > 0):
cluster_type = get_cluster_type(client, cluster)
print(cluster_type)
        if cluster_type in ["Imported", "Custom"]:
nodes = client.list_node(clusterId=cluster.id).data
filters = [
{'Name': 'tag:Name',
'Values': ['testcustom*', 'teststess*']}]
ip_filter = {}
ip_list = []
ip_filter['Name'] = \
'network-interface.addresses.association.public-ip'
ip_filter['Values'] = ip_list
filters.append(ip_filter)
for node in nodes:
ip_list.append(node.externalIpAddress)
            assert len(ip_list) > 0
print(ip_filter)
aws_nodes = AmazonWebServices().get_nodes(filters)
for node in aws_nodes:
print(node.public_ip_address)
AmazonWebServices().delete_nodes(aws_nodes)
def check_connectivity_between_workloads(p_client1, workload1, p_client2,
workload2, allow_connectivity=True):
wl1_pods = p_client1.list_pod(workloadId=workload1.id).data
wl2_pods = p_client2.list_pod(workloadId=workload2.id).data
for pod in wl1_pods:
for o_pod in wl2_pods:
check_connectivity_between_pods(pod, o_pod, allow_connectivity)
def check_connectivity_between_workload_pods(p_client, workload):
pods = p_client.list_pod(workloadId=workload.id).data
for pod in pods:
for o_pod in pods:
check_connectivity_between_pods(pod, o_pod)
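# check_connectivity_between_pods pings pod2's pod IP from inside pod1 and
# asserts 0% packet loss when connectivity is expected, 100% when it is not.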
def check_connectivity_between_pods(pod1, pod2, allow_connectivity=True):
pod_ip = pod2.status.podIp
cmd = "ping -c 1 -W 1 " + pod_ip
response = kubectl_pod_exec(pod1, cmd)
print("Actual ping Response from " + pod1.name + ":" + str(response))
if allow_connectivity:
assert pod_ip in str(response) and " 0% packet loss" in str(response)
else:
assert pod_ip in str(response) and " 100% packet loss" in str(response)
def kubectl_pod_exec(pod, cmd):
command = "exec " + pod.name + " -n " + pod.namespaceId + " -- " + cmd
return execute_kubectl_cmd(command, json_out=False, stderr=True)
def exec_shell_command(ip, port, cmd, password, user="root", sshKey=None):
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
if sshKey:
ssh.connect(ip, username=user, key_filename=sshKey, port=port)
else:
ssh.connect(ip, username=user, password=password, port=port)
stdin, stdout, stderr = ssh.exec_command(cmd)
response = stdout.readlines()
return response
def wait_for_ns_to_become_active(client, ns, timeout=DEFAULT_TIMEOUT):
start = time.time()
time.sleep(2)
nss = client.list_namespace(uuid=ns.uuid).data
assert len(nss) == 1
ns = nss[0]
while ns.state != "active":
if time.time() - start > timeout:
raise AssertionError(
"Timed out waiting for state to get to active")
time.sleep(.5)
nss = client.list_namespace(uuid=ns.uuid).data
assert len(nss) == 1
ns = nss[0]
return ns
def wait_for_pod_images(p_client, workload, ns_name, expectedimage, numofpods,
timeout=DEFAULT_TIMEOUT):
start = time.time()
for key, value in workload.workloadLabels.items():
label = key + "=" + value
get_pods = "get pods -l" + label + " -n " + ns_name
pods = execute_kubectl_cmd(get_pods)
    for x in range(0, numofpods):
pod = pods["items"][x]
podimage = pod["spec"]["containers"][0]["image"]
while podimage != expectedimage:
if time.time() - start > timeout:
raise AssertionError(
"Timed out waiting for correct pod images")
time.sleep(.5)
pods = execute_kubectl_cmd(get_pods)
pod = pods["items"][x]
podimage = pod["spec"]["containers"][0]["image"]
def wait_for_pods_in_workload(p_client, workload, pod_count,
timeout=DEFAULT_TIMEOUT):
start = time.time()
pods = p_client.list_pod(workloadId=workload.id).data
while len(pods) != pod_count:
if time.time() - start > timeout:
raise AssertionError(
"Timed out waiting for state to get to active")
time.sleep(.5)
pods = p_client.list_pod(workloadId=workload.id).data
return pods
def get_admin_client_and_cluster():
client = get_admin_client()
if CLUSTER_NAME == "":
clusters = client.list_cluster().data
else:
clusters = client.list_cluster(name=CLUSTER_NAME).data
assert len(clusters) > 0
cluster = clusters[0]
return client, cluster
def validate_cluster_state(client, cluster,
check_intermediate_state=True,
intermediate_state="provisioning",
nodes_not_in_active_state=[]):
if check_intermediate_state:
cluster = wait_for_condition(
client, cluster,
lambda x: x.state == intermediate_state,
lambda x: 'State is: ' + x.state,
timeout=MACHINE_TIMEOUT)
assert cluster.state == intermediate_state
cluster = wait_for_condition(
client, cluster,
lambda x: x.state == "active",
lambda x: 'State is: ' + x.state,
timeout=MACHINE_TIMEOUT)
assert cluster.state == "active"
wait_for_nodes_to_become_active(client, cluster,
exception_list=nodes_not_in_active_state)
return cluster
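# wait_until_available reloads the object with exponential backoff (capped at
# 2 seconds between attempts) until the API stops answering 403 or the timeout
# expires, i.e. until the object is visible to the caller.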
def wait_until_available(client, obj, timeout=DEFAULT_TIMEOUT):
start = time.time()
sleep = 0.01
while True:
time.sleep(sleep)
sleep *= 2
if sleep > 2:
sleep = 2
try:
obj = client.reload(obj)
except ApiError as e:
if e.error.status != 403:
raise e
else:
return obj
delta = time.time() - start
if delta > timeout:
msg = 'Timeout waiting for [{}:{}] for condition after {}' \
' seconds'.format(obj.type, obj.id, delta)
raise Exception(msg)
def delete_node(aws_nodes):
for node in aws_nodes:
AmazonWebServices().delete_node(node)
def cluster_cleanup(client, cluster, aws_nodes=None):
if RANCHER_CLEANUP_CLUSTER:
client.delete(cluster)
if aws_nodes is not None:
delete_node(aws_nodes)
else:
env_details = "env.CATTLE_TEST_URL='" + CATTLE_TEST_URL + "'\n"
env_details += "env.ADMIN_TOKEN='" + ADMIN_TOKEN + "'\n"
env_details += "env.CLUSTER_NAME='" + cluster.name + "'\n"
create_config_file(env_details)
def create_config_file(env_details):
file = open(env_file, "w")
file.write(env_details)
file.close()
def validate_hostPort(p_client, workload, source_port, cluster):
pods = p_client.list_pod(workloadId=workload.id).data
nodes = get_schedulable_nodes(cluster)
for node in nodes:
target_name_list = []
for pod in pods:
print(pod.nodeId + " check " + node.id)
if pod.nodeId == node.id:
target_name_list.append(pod.name)
break
if len(target_name_list) > 0:
host_ip = node.externalIpAddress
curl_cmd = " http://" + host_ip + ":" + \
str(source_port) + "/name.html"
validate_http_response(curl_cmd, target_name_list)
def validate_lb(p_client, workload):
url = get_endpoint_url_for_workload(p_client, workload)
target_name_list = get_target_names(p_client, [workload])
wait_until_lb_is_active(url)
validate_http_response(url + "/name.html", target_name_list)
def validate_nodePort(p_client, workload, cluster):
get_endpoint_url_for_workload(p_client, workload, 60)
wl = p_client.list_workload(uuid=workload.uuid).data[0]
source_port = wl.publicEndpoints[0]["port"]
nodes = get_schedulable_nodes(cluster)
pods = p_client.list_pod(workloadId=wl.id).data
target_name_list = []
for pod in pods:
target_name_list.append(pod.name)
print("target name list:" + str(target_name_list))
for node in nodes:
host_ip = node.externalIpAddress
curl_cmd = " http://" + host_ip + ":" + \
str(source_port) + "/name.html"
validate_http_response(curl_cmd, target_name_list)
def validate_clusterIp(p_client, workload, cluster_ip, test_pods):
pods = p_client.list_pod(workloadId=workload.id).data
target_name_list = []
for pod in pods:
target_name_list.append(pod["name"])
curl_cmd = "http://" + cluster_ip + "/name.html"
for pod in test_pods:
validate_http_response(curl_cmd, target_name_list, pod)
def wait_for_pv_to_be_available(c_client, pv_object, timeout=DEFAULT_TIMEOUT):
start = time.time()
time.sleep(2)
list = c_client.list_persistent_volume(uuid=pv_object.uuid).data
assert len(list) == 1
pv = list[0]
while pv.state != "available":
if time.time() - start > timeout:
raise AssertionError(
"Timed out waiting for state to get to available")
time.sleep(.5)
list = c_client.list_persistent_volume(uuid=pv_object.uuid).data
assert len(list) == 1
pv = list[0]
return pv
def wait_for_pvc_to_be_bound(p_client, pvc_object, timeout=DEFAULT_TIMEOUT):
start = time.time()
time.sleep(2)
list = p_client.list_persistent_volume_claim(uuid=pvc_object.uuid).data
assert len(list) == 1
pvc = list[0]
while pvc.state != "bound":
if time.time() - start > timeout:
raise AssertionError(
"Timed out waiting for state to get to bound")
time.sleep(.5)
list = p_client.list_persistent_volume_claim(uuid=pvc_object.uuid).data
assert len(list) == 1
pvc = list[0]
return pvc
def create_wl_with_nfs(p_client, ns_id, pvc_name, wl_name,
mount_path, sub_path, is_daemonSet=False):
volumes = [{"type": "volume",
"name": "vol1",
"persistentVolumeClaim": {
"readOnly": "false",
"type": "persistentVolumeClaimVolumeSource",
"persistentVolumeClaimId": pvc_name
}}]
volumeMounts = [{"readOnly": "False",
"type": "volumeMount",
"mountPath": mount_path,
"subPath": sub_path,
"name": "vol1"
}]
con = [{"name": "test1",
"image": TEST_IMAGE,
"volumeMounts": volumeMounts
}]
if is_daemonSet:
workload = p_client.create_workload(name=wl_name,
containers=con,
namespaceId=ns_id,
volumes=volumes,
daemonSetConfig={})
else:
workload = p_client.create_workload(name=wl_name,
containers=con,
namespaceId=ns_id,
volumes=volumes)
return workload
def write_content_to_file(pod, content, filename):
cmd_write = "/bin/bash -c 'echo {1} > {0}'".format(filename, content)
output = kubectl_pod_exec(pod, cmd_write)
assert output.strip().decode('utf-8') == ""
def validate_file_content(pod, content, filename):
cmd_get_content = "/bin/bash -c 'cat {0}' ".format(filename)
output = kubectl_pod_exec(pod, cmd_get_content)
assert output.strip().decode('utf-8') == content
def wait_for_mcapp_to_active(client, multiClusterApp,
timeout=DEFAULT_MULTI_CLUSTER_APP_TIMEOUT):
time.sleep(5)
mcapps = client.list_multiClusterApp(uuid=multiClusterApp.uuid,
name=multiClusterApp.name).data
start = time.time()
assert len(mcapps) == 1, "Cannot find multi cluster app"
mapp = mcapps[0]
while mapp.state != "active":
if time.time() - start > timeout:
raise AssertionError(
"Timed out waiting for state to get to active")
time.sleep(.5)
multiclusterapps = client.list_multiClusterApp(
uuid=multiClusterApp.uuid, name=multiClusterApp.name).data
assert len(multiclusterapps) == 1
mapp = multiclusterapps[0]
return mapp
def wait_for_app_to_active(client, app_id,
timeout=DEFAULT_MULTI_CLUSTER_APP_TIMEOUT):
app_data = client.list_app(name=app_id).data
start = time.time()
assert len(app_data) == 1, "Cannot find app"
application = app_data[0]
while application.state != "active":
if time.time() - start > timeout:
raise AssertionError(
"Timed out waiting for state to get to active")
time.sleep(.5)
app = client.list_app(name=app_id).data
assert len(app) == 1
application = app[0]
return application
def validate_response_app_endpoint(p_client, appId):
ingress_list = p_client.list_ingress(namespaceId=appId).data
assert len(ingress_list) == 1
ingress = ingress_list[0]
if hasattr(ingress, 'publicEndpoints'):
for public_endpoint in ingress.publicEndpoints:
url = \
public_endpoint["protocol"].lower() + "://" + \
public_endpoint["hostname"]
print(url)
try:
r = requests.head(url)
assert r.status_code == 200, \
"Http response is not 200. Failed to launch the app"
except requests.ConnectionError:
print("failed to connect")
assert False, "failed to connect to the app"<|fim▁end|> | if time.time() - start > timeout:
raise AssertionError( |
<|file_name|>ExampleGuiceServletModule.java<|end_file_name|><|fim▁begin|>package org.vaadin.addons.guice.server;
import org.vaadin.addons.guice.servlet.VGuiceApplicationServlet;
import com.google.inject.servlet.ServletModule;
/**
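 * Guice {@link ServletModule} that routes every request ("/*") through the
 * Guice-aware Vaadin application servlet.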
*
* @author Will Temperley
*
*/
public class ExampleGuiceServletModule extends ServletModule {<|fim▁hole|> @Override
protected void configureServlets() {
serve("/*").with(VGuiceApplicationServlet.class);
}
}<|fim▁end|> | |
<|file_name|>cast.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Unsafe casting functions
use mem;
use intrinsics;
use ptr::copy_nonoverlapping_memory;
/**
* Transform a value of one type into a value of another type.
* Both types must have the same size and alignment.
*
* # Example
*
* ```rust
* use std::cast;
*
* let v: &[u8] = unsafe { cast::transmute("L") };
* assert!(v == [76u8]);
* ```
*/
#[inline]
pub unsafe fn transmute<T, U>(thing: T) -> U {
intrinsics::transmute(thing)
}
/**
* Move a thing into the void
*
* The forget function will take ownership of the provided value but neglect
* to run any required cleanup or memory-management operations on it.<|fim▁hole|>pub unsafe fn forget<T>(thing: T) { intrinsics::forget(thing); }
/// Casts the value at `src` to U. The two types must have the same length.
#[inline]
pub unsafe fn transmute_copy<T, U>(src: &T) -> U {
let mut dest: U = mem::uninit();
let dest_ptr: *mut u8 = transmute(&mut dest);
let src_ptr: *u8 = transmute(src);
copy_nonoverlapping_memory(dest_ptr, src_ptr, mem::size_of::<U>());
dest
}
/// Coerce an immutable reference to be mutable.
#[inline]
#[deprecated="casting &T to &mut T is undefined behaviour: use Cell<T>, RefCell<T> or Unsafe<T>"]
pub unsafe fn transmute_mut<'a,T>(ptr: &'a T) -> &'a mut T { transmute(ptr) }
/// Coerce a reference to have an arbitrary associated lifetime.
#[inline]
pub unsafe fn transmute_lifetime<'a,'b,T>(ptr: &'a T) -> &'b T {
transmute(ptr)
}
/// Coerce an immutable reference to be mutable.
#[inline]
pub unsafe fn transmute_mut_unsafe<T>(ptr: *T) -> *mut T {
transmute(ptr)
}
/// Coerce a mutable reference to have an arbitrary associated lifetime.
#[inline]
pub unsafe fn transmute_mut_lifetime<'a,'b,T>(ptr: &'a mut T) -> &'b mut T {
transmute(ptr)
}
/// Transforms lifetime of the second pointer to match the first.
#[inline]
pub unsafe fn copy_lifetime<'a,S,T>(_ptr: &'a S, ptr: &T) -> &'a T {
transmute_lifetime(ptr)
}
/// Transforms lifetime of the second pointer to match the first.
#[inline]
pub unsafe fn copy_mut_lifetime<'a,S,T>(_ptr: &'a mut S, ptr: &mut T) -> &'a mut T {
transmute_mut_lifetime(ptr)
}
/// Transforms lifetime of the second pointer to match the first.
#[inline]
pub unsafe fn copy_lifetime_vec<'a,S,T>(_ptr: &'a [S], ptr: &T) -> &'a T {
transmute_lifetime(ptr)
}
/****************************************************************************
* Tests
****************************************************************************/
#[cfg(test)]
mod tests {
use cast::transmute;
use raw;
use realstd::str::StrAllocating;
#[test]
fn test_transmute_copy() {
assert_eq!(1u, unsafe { ::cast::transmute_copy(&1) });
}
#[test]
fn test_transmute() {
unsafe {
let x = @100u8;
let x: *raw::Box<u8> = transmute(x);
assert!((*x).data == 100);
let _x: @int = transmute(x);
}
}
#[test]
fn test_transmute2() {
unsafe {
assert_eq!(box [76u8], transmute("L".to_owned()));
}
}
}<|fim▁end|> | */
#[inline] |
<|file_name|>Board.cpp<|end_file_name|><|fim▁begin|>#include "Board.h"
#include <iostream>
#include <cstring>   // strcmp
#include <cstdlib>   // strtol, exit
using namespace std;
void Board::enumExts( )
{
if(ptester->hasUnboundedHorizAttacks())
{
for( int i = 1; i <= nrows; i++ )
if( PiecesInRow[i] > 1 )
{
cout << 0 << endl;
return;
}
}
_enumExts();
}
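// _enumExts recursively extends the current partial placement: starting from
// the square after the top of the placement stack it tries every non-attacked
// square, pushes it, recurses (recording a solution once npieces are placed),
// then pops it again (classic backtracking). The outermost call prints the
// total solution count when it unwinds.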
void Board::_enumExts( )
{
Place P(1,1);
if( n() != 0 )
{
P = top();
if( PiecesInOrBelowRow[P.row] == n() )
{
P.row++;
P.col = 1;
}
}
while( P.col <= ncols )
{
if( ! IsAttacked(P) )
{
push(P);
if( n() == npieces )
report( );
else
_enumExts( );
pop();
}
P.col++;
}
if( n() == 0 )
cout << sol_count << endl;
}
void Board::print( )
{
int row, col;
for( row = nrows; row > 0; row-- )
{
for( col = 1; col <= ncols; col++)
// if(in(row,col)) cout << '*'; else cout << '0';
if(in(row,col)) cout << '#'; else cout << 'O';
cout << endl;
}
cout << endl;
}
void Board::report( )
{
sol_count++;
if( show_boards )
{
cout << "Board " << sol_count << ':' << endl;
print( );
}
}
#include "AttackTester.h"
static void useage()
{
cerr << "Options:\n\
--show-boards\n\
--pieces-per-row n1,n2,n3,...nr [default 1,1,1,...]\n\
--nrows m [default 4]\n\
--ncols n [default 4]\n\
--piece queen [default]\n\<|fim▁hole|> exit( 1 );
}
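// Example invocation (assuming the compiled binary is named "board"):
//   ./board --nrows 8 --ncols 8 --piece queen --show-boards
// With the default of one piece per row this enumerates the classic
// eight-queens placements.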
int main(int argc, char *argv[])
{
int nrows=4;
int ncols=4;
bool show_boards = false;
int A[Board::Maxrows];
bool piecesPerRowGiven = false;
AttackTester *ptester = getp("queen");
argc--;
argv++;
while( argc-- )
{
if(!strcmp(*argv,"--show-boards"))
{
show_boards = true;
argv++;
continue;
}
if(!strcmp(*argv,"--pieces-per-row"))
{
char *p = *(++argv);
int i;
for(i = 0; (i < Board::Maxrows) && *p!='\0'; i++ )
{
A[i] = strtol(p,&p,0);
if( *p == ',' ) p++; else
if( *p != '\0' ) useage();
}
for( ; i < Board::Maxrows; i++ ) A[i] = 1;
piecesPerRowGiven = true;
argv++; argc--;
continue;
}
if(!strcmp(*argv,"--nrows"))
{
argv++; argc--;
nrows=strtol(*(argv++),NULL,0);
continue;
}
if(!strcmp(*argv,"--ncols"))
{
argv++; argc--;
ncols=strtol(*(argv++),NULL,0);
continue;
}
if(!strcmp(*argv,"--piece"))
{
argv++; argc--;
ptester = getp(*(argv++));
if( !ptester )
{
cerr << "Unimplemented Piece:" << *(argv - 1) << endl;
exit ( 1 );
}
continue;
}
}
if(piecesPerRowGiven)
{
Board myBoard(nrows, ncols, ptester, show_boards, A);
myBoard.enumExts( );
}
else
{
Board myBoard(nrows, ncols, ptester, show_boards, NULL );
myBoard.enumExts( );
}
return 0;
}<|fim▁end|> | --piece king\n\
--piece rook or others added" << endl; |
<|file_name|>efflist.ts<|end_file_name|><|fim▁begin|>class effWord {
id: number;
word: string;
}
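// The numeric ids below appear to encode five dice rolls (each digit is 1-6),
// in the style of the EFF diceware wordlists, so a physical throw such as
// 1-1-1-1-2 maps to id 11112 ("abdomen").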
function getWords(): Array<effWord> {
return [
{ id: 11111, word: "abacus" },
{ id: 11112, word: "abdomen" },
{ id: 11113, word: "abdominal" },
{ id: 11114, word: "abide" },
{ id: 11115, word: "abiding" },
{ id: 11116, word: "ability" },
{ id: 11121, word: "ablaze" },
{ id: 11123, word: "abnormal" },
{ id: 11124, word: "abrasion" },
{ id: 11125, word: "abrasive" },
{ id: 11126, word: "abreast" },
{ id: 11131, word: "abridge" },
{ id: 11132, word: "abroad" },
{ id: 11133, word: "abruptly" },
{ id: 11134, word: "absence" },
{ id: 11135, word: "absentee" },
{ id: 11136, word: "absently" },
{ id: 11141, word: "absinthe" },
{ id: 11142, word: "absolute" },
{ id: 11143, word: "absolve" },
{ id: 11144, word: "abstain" },
{ id: 11145, word: "abstract" },
{ id: 11146, word: "absurd" },
{ id: 11151, word: "accent" },
{ id: 11152, word: "acclaim" },
{ id: 11153, word: "acclimate" },
{ id: 11154, word: "accompany" },
{ id: 11155, word: "account" },
{ id: 11156, word: "accuracy" },
{ id: 11161, word: "accurate" },
{ id: 11162, word: "accustom" },
{ id: 11163, word: "acetone" },
{ id: 11164, word: "achiness" },
{ id: 11165, word: "aching" },
{ id: 11166, word: "acid" },
{ id: 11211, word: "acorn" },
{ id: 11212, word: "acquaint" },
{ id: 11213, word: "acquire" },
{ id: 11214, word: "acre" },
{ id: 11215, word: "acrobat" },
{ id: 11216, word: "acronym" },
{ id: 11221, word: "acting" },
{ id: 11222, word: "action" },
{ id: 11223, word: "activate" },
{ id: 11224, word: "activator" },
{ id: 11225, word: "active" },
{ id: 11226, word: "activism" },
{ id: 11231, word: "activist" },
{ id: 11232, word: "activity" },
{ id: 11233, word: "actress" },
{ id: 11234, word: "acts" },
{ id: 11235, word: "acutely" },
{ id: 11236, word: "acuteness" },
{ id: 11241, word: "aeration" },
{ id: 11242, word: "aerobics" },
{ id: 11243, word: "aerosol" },
{ id: 11244, word: "aerospace" },
{ id: 11245, word: "afar" },
{ id: 11246, word: "affair" },
{ id: 11251, word: "affected" },
{ id: 11252, word: "affecting" },
{ id: 11253, word: "affection" },
{ id: 11254, word: "affidavit" },
{ id: 11255, word: "affiliate" },
{ id: 11256, word: "affirm" },
{ id: 11261, word: "affix" },
{ id: 11262, word: "afflicted" },
{ id: 11263, word: "affluent" },
{ id: 11264, word: "afford" },
{ id: 11265, word: "affront" },
{ id: 11266, word: "aflame" },
{ id: 11311, word: "afloat" },
{ id: 11312, word: "aflutter" },
{ id: 11313, word: "afoot" },
{ id: 11314, word: "afraid" },
{ id: 11315, word: "afterglow" },
{ id: 11316, word: "afterlife" },
{ id: 11321, word: "aftermath" },
{ id: 11322, word: "aftermost" },
{ id: 11323, word: "afternoon" },
{ id: 11324, word: "aged" },
{ id: 11325, word: "ageless" },
{ id: 11326, word: "agency" },
{ id: 11331, word: "agenda" },
{ id: 11332, word: "agent" },
{ id: 11333, word: "aggregate" },
{ id: 11334, word: "aghast" },
{ id: 11335, word: "agile" },
{ id: 11336, word: "agility" },
{ id: 11341, word: "aging" },
{ id: 11342, word: "agnostic" },
{ id: 11343, word: "agonize" },
{ id: 11344, word: "agonizing" },
{ id: 11345, word: "agony" },
{ id: 11346, word: "agreeable" },
{ id: 11351, word: "agreeably" },
{ id: 11352, word: "agreed" },
{ id: 11353, word: "agreeing" },
{ id: 11354, word: "agreement" },
{ id: 11355, word: "aground" },
{ id: 11356, word: "ahead" },
{ id: 11361, word: "ahoy" },
{ id: 11362, word: "aide" },
{ id: 11363, word: "aids" },
{ id: 11364, word: "aim" },
{ id: 11365, word: "ajar" },
{ id: 11366, word: "alabaster" },
{ id: 11411, word: "alarm" },
{ id: 11412, word: "albatross" },
{ id: 11413, word: "album" },
{ id: 11414, word: "alfalfa" },
{ id: 11415, word: "algebra" },
{ id: 11416, word: "algorithm" },
{ id: 11421, word: "alias" },
{ id: 11422, word: "alibi" },
{ id: 11423, word: "alienable" },
{ id: 11424, word: "alienate" },
{ id: 11425, word: "aliens" },
{ id: 11426, word: "alike" },
{ id: 11431, word: "alive" },
{ id: 11432, word: "alkaline" },
{ id: 11433, word: "alkalize" },
{ id: 11434, word: "almanac" },
{ id: 11435, word: "almighty" },
{ id: 11436, word: "almost" },
{ id: 11441, word: "aloe" },
{ id: 11442, word: "aloft" },
{ id: 11443, word: "aloha" },
{ id: 11444, word: "alone" },
{ id: 11445, word: "alongside" },
{ id: 11446, word: "aloof" },
{ id: 11451, word: "alphabet" },
{ id: 11452, word: "alright" },
{ id: 11453, word: "although" },
{ id: 11454, word: "altitude" },
{ id: 11455, word: "alto" },
{ id: 11456, word: "aluminum" },
{ id: 11461, word: "alumni" },
{ id: 11462, word: "always" },
{ id: 11463, word: "amaretto" },
{ id: 11464, word: "amaze" },
{ id: 11465, word: "amazingly" },
{ id: 11466, word: "amber" },
{ id: 11511, word: "ambiance" },
{ id: 11512, word: "ambiguity" },
{ id: 11513, word: "ambiguous" },
{ id: 11514, word: "ambition" },
{ id: 11515, word: "ambitious" },
{ id: 11516, word: "ambulance" },
{ id: 11521, word: "ambush" },
{ id: 11522, word: "amendable" },
{ id: 11523, word: "amendment" },
{ id: 11524, word: "amends" },
{ id: 11525, word: "amenity" },
{ id: 11526, word: "amiable" },
{ id: 11531, word: "amicably" },
{ id: 11532, word: "amid" },
{ id: 11533, word: "amigo" },
{ id: 11534, word: "amino" },
{ id: 11535, word: "amiss" },
{ id: 11536, word: "ammonia" },
{ id: 11541, word: "ammonium" },
{ id: 11542, word: "amnesty" },
{ id: 11543, word: "amniotic" },
{ id: 11544, word: "among" },
{ id: 11545, word: "amount" },
{ id: 11546, word: "amperage" },
{ id: 11551, word: "ample" },
{ id: 11552, word: "amplifier" },
{ id: 11553, word: "amplify" },
{ id: 11554, word: "amply" },
{ id: 11555, word: "amuck" },
{ id: 11556, word: "amulet" },
{ id: 11561, word: "amusable" },
{ id: 11562, word: "amused" },
{ id: 11563, word: "amusement" },
{ id: 11564, word: "amuser" },
{ id: 11565, word: "amusing" },
{ id: 11566, word: "anaconda" },
{ id: 11611, word: "anaerobic" },
{ id: 11612, word: "anagram" },
{ id: 11613, word: "anatomist" },
{ id: 11614, word: "anatomy" },
{ id: 11615, word: "anchor" },
{ id: 11616, word: "anchovy" },
{ id: 11621, word: "ancient" },
{ id: 11622, word: "android" },
{ id: 11623, word: "anemia" },
{ id: 11624, word: "anemic" },
{ id: 11625, word: "aneurism" },
{ id: 11626, word: "anew" },
{ id: 11631, word: "angelfish" },
{ id: 11632, word: "angelic" },
{ id: 11633, word: "anger" },
{ id: 11634, word: "angled" },
{ id: 11635, word: "angler" },
{ id: 11636, word: "angles" },
{ id: 11641, word: "angling" },
{ id: 11642, word: "angrily" },
{ id: 11643, word: "angriness" },
{ id: 11644, word: "anguished" },
{ id: 11645, word: "angular" },
{ id: 11646, word: "animal" },
{ id: 11651, word: "animate" },
{ id: 11652, word: "animating" },
{ id: 11653, word: "animation" },
{ id: 11654, word: "animator" },
{ id: 11655, word: "anime" },
{ id: 11656, word: "animosity" },
{ id: 11661, word: "ankle" },
{ id: 11662, word: "annex" },
{ id: 11663, word: "annotate" },
{ id: 11664, word: "announcer" },
{ id: 11665, word: "annoying" },
{ id: 11666, word: "annually" },
{ id: 12111, word: "annuity" },
{ id: 12112, word: "anointer" },
{ id: 12113, word: "another" },
{ id: 12114, word: "answering" },
{ id: 12115, word: "antacid" },
{ id: 12116, word: "antarctic" },
{ id: 12121, word: "anteater" },
{ id: 12122, word: "antelope" },
{ id: 12123, word: "antennae" },
{ id: 12124, word: "anthem" },
{ id: 12125, word: "anthill" },
{ id: 12126, word: "anthology" },
{ id: 12131, word: "antibody" },
{ id: 12132, word: "antics" },
{ id: 12133, word: "antidote" },
{ id: 12134, word: "antihero" },
{ id: 12135, word: "antiquely" },
{ id: 12136, word: "antiques" },
{ id: 12141, word: "antiquity" },
{ id: 12142, word: "antirust" },
{ id: 12143, word: "antitoxic" },
{ id: 12144, word: "antitrust" },
{ id: 12145, word: "antiviral" },
{ id: 12146, word: "antivirus" },
{ id: 12151, word: "antler" },
{ id: 12152, word: "antonym" },
{ id: 12153, word: "antsy" },
{ id: 12154, word: "anvil" },
{ id: 12155, word: "anybody" },
{ id: 12156, word: "anyhow" },
{ id: 12161, word: "anymore" },
{ id: 12162, word: "anyone" },
{ id: 12163, word: "anyplace" },
{ id: 12164, word: "anything" },
{ id: 12165, word: "anytime" },
{ id: 12166, word: "anyway" },
{ id: 12211, word: "anywhere" },
{ id: 12212, word: "aorta" },
{ id: 12213, word: "apache" },
{ id: 12214, word: "apostle" },
{ id: 12215, word: "appealing" },
{ id: 12216, word: "appear" },
{ id: 12221, word: "appease" },
{ id: 12222, word: "appeasing" },
{ id: 12223, word: "appendage" },
{ id: 12224, word: "appendix" },
{ id: 12225, word: "appetite" },
{ id: 12226, word: "appetizer" },
{ id: 12231, word: "applaud" },
{ id: 12232, word: "applause" },
{ id: 12233, word: "apple" },
{ id: 12234, word: "appliance" },
{ id: 12235, word: "applicant" },
{ id: 12236, word: "applied" },
{ id: 12241, word: "apply" },
{ id: 12242, word: "appointee" },
{ id: 12243, word: "appraisal" },
{ id: 12244, word: "appraiser" },
{ id: 12245, word: "apprehend" },
{ id: 12246, word: "approach" },
{ id: 12251, word: "approval" },
{ id: 12252, word: "approve" },
{ id: 12253, word: "apricot" },
{ id: 12254, word: "april" },
{ id: 12255, word: "apron" },
{ id: 12256, word: "aptitude" },
{ id: 12261, word: "aptly" },
{ id: 12262, word: "aqua" },
{ id: 12263, word: "aqueduct" },
{ id: 12264, word: "arbitrary" },
{ id: 12265, word: "arbitrate" },
{ id: 12266, word: "ardently" },
{ id: 12311, word: "area" },
{ id: 12312, word: "arena" },
{ id: 12313, word: "arguable" },
{ id: 12314, word: "arguably" },
{ id: 12315, word: "argue" },
{ id: 12316, word: "arise" },
{ id: 12321, word: "armadillo" },
{ id: 12322, word: "armband" },
{ id: 12323, word: "armchair" },
{ id: 12324, word: "armed" },
{ id: 12325, word: "armful" },
{ id: 12326, word: "armhole" },
{ id: 12331, word: "arming" },
{ id: 12332, word: "armless" },
{ id: 12333, word: "armoire" },
{ id: 12334, word: "armored" },
{ id: 12335, word: "armory" },
{ id: 12336, word: "armrest" },
{ id: 12341, word: "army" },
{ id: 12342, word: "aroma" },
{ id: 12343, word: "arose" },
{ id: 12344, word: "around" },
{ id: 12345, word: "arousal" },
{ id: 12346, word: "arrange" },
{ id: 12351, word: "array" },
{ id: 12352, word: "arrest" },
{ id: 12353, word: "arrival" },
{ id: 12354, word: "arrive" },
{ id: 12355, word: "arrogance" },
{ id: 12356, word: "arrogant" },
{ id: 12361, word: "arson" },
{ id: 12362, word: "art" },
{ id: 12363, word: "ascend" },
{ id: 12364, word: "ascension" },
{ id: 12365, word: "ascent" },
{ id: 12366, word: "ascertain" },
{ id: 12411, word: "ashamed" },
{ id: 12412, word: "ashen" },
{ id: 12413, word: "ashes" },
{ id: 12414, word: "ashy" },
{ id: 12415, word: "aside" },
{ id: 12416, word: "askew" },
{ id: 12421, word: "asleep" },
{ id: 12422, word: "asparagus" },
{ id: 12423, word: "aspect" },
{ id: 12424, word: "aspirate" },
{ id: 12425, word: "aspire" },
{ id: 12426, word: "aspirin" },
{ id: 12431, word: "astonish" },
{ id: 12432, word: "astound" },
{ id: 12433, word: "astride" },
{ id: 12434, word: "astrology" },
{ id: 12435, word: "astronaut" },
{ id: 12436, word: "astronomy" },
{ id: 12441, word: "astute" },
{ id: 12442, word: "atlantic" },
{ id: 12443, word: "atlas" },
{ id: 12444, word: "atom" },
{ id: 12445, word: "atonable" },
{ id: 12446, word: "atop" },
{ id: 12451, word: "atrium" },
{ id: 12452, word: "atrocious" },
{ id: 12453, word: "atrophy" },
{ id: 12454, word: "attach" },
{ id: 12455, word: "attain" },
{ id: 12456, word: "attempt" },
{ id: 12461, word: "attendant" },
{ id: 12462, word: "attendee" },
{ id: 12463, word: "attention" },
{ id: 12464, word: "attentive" },
{ id: 12465, word: "attest" },
{ id: 12466, word: "attic" },
{ id: 12511, word: "attire" },
{ id: 12512, word: "attitude" },
{ id: 12513, word: "attractor" },
{ id: 12514, word: "attribute" },
{ id: 12515, word: "atypical" },
{ id: 12516, word: "auction" },
{ id: 12521, word: "audacious" },
{ id: 12522, word: "audacity" },
{ id: 12523, word: "audible" },
{ id: 12524, word: "audibly" },
{ id: 12525, word: "audience" },
{ id: 12526, word: "audio" },
{ id: 12531, word: "audition" },
{ id: 12532, word: "augmented" },
{ id: 12533, word: "august" },
{ id: 12534, word: "authentic" },
{ id: 12535, word: "author" },
{ id: 12536, word: "autism" },
{ id: 12541, word: "autistic" },
{ id: 12542, word: "autograph" },
{ id: 12543, word: "automaker" },
{ id: 12544, word: "automated" },
{ id: 12545, word: "automatic" },
{ id: 12546, word: "autopilot" },
{ id: 12551, word: "available" },
{ id: 12552, word: "avalanche" },
{ id: 12553, word: "avatar" },
{ id: 12554, word: "avenge" },
{ id: 12555, word: "avenging" },
{ id: 12556, word: "avenue" },
{ id: 12561, word: "average" },
{ id: 12562, word: "aversion" },
{ id: 12563, word: "avert" },
{ id: 12564, word: "aviation" },
{ id: 12565, word: "aviator" },
{ id: 12566, word: "avid" },
{ id: 12611, word: "avoid" },
{ id: 12612, word: "await" },
{ id: 12613, word: "awaken" },
{ id: 12614, word: "award" },
{ id: 12615, word: "aware" },
{ id: 12616, word: "awhile" },
{ id: 12621, word: "awkward" },
{ id: 12622, word: "awning" },
{ id: 12623, word: "awoke" },
{ id: 12624, word: "awry" },
{ id: 12625, word: "axis" },
{ id: 12626, word: "babble" },
{ id: 12631, word: "babbling" },
{ id: 12632, word: "babied" },
{ id: 12633, word: "baboon" },
{ id: 12634, word: "backache" },
{ id: 12635, word: "backboard" },
{ id: 12636, word: "backboned" },
{ id: 12641, word: "backdrop" },
{ id: 12642, word: "backed" },
{ id: 12643, word: "backer" },
{ id: 12644, word: "backfield" },
{ id: 12645, word: "backfire" },
{ id: 12646, word: "backhand" },
{ id: 12651, word: "backing" },
{ id: 12652, word: "backlands" },
{ id: 12653, word: "backlash" },
{ id: 12654, word: "backless" },
{ id: 12655, word: "backlight" },
{ id: 12656, word: "backlit" },
{ id: 12661, word: "backlog" },
{ id: 12662, word: "backpack" },
{ id: 12663, word: "backpedal" },
{ id: 12664, word: "backrest" },
{ id: 12665, word: "backroom" },
{ id: 12666, word: "backshift" },
{ id: 13111, word: "backside" },
{ id: 13112, word: "backslid" },
{ id: 13113, word: "backspace" },
{ id: 13114, word: "backspin" },
{ id: 13115, word: "backstab" },
{ id: 13116, word: "backstage" },
{ id: 13121, word: "backtalk" },
{ id: 13122, word: "backtrack" },
{ id: 13123, word: "backup" },
{ id: 13124, word: "backward" },
{ id: 13125, word: "backwash" },
{ id: 13126, word: "backwater" },
{ id: 13131, word: "backyard" },
{ id: 13132, word: "bacon" },
{ id: 13133, word: "bacteria" },
{ id: 13134, word: "bacterium" },
{ id: 13135, word: "badass" },
{ id: 13136, word: "badge" },
{ id: 13141, word: "badland" },
{ id: 13142, word: "badly" },
{ id: 13143, word: "badness" },
{ id: 13144, word: "baffle" },
{ id: 13145, word: "baffling" },
{ id: 13146, word: "bagel" },
{ id: 13151, word: "bagful" },
{ id: 13152, word: "baggage" },
{ id: 13153, word: "bagged" },
{ id: 13154, word: "baggie" },
{ id: 13155, word: "bagginess" },
{ id: 13156, word: "bagging" },
{ id: 13161, word: "baggy" },
{ id: 13162, word: "bagpipe" },
{ id: 13163, word: "baguette" },
{ id: 13164, word: "baked" },
{ id: 13165, word: "bakery" },
{ id: 13166, word: "bakeshop" },
{ id: 13211, word: "baking" },
{ id: 13212, word: "balance" },
{ id: 13213, word: "balancing" },
{ id: 13214, word: "balcony" },
{ id: 13215, word: "balmy" },
{ id: 13216, word: "balsamic" },
{ id: 13221, word: "bamboo" },
{ id: 13222, word: "banana" },
{ id: 13223, word: "banish" },
{ id: 13224, word: "banister" },
{ id: 13225, word: "banjo" },
{ id: 13226, word: "bankable" },
{ id: 13231, word: "bankbook" },
{ id: 13232, word: "banked" },
{ id: 13233, word: "banker" },
{ id: 13234, word: "banking" },
{ id: 13235, word: "banknote" },
{ id: 13236, word: "bankroll" },
{ id: 13241, word: "banner" },
{ id: 13242, word: "bannister" },
{ id: 13243, word: "banshee" },
{ id: 13244, word: "banter" },
{ id: 13245, word: "barbecue" },
{ id: 13246, word: "barbed" },
{ id: 13251, word: "barbell" },
{ id: 13252, word: "barber" },
{ id: 13253, word: "barcode" },
{ id: 13254, word: "barge" },
{ id: 13255, word: "bargraph" },
{ id: 13256, word: "barista" },
{ id: 13261, word: "baritone" },
{ id: 13262, word: "barley" },
{ id: 13263, word: "barmaid" },
{ id: 13264, word: "barman" },
{ id: 13265, word: "barn" },
{ id: 13266, word: "barometer" },
{ id: 13311, word: "barrack" },
{ id: 13312, word: "barracuda" },
{ id: 13313, word: "barrel" },
{ id: 13314, word: "barrette" },
{ id: 13315, word: "barricade" },
{ id: 13316, word: "barrier" },
{ id: 13321, word: "barstool" },
{ id: 13322, word: "bartender" },
{ id: 13323, word: "barterer" },
{ id: 13324, word: "bash" },
{ id: 13325, word: "basically" },
{ id: 13326, word: "basics" },
{ id: 13331, word: "basil" },
{ id: 13332, word: "basin" },
{ id: 13333, word: "basis" },
{ id: 13334, word: "basket" },
{ id: 13335, word: "batboy" },
{ id: 13336, word: "batch" },
{ id: 13341, word: "bath" },
{ id: 13342, word: "baton" },
{ id: 13343, word: "bats" },
{ id: 13344, word: "battalion" },
{ id: 13345, word: "battered" },
{ id: 13346, word: "battering" },
{ id: 13351, word: "battery" },
{ id: 13352, word: "batting" },
{ id: 13353, word: "battle" },
{ id: 13354, word: "bauble" },
{ id: 13355, word: "bazooka" },
{ id: 13356, word: "blabber" },
{ id: 13361, word: "bladder" },
{ id: 13362, word: "blade" },
{ id: 13363, word: "blah" },
{ id: 13364, word: "blame" },
{ id: 13365, word: "blaming" },
{ id: 13366, word: "blanching" },
{ id: 13411, word: "blandness" },
{ id: 13412, word: "blank" },
{ id: 13413, word: "blaspheme" },
{ id: 13414, word: "blasphemy" },
{ id: 13415, word: "blast" },
{ id: 13416, word: "blatancy" },
{ id: 13421, word: "blatantly" },
{ id: 13422, word: "blazer" },
{ id: 13423, word: "blazing" },
{ id: 13424, word: "bleach" },
{ id: 13425, word: "bleak" },
{ id: 13426, word: "bleep" },
{ id: 13431, word: "blemish" },
{ id: 13432, word: "blend" },
{ id: 13433, word: "bless" },
{ id: 13434, word: "blighted" },
{ id: 13435, word: "blimp" },
{ id: 13436, word: "bling" },
{ id: 13441, word: "blinked" },
{ id: 13442, word: "blinker" },
{ id: 13443, word: "blinking" },
{ id: 13444, word: "blinks" },
{ id: 13445, word: "blip" },
{ id: 13446, word: "blissful" },
{ id: 13451, word: "blitz" },
{ id: 13452, word: "blizzard" },
{ id: 13453, word: "bloated" },
{ id: 13454, word: "bloating" },
{ id: 13455, word: "blob" },
{ id: 13456, word: "blog" },
{ id: 13461, word: "bloomers" },
{ id: 13462, word: "blooming" },
{ id: 13463, word: "blooper" },
{ id: 13464, word: "blot" },
{ id: 13465, word: "blouse" },
{ id: 13466, word: "blubber" },
{ id: 13511, word: "bluff" },
{ id: 13512, word: "bluish" },
{ id: 13513, word: "blunderer" },
{ id: 13514, word: "blunt" },
{ id: 13515, word: "blurb" },
{ id: 13516, word: "blurred" },
{ id: 13521, word: "blurry" },
{ id: 13522, word: "blurt" },
{ id: 13523, word: "blush" },
{ id: 13524, word: "blustery" },
{ id: 13525, word: "boaster" },
{ id: 13526, word: "boastful" },
{ id: 13531, word: "boasting" },
{ id: 13532, word: "boat" },
{ id: 13533, word: "bobbed" },
{ id: 13534, word: "bobbing" },
{ id: 13535, word: "bobble" },
{ id: 13536, word: "bobcat" },
{ id: 13541, word: "bobsled" },
{ id: 13542, word: "bobtail" },
{ id: 13543, word: "bodacious" },
{ id: 13544, word: "body" },
{ id: 13545, word: "bogged" },
{ id: 13546, word: "boggle" },
{ id: 13551, word: "bogus" },
{ id: 13552, word: "boil" },
{ id: 13553, word: "bok" },
{ id: 13554, word: "bolster" },
{ id: 13555, word: "bolt" },
{ id: 13556, word: "bonanza" },
{ id: 13561, word: "bonded" },
{ id: 13562, word: "bonding" },
{ id: 13563, word: "bondless" },
{ id: 13564, word: "boned" },
{ id: 13565, word: "bonehead" },
{ id: 13566, word: "boneless" },
{ id: 13611, word: "bonelike" },
{ id: 13612, word: "boney" },
{ id: 13613, word: "bonfire" },
{ id: 13614, word: "bonnet" },
{ id: 13615, word: "bonsai" },
{ id: 13616, word: "bonus" },
{ id: 13621, word: "bony" },
{ id: 13622, word: "boogeyman" },
{ id: 13623, word: "boogieman" },
{ id: 13624, word: "book" },
{ id: 13625, word: "boondocks" },
{ id: 13626, word: "booted" },
{ id: 13631, word: "booth" },
{ id: 13632, word: "bootie" },
{ id: 13633, word: "booting" },
{ id: 13634, word: "bootlace" },
{ id: 13635, word: "bootleg" },
{ id: 13636, word: "boots" },
{ id: 13641, word: "boozy" },
{ id: 13642, word: "borax" },
{ id: 13643, word: "boring" },
{ id: 13644, word: "borough" },
{ id: 13645, word: "borrower" },
{ id: 13646, word: "borrowing" },
{ id: 13651, word: "boss" },
{ id: 13652, word: "botanical" },
{ id: 13653, word: "botanist" },
{ id: 13654, word: "botany" },
{ id: 13655, word: "botch" },
{ id: 13656, word: "both" },
{ id: 13661, word: "bottle" },
{ id: 13662, word: "bottling" },
{ id: 13663, word: "bottom" },
{ id: 13664, word: "bounce" },
{ id: 13665, word: "bouncing" },
{ id: 13666, word: "bouncy" },
{ id: 14111, word: "bounding" },
{ id: 14112, word: "boundless" },
{ id: 14113, word: "bountiful" },
{ id: 14114, word: "bovine" },
{ id: 14115, word: "boxcar" },
{ id: 14116, word: "boxer" },
{ id: 14121, word: "boxing" },
{ id: 14122, word: "boxlike" },
{ id: 14123, word: "boxy" },
{ id: 14124, word: "breach" },
{ id: 14125, word: "breath" },
{ id: 14126, word: "breeches" },
{ id: 14131, word: "breeching" },
{ id: 14132, word: "breeder" },
{ id: 14133, word: "breeding" },
{ id: 14134, word: "breeze" },
{ id: 14135, word: "breezy" },
{ id: 14136, word: "brethren" },
{ id: 14141, word: "brewery" },
{ id: 14142, word: "brewing" },
{ id: 14143, word: "briar" },
{ id: 14144, word: "bribe" },
{ id: 14145, word: "brick" },
{ id: 14146, word: "bride" },
{ id: 14151, word: "bridged" },
{ id: 14152, word: "brigade" },
{ id: 14153, word: "bright" },
{ id: 14154, word: "brilliant" },
{ id: 14155, word: "brim" },
{ id: 14156, word: "bring" },
{ id: 14161, word: "brink" },
{ id: 14162, word: "brisket" },
{ id: 14163, word: "briskly" },
{ id: 14164, word: "briskness" },
{ id: 14165, word: "bristle" },
{ id: 14166, word: "brittle" },
{ id: 14211, word: "broadband" },
{ id: 14212, word: "broadcast" },
{ id: 14213, word: "broaden" },
{ id: 14214, word: "broadly" },
{ id: 14215, word: "broadness" },
{ id: 14216, word: "broadside" },
{ id: 14221, word: "broadways" },
{ id: 14222, word: "broiler" },
{ id: 14223, word: "broiling" },
{ id: 14224, word: "broken" },
{ id: 14225, word: "broker" },
{ id: 14226, word: "bronchial" },
{ id: 14231, word: "bronco" },
{ id: 14232, word: "bronze" },
{ id: 14233, word: "bronzing" },
{ id: 14234, word: "brook" },
{ id: 14235, word: "broom" },
{ id: 14236, word: "brought" },
{ id: 14241, word: "browbeat" },
{ id: 14242, word: "brownnose" },
{ id: 14243, word: "browse" },
{ id: 14244, word: "browsing" },
{ id: 14245, word: "bruising" },
{ id: 14246, word: "brunch" },
{ id: 14251, word: "brunette" },
{ id: 14252, word: "brunt" },
{ id: 14253, word: "brush" },
{ id: 14254, word: "brussels" },
{ id: 14255, word: "brute" },
{ id: 14256, word: "brutishly" },
{ id: 14261, word: "bubble" },
{ id: 14262, word: "bubbling" },
{ id: 14263, word: "bubbly" },
{ id: 14264, word: "buccaneer" },
{ id: 14265, word: "bucked" },
{ id: 14266, word: "bucket" },
{ id: 14311, word: "buckle" },
{ id: 14312, word: "buckshot" },
{ id: 14313, word: "buckskin" },
{ id: 14314, word: "bucktooth" },
{ id: 14315, word: "buckwheat" },
{ id: 14316, word: "buddhism" },
{ id: 14321, word: "buddhist" },
{ id: 14322, word: "budding" },
{ id: 14323, word: "buddy" },
{ id: 14324, word: "budget" },
{ id: 14325, word: "buffalo" },
{ id: 14326, word: "buffed" },
{ id: 14331, word: "buffer" },
{ id: 14332, word: "buffing" },
{ id: 14333, word: "buffoon" },
{ id: 14334, word: "buggy" },
{ id: 14335, word: "bulb" },
{ id: 14336, word: "bulge" },
{ id: 14341, word: "bulginess" },
{ id: 14342, word: "bulgur" },
{ id: 14343, word: "bulk" },
{ id: 14344, word: "bulldog" },
{ id: 14345, word: "bulldozer" },
{ id: 14346, word: "bullfight" },
{ id: 14351, word: "bullfrog" },
{ id: 14352, word: "bullhorn" },
{ id: 14353, word: "bullion" },
{ id: 14354, word: "bullish" },
{ id: 14355, word: "bullpen" },
{ id: 14356, word: "bullring" },
{ id: 14361, word: "bullseye" },
{ id: 14362, word: "bullwhip" },
{ id: 14363, word: "bully" },
{ id: 14364, word: "bunch" },
{ id: 14365, word: "bundle" },
{ id: 14366, word: "bungee" },
{ id: 14411, word: "bunion" },
{ id: 14412, word: "bunkbed" },
{ id: 14413, word: "bunkhouse" },
{ id: 14414, word: "bunkmate" },
{ id: 14415, word: "bunny" },
{ id: 14416, word: "bunt" },
{ id: 14421, word: "busboy" },
{ id: 14422, word: "bush" },
{ id: 14423, word: "busily" },
{ id: 14424, word: "busload" },
{ id: 14425, word: "bust" },
{ id: 14426, word: "busybody" },
{ id: 14431, word: "buzz" },
{ id: 14432, word: "cabana" },
{ id: 14433, word: "cabbage" },
{ id: 14434, word: "cabbie" },
{ id: 14435, word: "cabdriver" },
{ id: 14436, word: "cable" },
{ id: 14441, word: "caboose" },
{ id: 14442, word: "cache" },
{ id: 14443, word: "cackle" },
{ id: 14444, word: "cacti" },
{ id: 14445, word: "cactus" },
{ id: 14446, word: "caddie" },
{ id: 14451, word: "caddy" },
{ id: 14452, word: "cadet" },
{ id: 14453, word: "cadillac" },
{ id: 14454, word: "cadmium" },
{ id: 14455, word: "cage" },
{ id: 14456, word: "cahoots" },
{ id: 14461, word: "cake" },
{ id: 14462, word: "calamari" },
{ id: 14463, word: "calamity" },
{ id: 14464, word: "calcium" },
{ id: 14465, word: "calculate" },
{ id: 14466, word: "calculus" },
{ id: 14511, word: "caliber" },
{ id: 14512, word: "calibrate" },
{ id: 14513, word: "calm" },
{ id: 14514, word: "caloric" },
{ id: 14515, word: "calorie" },
{ id: 14516, word: "calzone" },
{ id: 14521, word: "camcorder" },
{ id: 14522, word: "cameo" },
{ id: 14523, word: "camera" },
{ id: 14524, word: "camisole" },
{ id: 14525, word: "camper" },
{ id: 14526, word: "campfire" },
{ id: 14531, word: "camping" },
{ id: 14532, word: "campsite" },
{ id: 14533, word: "campus" },
{ id: 14534, word: "canal" },
{ id: 14535, word: "canary" },
{ id: 14536, word: "cancel" },
{ id: 14541, word: "candied" },
{ id: 14542, word: "candle" },
{ id: 14543, word: "candy" },
{ id: 14544, word: "cane" },
{ id: 14545, word: "canine" },
{ id: 14546, word: "canister" },
{ id: 14551, word: "cannabis" },
{ id: 14552, word: "canned" },
{ id: 14553, word: "canning" },
{ id: 14554, word: "cannon" },
{ id: 14555, word: "cannot" },
{ id: 14556, word: "canola" },
{ id: 14561, word: "canon" },
{ id: 14562, word: "canopener" },
{ id: 14563, word: "canopy" },
{ id: 14564, word: "canteen" },
{ id: 14565, word: "canyon" },
{ id: 14566, word: "capable" },
{ id: 14611, word: "capably" },
{ id: 14612, word: "capacity" },
{ id: 14613, word: "cape" },
{ id: 14614, word: "capillary" },
{ id: 14615, word: "capital" },
{ id: 14616, word: "capitol" },
{ id: 14621, word: "capped" },
{ id: 14622, word: "capricorn" },
{ id: 14623, word: "capsize" },
{ id: 14624, word: "capsule" },
{ id: 14625, word: "caption" },
{ id: 14626, word: "captivate" },
{ id: 14631, word: "captive" },
{ id: 14632, word: "captivity" },
{ id: 14633, word: "capture" },
{ id: 14634, word: "caramel" },
{ id: 14635, word: "carat" },
{ id: 14636, word: "caravan" },
{ id: 14641, word: "carbon" },
{ id: 14642, word: "cardboard" },
{ id: 14643, word: "carded" },
{ id: 14644, word: "cardiac" },
{ id: 14645, word: "cardigan" },
{ id: 14646, word: "cardinal" },
{ id: 14651, word: "cardstock" },
{ id: 14652, word: "carefully" },
{ id: 14653, word: "caregiver" },
{ id: 14654, word: "careless" },
{ id: 14655, word: "caress" },
{ id: 14656, word: "caretaker" },
{ id: 14661, word: "cargo" },
{ id: 14662, word: "caring" },
{ id: 14663, word: "carless" },
{ id: 14664, word: "carload" },
{ id: 14665, word: "carmaker" },
{ id: 14666, word: "carnage" },
{ id: 15111, word: "carnation" },
{ id: 15112, word: "carnival" },
{ id: 15113, word: "carnivore" },
{ id: 15114, word: "carol" },
{ id: 15115, word: "carpenter" },
{ id: 15116, word: "carpentry" },
{ id: 15121, word: "carpool" },
{ id: 15122, word: "carport" },
{ id: 15123, word: "carried" },
{ id: 15124, word: "carrot" },
{ id: 15125, word: "carrousel" },
{ id: 15126, word: "carry" },
{ id: 15131, word: "cartel" },
{ id: 15132, word: "cartload" },
{ id: 15133, word: "carton" },
{ id: 15134, word: "cartoon" },
{ id: 15135, word: "cartridge" },
{ id: 15136, word: "cartwheel" },
{ id: 15141, word: "carve" },
{ id: 15142, word: "carving" },
{ id: 15143, word: "carwash" },
{ id: 15144, word: "cascade" },
{ id: 15145, word: "case" },
{ id: 15146, word: "cash" },
{ id: 15151, word: "casing" },
{ id: 15152, word: "casino" },
{ id: 15153, word: "casket" },
{ id: 15154, word: "cassette" },
{ id: 15155, word: "casually" },
{ id: 15156, word: "casualty" },
{ id: 15161, word: "catacomb" },
{ id: 15162, word: "catalog" },
{ id: 15163, word: "catalyst" },
{ id: 15164, word: "catalyze" },
{ id: 15165, word: "catapult" },
{ id: 15166, word: "cataract" },
{ id: 15211, word: "catatonic" },
{ id: 15212, word: "catcall" },
{ id: 15213, word: "catchable" },
{ id: 15214, word: "catcher" },
{ id: 15215, word: "catching" },
{ id: 15216, word: "catchy" },
{ id: 15221, word: "caterer" },
{ id: 15222, word: "catering" },
{ id: 15223, word: "catfight" },
{ id: 15224, word: "catfish" },
{ id: 15225, word: "cathedral" },
{ id: 15226, word: "cathouse" },
{ id: 15231, word: "catlike" },
{ id: 15232, word: "catnap" },
{ id: 15233, word: "catnip" },
{ id: 15234, word: "catsup" },
{ id: 15235, word: "cattail" },
{ id: 15236, word: "cattishly" },
{ id: 15241, word: "cattle" },
{ id: 15242, word: "catty" },
{ id: 15243, word: "catwalk" },
{ id: 15244, word: "caucasian" },
{ id: 15245, word: "caucus" },
{ id: 15246, word: "causal" },
{ id: 15251, word: "causation" },
{ id: 15252, word: "cause" },
{ id: 15253, word: "causing" },
{ id: 15254, word: "cauterize" },
{ id: 15255, word: "caution" },
{ id: 15256, word: "cautious" },
{ id: 15261, word: "cavalier" },
{ id: 15262, word: "cavalry" },
{ id: 15263, word: "caviar" },
{ id: 15264, word: "cavity" },
{ id: 15265, word: "cedar" },
{ id: 15266, word: "celery" },
{ id: 15311, word: "celestial" },
{ id: 15312, word: "celibacy" },
{ id: 15313, word: "celibate" },
{ id: 15314, word: "celtic" },
{ id: 15315, word: "cement" },
{ id: 15316, word: "census" },
{ id: 15321, word: "ceramics" },
{ id: 15322, word: "ceremony" },
{ id: 15323, word: "certainly" },
{ id: 15324, word: "certainty" },
{ id: 15325, word: "certified" },
{ id: 15326, word: "certify" },
{ id: 15331, word: "cesarean" },
{ id: 15332, word: "cesspool" },
{ id: 15333, word: "chafe" },
{ id: 15334, word: "chaffing" },
{ id: 15335, word: "chain" },
{ id: 15336, word: "chair" },
{ id: 15341, word: "chalice" },
{ id: 15342, word: "challenge" },
{ id: 15343, word: "chamber" },
{ id: 15344, word: "chamomile" },
{ id: 15345, word: "champion" },
{ id: 15346, word: "chance" },
{ id: 15351, word: "change" },
{ id: 15352, word: "channel" },
{ id: 15353, word: "chant" },
{ id: 15354, word: "chaos" },
{ id: 15355, word: "chaperone" },
{ id: 15356, word: "chaplain" },
{ id: 15361, word: "chapped" },
{ id: 15362, word: "chaps" },
{ id: 15363, word: "chapter" },
{ id: 15364, word: "character" },
{ id: 15365, word: "charbroil" },
{ id: 15366, word: "charcoal" },
{ id: 15411, word: "charger" },
{ id: 15412, word: "charging" },
{ id: 15413, word: "chariot" },
{ id: 15414, word: "charity" },
{ id: 15415, word: "charm" },
{ id: 15416, word: "charred" },
{ id: 15421, word: "charter" },
{ id: 15422, word: "charting" },
{ id: 15423, word: "chase" },
{ id: 15424, word: "chasing" },
{ id: 15425, word: "chaste" },
{ id: 15426, word: "chastise" },
{ id: 15431, word: "chastity" },
{ id: 15432, word: "chatroom" },
{ id: 15433, word: "chatter" },
{ id: 15434, word: "chatting" },
{ id: 15435, word: "chatty" },
{ id: 15436, word: "cheating" },
{ id: 15441, word: "cheddar" },
{ id: 15442, word: "cheek" },
{ id: 15443, word: "cheer" },
{ id: 15444, word: "cheese" },
{ id: 15445, word: "cheesy" },
{ id: 15446, word: "chef" },
{ id: 15451, word: "chemicals" },
{ id: 15452, word: "chemist" },
{ id: 15453, word: "chemo" },
{ id: 15454, word: "cherisher" },
{ id: 15455, word: "cherub" },
{ id: 15456, word: "chess" },
{ id: 15461, word: "chest" },
{ id: 15462, word: "chevron" },
{ id: 15463, word: "chevy" },
{ id: 15464, word: "chewable" },
{ id: 15465, word: "chewer" },
{ id: 15466, word: "chewing" },
{ id: 15511, word: "chewy" },
{ id: 15512, word: "chief" },
{ id: 15513, word: "chihuahua" },
{ id: 15514, word: "childcare" },
{ id: 15515, word: "childhood" },
{ id: 15516, word: "childish" },
{ id: 15521, word: "childless" },
{ id: 15522, word: "childlike" },
{ id: 15523, word: "chili" },
{ id: 15524, word: "chill" },
{ id: 15525, word: "chimp" },
{ id: 15526, word: "chip" },
{ id: 15531, word: "chirping" },
{ id: 15532, word: "chirpy" },
{ id: 15533, word: "chitchat" },
{ id: 15534, word: "chivalry" },
{ id: 15535, word: "chive" },
{ id: 15536, word: "chloride" },
{ id: 15541, word: "chlorine" },
{ id: 15542, word: "choice" },
{ id: 15543, word: "chokehold" },
{ id: 15544, word: "choking" },
{ id: 15545, word: "chomp" },
{ id: 15546, word: "chooser" },
{ id: 15551, word: "choosing" },
{ id: 15552, word: "choosy" },
{ id: 15553, word: "chop" },
{ id: 15554, word: "chosen" },
{ id: 15555, word: "chowder" },
{ id: 15556, word: "chowtime" },
{ id: 15561, word: "chrome" },
{ id: 15562, word: "chubby" },
{ id: 15563, word: "chuck" },
{ id: 15564, word: "chug" },
{ id: 15565, word: "chummy" },
{ id: 15566, word: "chump" },
{ id: 15611, word: "chunk" },
{ id: 15612, word: "churn" },
{ id: 15613, word: "chute" },
{ id: 15614, word: "cider" },
{ id: 15615, word: "cilantro" },
{ id: 15616, word: "cinch" },
{ id: 15621, word: "cinema" },
{ id: 15622, word: "cinnamon" },
{ id: 15623, word: "circle" },
{ id: 15624, word: "circling" },
{ id: 15625, word: "circular" },
{ id: 15626, word: "circulate" },
{ id: 15631, word: "circus" },
{ id: 15632, word: "citable" },
{ id: 15633, word: "citadel" },
{ id: 15634, word: "citation" },
{ id: 15635, word: "citizen" },
{ id: 15636, word: "citric" },
{ id: 15641, word: "citrus" },
{ id: 15642, word: "city" },
{ id: 15643, word: "civic" },
{ id: 15644, word: "civil" },
{ id: 15645, word: "clad" },
{ id: 15646, word: "claim" },
{ id: 15651, word: "clambake" },
{ id: 15652, word: "clammy" },
{ id: 15653, word: "clamor" },
{ id: 15654, word: "clamp" },
{ id: 15655, word: "clamshell" },
{ id: 15656, word: "clang" },
{ id: 15661, word: "clanking" },
{ id: 15662, word: "clapped" },
{ id: 15663, word: "clapper" },
{ id: 15664, word: "clapping" },
{ id: 15665, word: "clarify" },
{ id: 15666, word: "clarinet" },
{ id: 16111, word: "clarity" },
{ id: 16112, word: "clash" },
{ id: 16113, word: "clasp" },
{ id: 16114, word: "class" },
{ id: 16115, word: "clatter" },
{ id: 16116, word: "clause" },
{ id: 16121, word: "clavicle" },
{ id: 16122, word: "claw" },
{ id: 16123, word: "clay" },
{ id: 16124, word: "clean" },
{ id: 16125, word: "clear" },
{ id: 16126, word: "cleat" },
{ id: 16131, word: "cleaver" },
{ id: 16132, word: "cleft" },
{ id: 16133, word: "clench" },
{ id: 16134, word: "clergyman" },
{ id: 16135, word: "clerical" },
{ id: 16136, word: "clerk" },
{ id: 16141, word: "clever" },
{ id: 16142, word: "clicker" },
{ id: 16143, word: "client" },
{ id: 16144, word: "climate" },
{ id: 16145, word: "climatic" },
{ id: 16146, word: "cling" },
{ id: 16151, word: "clinic" },
{ id: 16152, word: "clinking" },
{ id: 16153, word: "clip" },
{ id: 16154, word: "clique" },
{ id: 16155, word: "cloak" },
{ id: 16156, word: "clobber" },
{ id: 16161, word: "clock" },
{ id: 16162, word: "clone" },
{ id: 16163, word: "cloning" },
{ id: 16164, word: "closable" },
{ id: 16165, word: "closure" },
{ id: 16166, word: "clothes" },
{ id: 16211, word: "clothing" },
{ id: 16212, word: "cloud" },
{ id: 16213, word: "clover" },
{ id: 16214, word: "clubbed" },
{ id: 16215, word: "clubbing" },
{ id: 16216, word: "clubhouse" },
{ id: 16221, word: "clump" },
{ id: 16222, word: "clumsily" },
{ id: 16223, word: "clumsy" },
{ id: 16224, word: "clunky" },
{ id: 16225, word: "clustered" },
{ id: 16226, word: "clutch" },
{ id: 16231, word: "clutter" },
{ id: 16232, word: "coach" },
{ id: 16233, word: "coagulant" },
{ id: 16234, word: "coastal" },
{ id: 16235, word: "coaster" },
{ id: 16236, word: "coasting" },
{ id: 16241, word: "coastland" },
{ id: 16242, word: "coastline" },
{ id: 16243, word: "coat" },
{ id: 16244, word: "coauthor" },
{ id: 16245, word: "cobalt" },
{ id: 16246, word: "cobbler" },
{ id: 16251, word: "cobweb" },
{ id: 16252, word: "cocoa" },
{ id: 16253, word: "coconut" },
{ id: 16254, word: "cod" },
{ id: 16255, word: "coeditor" },
{ id: 16256, word: "coerce" },
{ id: 16261, word: "coexist" },
{ id: 16262, word: "coffee" },
{ id: 16263, word: "cofounder" },
{ id: 16264, word: "cognition" },
{ id: 16265, word: "cognitive" },
{ id: 16266, word: "cogwheel" },
{ id: 16311, word: "coherence" },
{ id: 16312, word: "coherent" },
{ id: 16313, word: "cohesive" },
{ id: 16314, word: "coil" },
{ id: 16315, word: "coke" },
{ id: 16316, word: "cola" },
{ id: 16321, word: "cold" },
{ id: 16322, word: "coleslaw" },
{ id: 16323, word: "coliseum" },
{ id: 16324, word: "collage" },
{ id: 16325, word: "collapse" },
{ id: 16326, word: "collar" },
{ id: 16331, word: "collected" },
{ id: 16332, word: "collector" },
{ id: 16333, word: "collide" },
{ id: 16334, word: "collie" },
{ id: 16335, word: "collision" },
{ id: 16336, word: "colonial" },
{ id: 16341, word: "colonist" },
{ id: 16342, word: "colonize" },
{ id: 16343, word: "colony" },
{ id: 16344, word: "colossal" },
{ id: 16345, word: "colt" },
{ id: 16346, word: "coma" },
{ id: 16351, word: "come" },
{ id: 16352, word: "comfort" },
{ id: 16353, word: "comfy" },
{ id: 16354, word: "comic" },
{ id: 16355, word: "coming" },
{ id: 16356, word: "comma" },
{ id: 16361, word: "commence" },
{ id: 16362, word: "commend" },
{ id: 16363, word: "comment" },
{ id: 16364, word: "commerce" },
{ id: 16365, word: "commode" },
{ id: 16366, word: "commodity" },
{ id: 16411, word: "commodore" },
{ id: 16412, word: "common" },
{ id: 16413, word: "commotion" },
{ id: 16414, word: "commute" },
{ id: 16415, word: "commuting" },
{ id: 16416, word: "compacted" },
{ id: 16421, word: "compacter" },
{ id: 16422, word: "compactly" },
{ id: 16423, word: "compactor" },
{ id: 16424, word: "companion" },
{ id: 16425, word: "company" },
{ id: 16426, word: "compare" },
{ id: 16431, word: "compel" },
{ id: 16432, word: "compile" },
{ id: 16433, word: "comply" },
{ id: 16434, word: "component" },
{ id: 16435, word: "composed" },
{ id: 16436, word: "composer" },
{ id: 16441, word: "composite" },
{ id: 16442, word: "compost" },
{ id: 16443, word: "composure" },
{ id: 16444, word: "compound" },
{ id: 16445, word: "compress" },
{ id: 16446, word: "comprised" },
{ id: 16451, word: "computer" },
{ id: 16452, word: "computing" },
{ id: 16453, word: "comrade" },
{ id: 16454, word: "concave" },
{ id: 16455, word: "conceal" },
{ id: 16456, word: "conceded" },
{ id: 16461, word: "concept" },
{ id: 16462, word: "concerned" },
{ id: 16463, word: "concert" },
{ id: 16464, word: "conch" },
{ id: 16465, word: "concierge" },
{ id: 16466, word: "concise" },
{ id: 16511, word: "conclude" },
{ id: 16512, word: "concrete" },
{ id: 16513, word: "concur" },
{ id: 16514, word: "condense" },
{ id: 16515, word: "condiment" },
{ id: 16516, word: "condition" },
{ id: 16521, word: "condone" },
{ id: 16522, word: "conducive" },
{ id: 16523, word: "conductor" },
{ id: 16524, word: "conduit" },
{ id: 16525, word: "cone" },
{ id: 16526, word: "confess" },
{ id: 16531, word: "confetti" },
{ id: 16532, word: "confidant" },
{ id: 16533, word: "confident" },
{ id: 16534, word: "confider" },
{ id: 16535, word: "confiding" },
{ id: 16536, word: "configure" },
{ id: 16541, word: "confined" },
{ id: 16542, word: "confining" },
{ id: 16543, word: "confirm" },
{ id: 16544, word: "conflict" },
{ id: 16545, word: "conform" },
{ id: 16546, word: "confound" },
{ id: 16551, word: "confront" },
{ id: 16552, word: "confused" },
{ id: 16553, word: "confusing" },
{ id: 16554, word: "confusion" },
{ id: 16555, word: "congenial" },
{ id: 16556, word: "congested" },
{ id: 16561, word: "congrats" },
{ id: 16562, word: "congress" },
{ id: 16563, word: "conical" },
{ id: 16564, word: "conjoined" },
{ id: 16565, word: "conjure" },
{ id: 16566, word: "conjuror" },
{ id: 16611, word: "connected" },
{ id: 16612, word: "connector" },
{ id: 16613, word: "consensus" },
{ id: 16614, word: "consent" },
{ id: 16615, word: "console" },
{ id: 16616, word: "consoling" },
{ id: 16621, word: "consonant" },
{ id: 16622, word: "constable" },
{ id: 16623, word: "constant" },
{ id: 16624, word: "constrain" },
{ id: 16625, word: "constrict" },
{ id: 16626, word: "construct" },
{ id: 16631, word: "consult" },
{ id: 16632, word: "consumer" },
{ id: 16633, word: "consuming" },
{ id: 16634, word: "contact" },
{ id: 16635, word: "container" },
{ id: 16636, word: "contempt" },
{ id: 16641, word: "contend" },
{ id: 16642, word: "contented" },
{ id: 16643, word: "contently" },
{ id: 16644, word: "contents" },
{ id: 16645, word: "contest" },
{ id: 16646, word: "context" },
{ id: 16651, word: "contort" },
{ id: 16652, word: "contour" },
{ id: 16653, word: "contrite" },
{ id: 16654, word: "control" },
{ id: 16655, word: "contusion" },
{ id: 16656, word: "convene" },
{ id: 16661, word: "convent" },
{ id: 16662, word: "copartner" },
{ id: 16663, word: "cope" },
{ id: 16664, word: "copied" },
{ id: 16665, word: "copier" },
{ id: 16666, word: "copilot" },
{ id: 21111, word: "coping" },
{ id: 21112, word: "copious" },
{ id: 21113, word: "copper" },
{ id: 21114, word: "copy" },
{ id: 21115, word: "coral" },
{ id: 21116, word: "cork" },
{ id: 21121, word: "cornball" },
{ id: 21122, word: "cornbread" },
{ id: 21123, word: "corncob" },
{ id: 21124, word: "cornea" },
{ id: 21125, word: "corned" },
{ id: 21126, word: "corner" },
{ id: 21131, word: "cornfield" },
{ id: 21132, word: "cornflake" },
{ id: 21133, word: "cornhusk" },
{ id: 21134, word: "cornmeal" },
{ id: 21135, word: "cornstalk" },
{ id: 21136, word: "corny" },
{ id: 21141, word: "coronary" },
{ id: 21142, word: "coroner" },
{ id: 21143, word: "corporal" },
{ id: 21144, word: "corporate" },
{ id: 21145, word: "corral" },
{ id: 21146, word: "correct" },
{ id: 21151, word: "corridor" },
{ id: 21152, word: "corrode" },
{ id: 21153, word: "corroding" },
{ id: 21154, word: "corrosive" },
{ id: 21155, word: "corsage" },
{ id: 21156, word: "corset" },
{ id: 21161, word: "cortex" },
{ id: 21162, word: "cosigner" },
{ id: 21163, word: "cosmetics" },
{ id: 21164, word: "cosmic" },
{ id: 21165, word: "cosmos" },
{ id: 21166, word: "cosponsor" },
{ id: 21211, word: "cost" },
{ id: 21212, word: "cottage" },
{ id: 21213, word: "cotton" },
{ id: 21214, word: "couch" },
{ id: 21215, word: "cough" },
{ id: 21216, word: "could" },
{ id: 21221, word: "countable" },
{ id: 21222, word: "countdown" },
{ id: 21223, word: "counting" },
{ id: 21224, word: "countless" },
{ id: 21225, word: "country" },
{ id: 21226, word: "county" },
{ id: 21231, word: "courier" },
{ id: 21232, word: "covenant" },
{ id: 21233, word: "cover" },
{ id: 21234, word: "coveted" },
{ id: 21235, word: "coveting" },
{ id: 21236, word: "coyness" },
{ id: 21241, word: "cozily" },
{ id: 21242, word: "coziness" },
{ id: 21243, word: "cozy" },
{ id: 21244, word: "crabbing" },
{ id: 21245, word: "crabgrass" },
{ id: 21246, word: "crablike" },
{ id: 21251, word: "crabmeat" },
{ id: 21252, word: "cradle" },
{ id: 21253, word: "cradling" },
{ id: 21254, word: "crafter" },
{ id: 21255, word: "craftily" },
{ id: 21256, word: "craftsman" },
{ id: 21261, word: "craftwork" },
{ id: 21262, word: "crafty" },
{ id: 21263, word: "cramp" },
{ id: 21264, word: "cranberry" },
{ id: 21265, word: "crane" },
{ id: 21266, word: "cranial" },
{ id: 21311, word: "cranium" },
{ id: 21312, word: "crank" },
{ id: 21313, word: "crate" },
{ id: 21314, word: "crave" },
{ id: 21315, word: "craving" },
{ id: 21316, word: "crawfish" },
{ id: 21321, word: "crawlers" },
{ id: 21322, word: "crawling" },
{ id: 21323, word: "crayfish" },
{ id: 21324, word: "crayon" },
{ id: 21325, word: "crazed" },
{ id: 21326, word: "crazily" },
{ id: 21331, word: "craziness" },
{ id: 21332, word: "crazy" },
{ id: 21333, word: "creamed" },
{ id: 21334, word: "creamer" },
{ id: 21335, word: "creamlike" },
{ id: 21336, word: "crease" },
{ id: 21341, word: "creasing" },
{ id: 21342, word: "creatable" },
{ id: 21343, word: "create" },
{ id: 21344, word: "creation" },
{ id: 21345, word: "creative" },
{ id: 21346, word: "creature" },
{ id: 21351, word: "credible" },
{ id: 21352, word: "credibly" },
{ id: 21353, word: "credit" },
{ id: 21354, word: "creed" },
{ id: 21355, word: "creme" },
{ id: 21356, word: "creole" },
{ id: 21361, word: "crepe" },
{ id: 21362, word: "crept" },
{ id: 21363, word: "crescent" },
{ id: 21364, word: "crested" },
{ id: 21365, word: "cresting" },
{ id: 21366, word: "crestless" },
{ id: 21411, word: "crevice" },
{ id: 21412, word: "crewless" },
{ id: 21413, word: "crewman" },
{ id: 21414, word: "crewmate" },
{ id: 21415, word: "crib" },
{ id: 21416, word: "cricket" },
{ id: 21421, word: "cried" },
{ id: 21422, word: "crier" },
{ id: 21423, word: "crimp" },
{ id: 21424, word: "crimson" },
{ id: 21425, word: "cringe" },
{ id: 21426, word: "cringing" },
{ id: 21431, word: "crinkle" },
{ id: 21432, word: "crinkly" },
{ id: 21433, word: "crisped" },
{ id: 21434, word: "crisping" },
{ id: 21435, word: "crisply" },
{ id: 21436, word: "crispness" },
{ id: 21441, word: "crispy" },
{ id: 21442, word: "criteria" },
{ id: 21443, word: "critter" },
{ id: 21444, word: "croak" },
{ id: 21445, word: "crock" },
{ id: 21446, word: "crook" },
{ id: 21451, word: "croon" },
{ id: 21452, word: "crop" },
{ id: 21453, word: "cross" },
{ id: 21454, word: "crouch" },
{ id: 21455, word: "crouton" },
{ id: 21456, word: "crowbar" },
{ id: 21461, word: "crowd" },
{ id: 21462, word: "crown" },
{ id: 21463, word: "crucial" },
{ id: 21464, word: "crudely" },
{ id: 21465, word: "crudeness" },
{ id: 21466, word: "cruelly" },
{ id: 21511, word: "cruelness" },
{ id: 21512, word: "cruelty" },
{ id: 21513, word: "crumb" },
{ id: 21514, word: "crummiest" },
{ id: 21515, word: "crummy" },
{ id: 21516, word: "crumpet" },
{ id: 21521, word: "crumpled" },
{ id: 21522, word: "cruncher" },
{ id: 21523, word: "crunching" },
{ id: 21524, word: "crunchy" },
{ id: 21525, word: "crusader" },
{ id: 21526, word: "crushable" },
{ id: 21531, word: "crushed" },
{ id: 21532, word: "crusher" },
{ id: 21533, word: "crushing" },
{ id: 21534, word: "crust" },
{ id: 21535, word: "crux" },
{ id: 21536, word: "crying" },
{ id: 21541, word: "cryptic" },
{ id: 21542, word: "crystal" },
{ id: 21543, word: "cubbyhole" },
{ id: 21544, word: "cube" },
{ id: 21545, word: "cubical" },
{ id: 21546, word: "cubicle" },
{ id: 21551, word: "cucumber" },
{ id: 21552, word: "cuddle" },
{ id: 21553, word: "cuddly" },
{ id: 21554, word: "cufflink" },
{ id: 21555, word: "culinary" },
{ id: 21556, word: "culminate" },
{ id: 21561, word: "culpable" },
{ id: 21562, word: "culprit" },
{ id: 21563, word: "cultivate" },
{ id: 21564, word: "cultural" },
{ id: 21565, word: "culture" },
{ id: 21566, word: "cupbearer" },
{ id: 21611, word: "cupcake" },
{ id: 21612, word: "cupid" },
{ id: 21613, word: "cupped" },
{ id: 21614, word: "cupping" },
{ id: 21615, word: "curable" },
{ id: 21616, word: "curator" },
{ id: 21621, word: "curdle" },
{ id: 21622, word: "cure" },
{ id: 21623, word: "curfew" },
{ id: 21624, word: "curing" },
{ id: 21625, word: "curled" },
{ id: 21626, word: "curler" },
{ id: 21631, word: "curliness" },
{ id: 21632, word: "curling" },
{ id: 21633, word: "curly" },
{ id: 21634, word: "curry" },
{ id: 21635, word: "curse" },
{ id: 21636, word: "cursive" },
{ id: 21641, word: "cursor" },
{ id: 21642, word: "curtain" },
{ id: 21643, word: "curtly" },
{ id: 21644, word: "curtsy" },
{ id: 21645, word: "curvature" },
{ id: 21646, word: "curve" },
{ id: 21651, word: "curvy" },
{ id: 21652, word: "cushy" },
{ id: 21653, word: "cusp" },
{ id: 21654, word: "cussed" },
{ id: 21655, word: "custard" },
{ id: 21656, word: "custodian" },
{ id: 21661, word: "custody" },
{ id: 21662, word: "customary" },
{ id: 21663, word: "customer" },
{ id: 21664, word: "customize" },
{ id: 21665, word: "customs" },
{ id: 21666, word: "cut" },
{ id: 22111, word: "cycle" },
{ id: 22112, word: "cyclic" },
{ id: 22113, word: "cycling" },
{ id: 22114, word: "cyclist" },
{ id: 22115, word: "cylinder" },
{ id: 22116, word: "cymbal" },
{ id: 22121, word: "cytoplasm" },
{ id: 22122, word: "cytoplast" },
{ id: 22123, word: "dab" },
{ id: 22124, word: "dad" },
{ id: 22125, word: "daffodil" },
{ id: 22126, word: "dagger" },
{ id: 22131, word: "daily" },
{ id: 22132, word: "daintily" },
{ id: 22133, word: "dainty" },
{ id: 22134, word: "dairy" },
{ id: 22135, word: "daisy" },
{ id: 22136, word: "dallying" },
{ id: 22141, word: "dance" },
{ id: 22142, word: "dancing" },
{ id: 22143, word: "dandelion" },
{ id: 22144, word: "dander" },
{ id: 22145, word: "dandruff" },
{ id: 22146, word: "dandy" },
{ id: 22151, word: "danger" },
{ id: 22152, word: "dangle" },
{ id: 22153, word: "dangling" },
{ id: 22154, word: "daredevil" },
{ id: 22155, word: "dares" },
{ id: 22156, word: "daringly" },
{ id: 22161, word: "darkened" },
{ id: 22162, word: "darkening" },
{ id: 22163, word: "darkish" },
{ id: 22164, word: "darkness" },
{ id: 22165, word: "darkroom" },
{ id: 22166, word: "darling" },
{ id: 22211, word: "darn" },
{ id: 22212, word: "dart" },
{ id: 22213, word: "darwinism" },
{ id: 22214, word: "dash" },
{ id: 22215, word: "dastardly" },
{ id: 22216, word: "data" },
{ id: 22221, word: "datebook" },
{ id: 22222, word: "dating" },
{ id: 22223, word: "daughter" },
{ id: 22224, word: "daunting" },
{ id: 22225, word: "dawdler" },
{ id: 22226, word: "dawn" },
{ id: 22231, word: "daybed" },
{ id: 22232, word: "daybreak" },
{ id: 22233, word: "daycare" },
{ id: 22234, word: "daydream" },
{ id: 22235, word: "daylight" },
{ id: 22236, word: "daylong" },
{ id: 22241, word: "dayroom" },
{ id: 22242, word: "daytime" },
{ id: 22243, word: "dazzler" },
{ id: 22244, word: "dazzling" },
{ id: 22245, word: "deacon" },
{ id: 22246, word: "deafening" },
{ id: 22251, word: "deafness" },
{ id: 22252, word: "dealer" },
{ id: 22253, word: "dealing" },
{ id: 22254, word: "dealmaker" },
{ id: 22255, word: "dealt" },
{ id: 22256, word: "dean" },
{ id: 22261, word: "debatable" },
{ id: 22262, word: "debate" },
{ id: 22263, word: "debating" },
{ id: 22264, word: "debit" },
{ id: 22265, word: "debrief" },
{ id: 22266, word: "debtless" },
{ id: 22311, word: "debtor" },
{ id: 22312, word: "debug" },
{ id: 22313, word: "debunk" },
{ id: 22314, word: "decade" },
{ id: 22315, word: "decaf" },
{ id: 22316, word: "decal" },
{ id: 22321, word: "decathlon" },
{ id: 22322, word: "decay" },
{ id: 22323, word: "deceased" },
{ id: 22324, word: "deceit" },
{ id: 22325, word: "deceiver" },
{ id: 22326, word: "deceiving" },
{ id: 22331, word: "december" },
{ id: 22332, word: "decency" },
{ id: 22333, word: "decent" },
{ id: 22334, word: "deception" },
{ id: 22335, word: "deceptive" },
{ id: 22336, word: "decibel" },
{ id: 22341, word: "decidable" },
{ id: 22342, word: "decimal" },
{ id: 22343, word: "decimeter" },
{ id: 22344, word: "decipher" },
{ id: 22345, word: "deck" },
{ id: 22346, word: "declared" },
{ id: 22351, word: "decline" },
{ id: 22352, word: "decode" },
{ id: 22353, word: "decompose" },
{ id: 22354, word: "decorated" },
{ id: 22355, word: "decorator" },
{ id: 22356, word: "decoy" },
{ id: 22361, word: "decrease" },
{ id: 22362, word: "decree" },
{ id: 22363, word: "dedicate" },
{ id: 22364, word: "dedicator" },
{ id: 22365, word: "deduce" },
{ id: 22366, word: "deduct" },
{ id: 22411, word: "deed" },
{ id: 22412, word: "deem" },
{ id: 22413, word: "deepen" },
{ id: 22414, word: "deeply" },
{ id: 22415, word: "deepness" },
{ id: 22416, word: "deface" },
{ id: 22421, word: "defacing" },
{ id: 22422, word: "defame" },
{ id: 22423, word: "default" },
{ id: 22424, word: "defeat" },
{ id: 22425, word: "defection" },
{ id: 22426, word: "defective" },
{ id: 22431, word: "defendant" },
{ id: 22432, word: "defender" },
{ id: 22433, word: "defense" },
{ id: 22434, word: "defensive" },
{ id: 22435, word: "deferral" },
{ id: 22436, word: "deferred" },
{ id: 22441, word: "defiance" },
{ id: 22442, word: "defiant" },
{ id: 22443, word: "defile" },
{ id: 22444, word: "defiling" },
{ id: 22445, word: "define" },
{ id: 22446, word: "definite" },
{ id: 22451, word: "deflate" },
{ id: 22452, word: "deflation" },
{ id: 22453, word: "deflator" },
{ id: 22454, word: "deflected" },
{ id: 22455, word: "deflector" },
{ id: 22456, word: "defog" },
{ id: 22461, word: "deforest" },
{ id: 22462, word: "defraud" },
{ id: 22463, word: "defrost" },
{ id: 22464, word: "deftly" },
{ id: 22465, word: "defuse" },
{ id: 22466, word: "defy" },
{ id: 22511, word: "degraded" },
{ id: 22512, word: "degrading" },
{ id: 22513, word: "degrease" },
{ id: 22514, word: "degree" },
{ id: 22515, word: "dehydrate" },
{ id: 22516, word: "deity" },
{ id: 22521, word: "dejected" },
{ id: 22522, word: "delay" },
{ id: 22523, word: "delegate" },
{ id: 22524, word: "delegator" },
{ id: 22525, word: "delete" },
{ id: 22526, word: "deletion" },
{ id: 22531, word: "delicacy" },
{ id: 22532, word: "delicate" },
{ id: 22533, word: "delicious" },
{ id: 22534, word: "delighted" },
{ id: 22535, word: "delirious" },
{ id: 22536, word: "delirium" },
{ id: 22541, word: "deliverer" },
{ id: 22542, word: "delivery" },
{ id: 22543, word: "delouse" },
{ id: 22544, word: "delta" },
{ id: 22545, word: "deluge" },
{ id: 22546, word: "delusion" },
{ id: 22551, word: "deluxe" },
{ id: 22552, word: "demanding" },
{ id: 22553, word: "demeaning" },
{ id: 22554, word: "demeanor" },
{ id: 22555, word: "demise" },
{ id: 22556, word: "democracy" },
{ id: 22561, word: "democrat" },
{ id: 22562, word: "demote" },
{ id: 22563, word: "demotion" },
{ id: 22564, word: "demystify" },
{ id: 22565, word: "denatured" },
{ id: 22566, word: "deniable" },
{ id: 22611, word: "denial" },
{ id: 22612, word: "denim" },
{ id: 22613, word: "denote" },
{ id: 22614, word: "dense" },
{ id: 22615, word: "density" },
{ id: 22616, word: "dental" },
{ id: 22621, word: "dentist" },
{ id: 22622, word: "denture" },
{ id: 22623, word: "deny" },
{ id: 22624, word: "deodorant" },
{ id: 22625, word: "deodorize" },
{ id: 22626, word: "departed" },
{ id: 22631, word: "departure" },
{ id: 22632, word: "depict" },
{ id: 22633, word: "deplete" },
{ id: 22634, word: "depletion" },
{ id: 22635, word: "deplored" },
{ id: 22636, word: "deploy" },
{ id: 22641, word: "deport" },
{ id: 22642, word: "depose" },
{ id: 22643, word: "depraved" },
{ id: 22644, word: "depravity" },
{ id: 22645, word: "deprecate" },
{ id: 22646, word: "depress" },
{ id: 22651, word: "deprive" },
{ id: 22652, word: "depth" },
{ id: 22653, word: "deputize" },
{ id: 22654, word: "deputy" },
{ id: 22655, word: "derail" },
{ id: 22656, word: "deranged" },
{ id: 22661, word: "derby" },
{ id: 22662, word: "derived" },
{ id: 22663, word: "desecrate" },
{ id: 22664, word: "deserve" },
{ id: 22665, word: "deserving" },
{ id: 22666, word: "designate" },
{ id: 23111, word: "designed" },
{ id: 23112, word: "designer" },
{ id: 23113, word: "designing" },
{ id: 23114, word: "deskbound" },
{ id: 23115, word: "desktop" },
{ id: 23116, word: "deskwork" },
{ id: 23121, word: "desolate" },
{ id: 23122, word: "despair" },
{ id: 23123, word: "despise" },
{ id: 23124, word: "despite" },
{ id: 23125, word: "destiny" },
{ id: 23126, word: "destitute" },
{ id: 23131, word: "destruct" },
{ id: 23132, word: "detached" },
{ id: 23133, word: "detail" },
{ id: 23134, word: "detection" },
{ id: 23135, word: "detective" },
{ id: 23136, word: "detector" },
{ id: 23141, word: "detention" },
{ id: 23142, word: "detergent" },
{ id: 23143, word: "detest" },
{ id: 23144, word: "detonate" },
{ id: 23145, word: "detonator" },
{ id: 23146, word: "detoxify" },
{ id: 23151, word: "detract" },
{ id: 23152, word: "deuce" },
{ id: 23153, word: "devalue" },
{ id: 23154, word: "deviancy" },
{ id: 23155, word: "deviant" },
{ id: 23156, word: "deviate" },
{ id: 23161, word: "deviation" },
{ id: 23162, word: "deviator" },
{ id: 23163, word: "device" },
{ id: 23164, word: "devious" },
{ id: 23165, word: "devotedly" },
{ id: 23166, word: "devotee" },
{ id: 23211, word: "devotion" },
{ id: 23212, word: "devourer" },
{ id: 23213, word: "devouring" },
{ id: 23214, word: "devoutly" },
{ id: 23215, word: "dexterity" },
{ id: 23216, word: "dexterous" },
{ id: 23221, word: "diabetes" },
{ id: 23222, word: "diabetic" },
{ id: 23223, word: "diabolic" },
{ id: 23224, word: "diagnoses" },
{ id: 23225, word: "diagnosis" },
{ id: 23226, word: "diagram" },
{ id: 23231, word: "dial" },
{ id: 23232, word: "diameter" },
{ id: 23233, word: "diaper" },
{ id: 23234, word: "diaphragm" },
{ id: 23235, word: "diary" },
{ id: 23236, word: "dice" },
{ id: 23241, word: "dicing" },
{ id: 23242, word: "dictate" },
{ id: 23243, word: "dictation" },
{ id: 23244, word: "dictator" },
{ id: 23245, word: "difficult" },
{ id: 23246, word: "diffused" },
{ id: 23251, word: "diffuser" },
{ id: 23252, word: "diffusion" },
{ id: 23253, word: "diffusive" },
{ id: 23254, word: "dig" },
{ id: 23255, word: "dilation" },
{ id: 23256, word: "diligence" },
{ id: 23261, word: "diligent" },
{ id: 23262, word: "dill" },
{ id: 23263, word: "dilute" },
{ id: 23264, word: "dime" },
{ id: 23265, word: "diminish" },
{ id: 23266, word: "dimly" },
{ id: 23311, word: "dimmed" },
{ id: 23312, word: "dimmer" },
{ id: 23313, word: "dimness" },
{ id: 23314, word: "dimple" },
{ id: 23315, word: "diner" },
{ id: 23316, word: "dingbat" },
{ id: 23321, word: "dinghy" },
{ id: 23322, word: "dinginess" },
{ id: 23323, word: "dingo" },
{ id: 23324, word: "dingy" },
{ id: 23325, word: "dining" },
{ id: 23326, word: "dinner" },
{ id: 23331, word: "diocese" },
{ id: 23332, word: "dioxide" },
{ id: 23333, word: "diploma" },
{ id: 23334, word: "dipped" },
{ id: 23335, word: "dipper" },
{ id: 23336, word: "dipping" },
{ id: 23341, word: "directed" },
{ id: 23342, word: "direction" },
{ id: 23343, word: "directive" },
{ id: 23344, word: "directly" },
{ id: 23345, word: "directory" },
{ id: 23346, word: "direness" },
{ id: 23351, word: "dirtiness" },
{ id: 23352, word: "disabled" },
{ id: 23353, word: "disagree" },
{ id: 23354, word: "disallow" },
{ id: 23355, word: "disarm" },
{ id: 23356, word: "disarray" },
{ id: 23361, word: "disaster" },
{ id: 23362, word: "disband" },
{ id: 23363, word: "disbelief" },
{ id: 23364, word: "disburse" },
{ id: 23365, word: "discard" },
{ id: 23366, word: "discern" },
{ id: 23411, word: "discharge" },
{ id: 23412, word: "disclose" },
{ id: 23413, word: "discolor" },
{ id: 23414, word: "discount" },
{ id: 23415, word: "discourse" },
{ id: 23416, word: "discover" },
{ id: 23421, word: "discuss" },
{ id: 23422, word: "disdain" },
{ id: 23423, word: "disengage" },
{ id: 23424, word: "disfigure" },
{ id: 23425, word: "disgrace" },
{ id: 23426, word: "dish" },
{ id: 23431, word: "disinfect" },
{ id: 23432, word: "disjoin" },
{ id: 23433, word: "disk" },
{ id: 23434, word: "dislike" },
{ id: 23435, word: "disliking" },
{ id: 23436, word: "dislocate" },
{ id: 23441, word: "dislodge" },
{ id: 23442, word: "disloyal" },
{ id: 23443, word: "dismantle" },
{ id: 23444, word: "dismay" },
{ id: 23445, word: "dismiss" },
{ id: 23446, word: "dismount" },
{ id: 23451, word: "disobey" },
{ id: 23452, word: "disorder" },
{ id: 23453, word: "disown" },
{ id: 23454, word: "disparate" },
{ id: 23455, word: "disparity" },
{ id: 23456, word: "dispatch" },
{ id: 23461, word: "dispense" },
{ id: 23462, word: "dispersal" },
{ id: 23463, word: "dispersed" },
{ id: 23464, word: "disperser" },
{ id: 23465, word: "displace" },
{ id: 23466, word: "display" },
{ id: 23511, word: "displease" },
{ id: 23512, word: "disposal" },
{ id: 23513, word: "dispose" },
{ id: 23514, word: "disprove" },
{ id: 23515, word: "dispute" },
{ id: 23516, word: "disregard" },
{ id: 23521, word: "disrupt" },
{ id: 23522, word: "dissuade" },
{ id: 23523, word: "distance" },
{ id: 23524, word: "distant" },
{ id: 23525, word: "distaste" },
{ id: 23526, word: "distill" },
{ id: 23531, word: "distinct" },
{ id: 23532, word: "distort" },
{ id: 23533, word: "distract" },
{ id: 23534, word: "distress" },
{ id: 23535, word: "district" },
{ id: 23536, word: "distrust" },
{ id: 23541, word: "ditch" },
{ id: 23542, word: "ditto" },
{ id: 23543, word: "ditzy" },
{ id: 23544, word: "dividable" },
{ id: 23545, word: "divided" },
{ id: 23546, word: "dividend" },
{ id: 23551, word: "dividers" },
{ id: 23552, word: "dividing" },
{ id: 23553, word: "divinely" },
{ id: 23554, word: "diving" },
{ id: 23555, word: "divinity" },
{ id: 23556, word: "divisible" },
{ id: 23561, word: "divisibly" },
{ id: 23562, word: "division" },
{ id: 23563, word: "divisive" },
{ id: 23564, word: "divorcee" },
{ id: 23565, word: "dizziness" },
{ id: 23566, word: "dizzy" },
{ id: 23611, word: "doable" },
{ id: 23612, word: "docile" },
{ id: 23613, word: "dock" },
{ id: 23614, word: "doctrine" },
{ id: 23615, word: "document" },
{ id: 23616, word: "dodge" },
{ id: 23621, word: "dodgy" },
{ id: 23622, word: "doily" },
{ id: 23623, word: "doing" },
{ id: 23624, word: "dole" },
{ id: 23625, word: "dollar" },
{ id: 23626, word: "dollhouse" },
{ id: 23631, word: "dollop" },
{ id: 23632, word: "dolly" },
{ id: 23633, word: "dolphin" },
{ id: 23634, word: "domain" },
{ id: 23635, word: "domelike" },
{ id: 23636, word: "domestic" },
{ id: 23641, word: "dominion" },
{ id: 23642, word: "dominoes" },
{ id: 23643, word: "donated" },
{ id: 23644, word: "donation" },
{ id: 23645, word: "donator" },
{ id: 23646, word: "donor" },
{ id: 23651, word: "donut" },
{ id: 23652, word: "doodle" },
{ id: 23653, word: "doorbell" },
{ id: 23654, word: "doorframe" },
{ id: 23655, word: "doorknob" },
{ id: 23656, word: "doorman" },
{ id: 23661, word: "doormat" },
{ id: 23662, word: "doornail" },
{ id: 23663, word: "doorpost" },
{ id: 23664, word: "doorstep" },
{ id: 23665, word: "doorstop" },
{ id: 23666, word: "doorway" },
{ id: 24111, word: "doozy" },
{ id: 24112, word: "dork" },
{ id: 24113, word: "dormitory" },
{ id: 24114, word: "dorsal" },
{ id: 24115, word: "dosage" },
{ id: 24116, word: "dose" },
{ id: 24121, word: "dotted" },
{ id: 24122, word: "doubling" },
{ id: 24123, word: "douche" },
{ id: 24124, word: "dove" },
{ id: 24125, word: "down" },
{ id: 24126, word: "dowry" },
{ id: 24131, word: "doze" },
{ id: 24132, word: "drab" },
{ id: 24133, word: "dragging" },
{ id: 24134, word: "dragonfly" },
{ id: 24135, word: "dragonish" },
{ id: 24136, word: "dragster" },
{ id: 24141, word: "drainable" },
{ id: 24142, word: "drainage" },
{ id: 24143, word: "drained" },
{ id: 24144, word: "drainer" },
{ id: 24145, word: "drainpipe" },
{ id: 24146, word: "dramatic" },
{ id: 24151, word: "dramatize" },
{ id: 24152, word: "drank" },
{ id: 24153, word: "drapery" },
{ id: 24154, word: "drastic" },
{ id: 24155, word: "draw" },
{ id: 24156, word: "dreaded" },
{ id: 24161, word: "dreadful" },
{ id: 24162, word: "dreadlock" },
{ id: 24163, word: "dreamboat" },
{ id: 24164, word: "dreamily" },
{ id: 24165, word: "dreamland" },
{ id: 24166, word: "dreamless" },
{ id: 24211, word: "dreamlike" },
{ id: 24212, word: "dreamt" },
{ id: 24213, word: "dreamy" },
{ id: 24214, word: "drearily" },
{ id: 24215, word: "dreary" },
{ id: 24216, word: "drench" },
{ id: 24221, word: "dress" },
{ id: 24222, word: "drew" },
{ id: 24223, word: "dribble" },
{ id: 24224, word: "dried" },
{ id: 24225, word: "drier" },
{ id: 24226, word: "drift" },
{ id: 24231, word: "driller" },
{ id: 24232, word: "drilling" },
{ id: 24233, word: "drinkable" },
{ id: 24234, word: "drinking" },
{ id: 24235, word: "dripping" },
{ id: 24236, word: "drippy" },
{ id: 24241, word: "drivable" },
{ id: 24242, word: "driven" },
{ id: 24243, word: "driver" },
{ id: 24244, word: "driveway" },
{ id: 24245, word: "driving" },
{ id: 24246, word: "drizzle" },
{ id: 24251, word: "drizzly" },
{ id: 24252, word: "drone" },
{ id: 24253, word: "drool" },
{ id: 24254, word: "droop" },
{ id: 24255, word: "drop-down" },
{ id: 24256, word: "dropbox" },
{ id: 24261, word: "dropkick" },
{ id: 24262, word: "droplet" },
{ id: 24263, word: "dropout" },
{ id: 24264, word: "dropper" },
{ id: 24265, word: "drove" },
{ id: 24266, word: "drown" },
{ id: 24311, word: "drowsily" },
{ id: 24312, word: "drudge" },
{ id: 24313, word: "drum" },
{ id: 24314, word: "dry" },
{ id: 24315, word: "dubbed" },
{ id: 24316, word: "dubiously" },
{ id: 24321, word: "duchess" },
{ id: 24322, word: "duckbill" },
{ id: 24323, word: "ducking" },
{ id: 24324, word: "duckling" },
{ id: 24325, word: "ducktail" },
{ id: 24326, word: "ducky" },
{ id: 24331, word: "duct" },
{ id: 24332, word: "dude" },
{ id: 24333, word: "duffel" },
{ id: 24334, word: "dugout" },
{ id: 24335, word: "duh" },
{ id: 24336, word: "duke" },
{ id: 24341, word: "duller" },
{ id: 24342, word: "dullness" },
{ id: 24343, word: "duly" },
{ id: 24344, word: "dumping" },
{ id: 24345, word: "dumpling" },
{ id: 24346, word: "dumpster" },
{ id: 24351, word: "duo" },
{ id: 24352, word: "dupe" },
{ id: 24353, word: "duplex" },
{ id: 24354, word: "duplicate" },
{ id: 24355, word: "duplicity" },
{ id: 24356, word: "durable" },
{ id: 24361, word: "durably" },
{ id: 24362, word: "duration" },
{ id: 24363, word: "duress" },
{ id: 24364, word: "during" },
{ id: 24365, word: "dusk" },
{ id: 24366, word: "dust" },
{ id: 24411, word: "dutiful" },
{ id: 24412, word: "duty" },
{ id: 24413, word: "duvet" },
{ id: 24414, word: "dwarf" },
{ id: 24415, word: "dweeb" },
{ id: 24416, word: "dwelled" },
{ id: 24421, word: "dweller" },
{ id: 24422, word: "dwelling" },
{ id: 24423, word: "dwindle" },
{ id: 24424, word: "dwindling" },
{ id: 24425, word: "dynamic" },
{ id: 24426, word: "dynamite" },
{ id: 24431, word: "dynasty" },
{ id: 24432, word: "dyslexia" },
{ id: 24433, word: "dyslexic" },
{ id: 24434, word: "each" },
{ id: 24435, word: "eagle" },
{ id: 24436, word: "earache" },
{ id: 24441, word: "eardrum" },
{ id: 24442, word: "earflap" },
{ id: 24443, word: "earful" },
{ id: 24444, word: "earlobe" },
{ id: 24445, word: "early" },
{ id: 24446, word: "earmark" },
{ id: 24451, word: "earmuff" },
{ id: 24452, word: "earphone" },
{ id: 24453, word: "earpiece" },
{ id: 24454, word: "earplugs" },
{ id: 24455, word: "earring" },
{ id: 24456, word: "earshot" },
{ id: 24461, word: "earthen" },
{ id: 24462, word: "earthlike" },
{ id: 24463, word: "earthling" },
{ id: 24464, word: "earthly" },
{ id: 24465, word: "earthworm" },
{ id: 24466, word: "earthy" },
{ id: 24511, word: "earwig" },
{ id: 24512, word: "easeful" },
{ id: 24513, word: "easel" },
{ id: 24514, word: "easiest" },
{ id: 24515, word: "easily" },
{ id: 24516, word: "easiness" },
{ id: 24521, word: "easing" },
{ id: 24522, word: "eastbound" },
{ id: 24523, word: "eastcoast" },
{ id: 24524, word: "easter" },
{ id: 24525, word: "eastward" },
{ id: 24526, word: "eatable" },
{ id: 24531, word: "eaten" },
{ id: 24532, word: "eatery" },
{ id: 24533, word: "eating" },
{ id: 24534, word: "eats" },
{ id: 24535, word: "ebay" },
{ id: 24536, word: "ebony" },
{ id: 24541, word: "ebook" },
{ id: 24542, word: "ecard" },
{ id: 24543, word: "eccentric" },
{ id: 24544, word: "echo" },
{ id: 24545, word: "eclair" },
{ id: 24546, word: "eclipse" },
{ id: 24551, word: "ecologist" },
{ id: 24552, word: "ecology" },
{ id: 24553, word: "economic" },
{ id: 24554, word: "economist" },
{ id: 24555, word: "economy" },
{ id: 24556, word: "ecosphere" },
{ id: 24561, word: "ecosystem" },
{ id: 24562, word: "edge" },
{ id: 24563, word: "edginess" },
{ id: 24564, word: "edging" },
{ id: 24565, word: "edgy" },
{ id: 24566, word: "edition" },
{ id: 24611, word: "editor" },
{ id: 24612, word: "educated" },
{ id: 24613, word: "education" },
{ id: 24614, word: "educator" },
{ id: 24615, word: "eel" },
{ id: 24616, word: "effective" },
{ id: 24621, word: "effects" },
{ id: 24622, word: "efficient" },
{ id: 24623, word: "effort" },
{ id: 24624, word: "eggbeater" },
{ id: 24625, word: "egging" },
{ id: 24626, word: "eggnog" },
{ id: 24631, word: "eggplant" },
{ id: 24632, word: "eggshell" },
{ id: 24633, word: "egomaniac" },
{ id: 24634, word: "egotism" },
{ id: 24635, word: "egotistic" },
{ id: 24636, word: "either" },
{ id: 24641, word: "eject" },
{ id: 24642, word: "elaborate" },
{ id: 24643, word: "elastic" },
{ id: 24644, word: "elated" },
{ id: 24645, word: "elbow" },
{ id: 24646, word: "eldercare" },
{ id: 24651, word: "elderly" },
{ id: 24652, word: "eldest" },
{ id: 24653, word: "electable" },
{ id: 24654, word: "election" },
{ id: 24655, word: "elective" },
{ id: 24656, word: "elephant" },
{ id: 24661, word: "elevate" },
{ id: 24662, word: "elevating" },
{ id: 24663, word: "elevation" },
{ id: 24664, word: "elevator" },
{ id: 24665, word: "eleven" },
{ id: 24666, word: "elf" },
{ id: 25111, word: "eligible" },
{ id: 25112, word: "eligibly" },
{ id: 25113, word: "eliminate" },
{ id: 25114, word: "elite" },
{ id: 25115, word: "elitism" },
{ id: 25116, word: "elixir" },
{ id: 25121, word: "elk" },
{ id: 25122, word: "ellipse" },
{ id: 25123, word: "elliptic" },
{ id: 25124, word: "elm" },
{ id: 25125, word: "elongated" },
{ id: 25126, word: "elope" },
{ id: 25131, word: "eloquence" },
{ id: 25132, word: "eloquent" },
{ id: 25133, word: "elsewhere" },
{ id: 25134, word: "elude" },
{ id: 25135, word: "elusive" },
{ id: 25136, word: "elves" },
{ id: 25141, word: "email" },
{ id: 25142, word: "embargo" },
{ id: 25143, word: "embark" },
{ id: 25144, word: "embassy" },
{ id: 25145, word: "embattled" },
{ id: 25146, word: "embellish" },
{ id: 25151, word: "ember" },
{ id: 25152, word: "embezzle" },
{ id: 25153, word: "emblaze" },
{ id: 25154, word: "emblem" },
{ id: 25155, word: "embody" },
{ id: 25156, word: "embolism" },
{ id: 25161, word: "emboss" },
{ id: 25162, word: "embroider" },
{ id: 25163, word: "emcee" },
{ id: 25164, word: "emerald" },
{ id: 25165, word: "emergency" },
{ id: 25166, word: "emission" },
{ id: 25211, word: "emit" },
{ id: 25212, word: "emote" },
{ id: 25213, word: "emoticon" },
{ id: 25214, word: "emotion" },
{ id: 25215, word: "empathic" },
{ id: 25216, word: "empathy" },
{ id: 25221, word: "emperor" },
{ id: 25222, word: "emphases" },
{ id: 25223, word: "emphasis" },
{ id: 25224, word: "emphasize" },
{ id: 25225, word: "emphatic" },
{ id: 25226, word: "empirical" },
{ id: 25231, word: "employed" },
{ id: 25232, word: "employee" },
{ id: 25233, word: "employer" },
{ id: 25234, word: "emporium" },
{ id: 25235, word: "empower" },
{ id: 25236, word: "emptier" },
{ id: 25241, word: "emptiness" },
{ id: 25242, word: "empty" },
{ id: 25243, word: "emu" },
{ id: 25244, word: "enable" },
{ id: 25245, word: "enactment" },
{ id: 25246, word: "enamel" },
{ id: 25251, word: "enchanted" },
{ id: 25252, word: "enchilada" },
{ id: 25253, word: "encircle" },
{ id: 25254, word: "enclose" },
{ id: 25255, word: "enclosure" },
{ id: 25256, word: "encode" },
{ id: 25261, word: "encore" },
{ id: 25262, word: "encounter" },
{ id: 25263, word: "encourage" },
{ id: 25264, word: "encroach" },
{ id: 25265, word: "encrust" },
{ id: 25266, word: "encrypt" },
{ id: 25311, word: "endanger" },
{ id: 25312, word: "endeared" },
{ id: 25313, word: "endearing" },
{ id: 25314, word: "ended" },
{ id: 25315, word: "ending" },
{ id: 25316, word: "endless" },
{ id: 25321, word: "endnote" },
{ id: 25322, word: "endocrine" },
{ id: 25323, word: "endorphin" },
{ id: 25324, word: "endorse" },
{ id: 25325, word: "endowment" },
{ id: 25326, word: "endpoint" },
{ id: 25331, word: "endurable" },
{ id: 25332, word: "endurance" },
{ id: 25333, word: "enduring" },
{ id: 25334, word: "energetic" },
{ id: 25335, word: "energize" },
{ id: 25336, word: "energy" },
{ id: 25341, word: "enforced" },
{ id: 25342, word: "enforcer" },
{ id: 25343, word: "engaged" },
{ id: 25344, word: "engaging" },
{ id: 25345, word: "engine" },
{ id: 25346, word: "engorge" },
{ id: 25351, word: "engraved" },
{ id: 25352, word: "engraver" },
{ id: 25353, word: "engraving" },
{ id: 25354, word: "engross" },
{ id: 25355, word: "engulf" },
{ id: 25356, word: "enhance" },
{ id: 25361, word: "enigmatic" },
{ id: 25362, word: "enjoyable" },
{ id: 25363, word: "enjoyably" },
{ id: 25364, word: "enjoyer" },
{ id: 25365, word: "enjoying" },
{ id: 25366, word: "enjoyment" },
{ id: 25411, word: "enlarged" },
{ id: 25412, word: "enlarging" },
{ id: 25413, word: "enlighten" },
{ id: 25414, word: "enlisted" },
{ id: 25415, word: "enquirer" },
{ id: 25416, word: "enrage" },
{ id: 25421, word: "enrich" },
{ id: 25422, word: "enroll" },
{ id: 25423, word: "enslave" },
{ id: 25424, word: "ensnare" },
{ id: 25425, word: "ensure" },
{ id: 25426, word: "entail" },
{ id: 25431, word: "entangled" },
{ id: 25432, word: "entering" },
{ id: 25433, word: "entertain" },
{ id: 25434, word: "enticing" },
{ id: 25435, word: "entire" },
{ id: 25436, word: "entitle" },
{ id: 25441, word: "entity" },
{ id: 25442, word: "entomb" },
{ id: 25443, word: "entourage" },
{ id: 25444, word: "entrap" },
{ id: 25445, word: "entree" },
{ id: 25446, word: "entrench" },
{ id: 25451, word: "entrust" },
{ id: 25452, word: "entryway" },
{ id: 25453, word: "entwine" },
{ id: 25454, word: "enunciate" },
{ id: 25455, word: "envelope" },
{ id: 25456, word: "enviable" },
{ id: 25461, word: "enviably" },
{ id: 25462, word: "envious" },
{ id: 25463, word: "envision" },
{ id: 25464, word: "envoy" },
{ id: 25465, word: "envy" },
{ id: 25466, word: "enzyme" },
{ id: 25511, word: "epic" },
{ id: 25512, word: "epidemic" },
{ id: 25513, word: "epidermal" },
{ id: 25514, word: "epidermis" },
{ id: 25515, word: "epidural" },
{ id: 25516, word: "epilepsy" },
{ id: 25521, word: "epileptic" },
{ id: 25522, word: "epilogue" },
{ id: 25523, word: "epiphany" },
{ id: 25524, word: "episode" },
{ id: 25525, word: "equal" },
{ id: 25526, word: "equate" },
{ id: 25531, word: "equation" },
{ id: 25532, word: "equator" },
{ id: 25533, word: "equinox" },
{ id: 25534, word: "equipment" },
{ id: 25535, word: "equity" },
{ id: 25536, word: "equivocal" },
{ id: 25541, word: "eradicate" },
{ id: 25542, word: "erasable" },
{ id: 25543, word: "erased" },
{ id: 25544, word: "eraser" },
{ id: 25545, word: "erasure" },
{ id: 25546, word: "ergonomic" },
{ id: 25551, word: "errand" },
{ id: 25552, word: "errant" },
{ id: 25553, word: "erratic" },
{ id: 25554, word: "error" },
{ id: 25555, word: "erupt" },
{ id: 25556, word: "escalate" },
{ id: 25561, word: "escalator" },
{ id: 25562, word: "escapable" },
{ id: 25563, word: "escapade" },
{ id: 25564, word: "escapist" },
{ id: 25565, word: "escargot" },
{ id: 25566, word: "eskimo" },
{ id: 25611, word: "esophagus" },
{ id: 25612, word: "espionage" },
{ id: 25613, word: "espresso" },
{ id: 25614, word: "esquire" },
{ id: 25615, word: "essay" },
{ id: 25616, word: "essence" },
{ id: 25621, word: "essential" },
{ id: 25622, word: "establish" },
{ id: 25623, word: "estate" },
{ id: 25624, word: "esteemed" },
{ id: 25625, word: "estimate" },
{ id: 25626, word: "estimator" },
{ id: 25631, word: "estranged" },
{ id: 25632, word: "estrogen" },
{ id: 25633, word: "etching" },
{ id: 25634, word: "eternal" },
{ id: 25635, word: "eternity" },
{ id: 25636, word: "ethanol" },
{ id: 25641, word: "ether" },
{ id: 25642, word: "ethically" },
{ id: 25643, word: "ethics" },
{ id: 25644, word: "euphemism" },
{ id: 25645, word: "evacuate" },
{ id: 25646, word: "evacuee" },
{ id: 25651, word: "evade" },
{ id: 25652, word: "evaluate" },
{ id: 25653, word: "evaluator" },
{ id: 25654, word: "evaporate" },
{ id: 25655, word: "evasion" },
{ id: 25656, word: "evasive" },
{ id: 25661, word: "even" },
{ id: 25662, word: "everglade" },
{ id: 25663, word: "evergreen" },
{ id: 25664, word: "everybody" },
{ id: 25665, word: "everyday" },
{ id: 25666, word: "everyone" },
{ id: 26111, word: "evict" },
{ id: 26112, word: "evidence" },
{ id: 26113, word: "evident" },
{ id: 26114, word: "evil" },
{ id: 26115, word: "evoke" },
{ id: 26116, word: "evolution" },
{ id: 26121, word: "evolve" },
{ id: 26122, word: "exact" },
{ id: 26123, word: "exalted" },
{ id: 26124, word: "example" },
{ id: 26125, word: "excavate" },
{ id: 26126, word: "excavator" },
{ id: 26131, word: "exceeding" },
{ id: 26132, word: "exception" },
{ id: 26133, word: "excess" },
{ id: 26134, word: "exchange" },
{ id: 26135, word: "excitable" },
{ id: 26136, word: "exciting" },
{ id: 26141, word: "exclaim" },
{ id: 26142, word: "exclude" },
{ id: 26143, word: "excluding" },
{ id: 26144, word: "exclusion" },
{ id: 26145, word: "exclusive" },
{ id: 26146, word: "excretion" },
{ id: 26151, word: "excretory" },
{ id: 26152, word: "excursion" },
{ id: 26153, word: "excusable" },
{ id: 26154, word: "excusably" },
{ id: 26155, word: "excuse" },
{ id: 26156, word: "exemplary" },
{ id: 26161, word: "exemplify" },
{ id: 26162, word: "exemption" },
{ id: 26163, word: "exerciser" },
{ id: 26164, word: "exert" },
{ id: 26165, word: "exes" },
{ id: 26166, word: "exfoliate" },
{ id: 26211, word: "exhale" },
{ id: 26212, word: "exhaust" },
{ id: 26213, word: "exhume" },
{ id: 26214, word: "exile" },
{ id: 26215, word: "existing" },
{ id: 26216, word: "exit" },
{ id: 26221, word: "exodus" },
{ id: 26222, word: "exonerate" },
{ id: 26223, word: "exorcism" },
{ id: 26224, word: "exorcist" },
{ id: 26225, word: "expand" },
{ id: 26226, word: "expanse" },
{ id: 26231, word: "expansion" },
{ id: 26232, word: "expansive" },
{ id: 26233, word: "expectant" },
{ id: 26234, word: "expedited" },
{ id: 26235, word: "expediter" },
{ id: 26236, word: "expel" },
{ id: 26241, word: "expend" },
{ id: 26242, word: "expenses" },
{ id: 26243, word: "expensive" },
{ id: 26244, word: "expert" },
{ id: 26245, word: "expire" },
{ id: 26246, word: "expiring" },
{ id: 26251, word: "explain" },
{ id: 26252, word: "expletive" },
{ id: 26253, word: "explicit" },
{ id: 26254, word: "explode" },
{ id: 26255, word: "exploit" },
{ id: 26256, word: "explore" },
{ id: 26261, word: "exploring" },
{ id: 26262, word: "exponent" },
{ id: 26263, word: "exporter" },
{ id: 26264, word: "exposable" },
{ id: 26265, word: "expose" },
{ id: 26266, word: "exposure" },
{ id: 26311, word: "express" },
{ id: 26312, word: "expulsion" },
{ id: 26313, word: "exquisite" },
{ id: 26314, word: "extended" },
{ id: 26315, word: "extending" },
{ id: 26316, word: "extent" },
{ id: 26321, word: "extenuate" },
{ id: 26322, word: "exterior" },
{ id: 26323, word: "external" },
{ id: 26324, word: "extinct" },
{ id: 26325, word: "extortion" },
{ id: 26326, word: "extradite" },
{ id: 26331, word: "extras" },
{ id: 26332, word: "extrovert" },
{ id: 26333, word: "extrude" },
{ id: 26334, word: "extruding" },
{ id: 26335, word: "exuberant" },
{ id: 26336, word: "fable" },
{ id: 26341, word: "fabric" },
{ id: 26342, word: "fabulous" },
{ id: 26343, word: "facebook" },
{ id: 26344, word: "facecloth" },
{ id: 26345, word: "facedown" },
{ id: 26346, word: "faceless" },
{ id: 26351, word: "facelift" },
{ id: 26352, word: "faceplate" },
{ id: 26353, word: "faceted" },
{ id: 26354, word: "facial" },
{ id: 26355, word: "facility" },
{ id: 26356, word: "facing" },
{ id: 26361, word: "facsimile" },
{ id: 26362, word: "faction" },
{ id: 26363, word: "factoid" },
{ id: 26364, word: "factor" },
{ id: 26365, word: "factsheet" },
{ id: 26366, word: "factual" },
{ id: 26411, word: "faculty" },
{ id: 26412, word: "fade" },
{ id: 26413, word: "fading" },
{ id: 26414, word: "failing" },
{ id: 26415, word: "falcon" },
{ id: 26416, word: "fall" },
{ id: 26421, word: "false" },
{ id: 26422, word: "falsify" },
{ id: 26423, word: "fame" },
{ id: 26424, word: "familiar" },
{ id: 26425, word: "family" },
{ id: 26426, word: "famine" },
{ id: 26431, word: "famished" },
{ id: 26432, word: "fanatic" },
{ id: 26433, word: "fancied" },
{ id: 26434, word: "fanciness" },
{ id: 26435, word: "fancy" },
{ id: 26436, word: "fanfare" },
{ id: 26441, word: "fang" },
{ id: 26442, word: "fanning" },
{ id: 26443, word: "fantasize" },
{ id: 26444, word: "fantastic" },
{ id: 26445, word: "fantasy" },
{ id: 26446, word: "fascism" },
{ id: 26451, word: "fastball" },
{ id: 26452, word: "faster" },
{ id: 26453, word: "fasting" },
{ id: 26454, word: "fastness" },
{ id: 26455, word: "faucet" },
{ id: 26456, word: "favorable" },
{ id: 26461, word: "favorably" },
{ id: 26462, word: "favored" },
{ id: 26463, word: "favoring" },
{ id: 26464, word: "favorite" },
{ id: 26465, word: "fax" },
{ id: 26466, word: "feast" },
{ id: 26511, word: "federal" },
{ id: 26512, word: "fedora" },
{ id: 26513, word: "feeble" },
{ id: 26514, word: "feed" },
{ id: 26515, word: "feel" },
{ id: 26516, word: "feisty" },
{ id: 26521, word: "feline" },
{ id: 26522, word: "felt-tip" },
{ id: 26523, word: "feminine" },
{ id: 26524, word: "feminism" },
{ id: 26525, word: "feminist" },
{ id: 26526, word: "feminize" },
{ id: 26531, word: "femur" },
{ id: 26532, word: "fence" },
{ id: 26533, word: "fencing" },
{ id: 26534, word: "fender" },
{ id: 26535, word: "ferment" },
{ id: 26536, word: "fernlike" },
{ id: 26541, word: "ferocious" },
{ id: 26542, word: "ferocity" },
{ id: 26543, word: "ferret" },
{ id: 26544, word: "ferris" },
{ id: 26545, word: "ferry" },
{ id: 26546, word: "fervor" },
{ id: 26551, word: "fester" },
{ id: 26552, word: "festival" },
{ id: 26553, word: "festive" },
{ id: 26554, word: "festivity" },
{ id: 26555, word: "fetal" },
{ id: 26556, word: "fetch" },
{ id: 26561, word: "fever" },
{ id: 26562, word: "fiber" },
{ id: 26563, word: "fiction" },
{ id: 26564, word: "fiddle" },
{ id: 26565, word: "fiddling" },
{ id: 26566, word: "fidelity" },
{ id: 26611, word: "fidgeting" },
{ id: 26612, word: "fidgety" },
{ id: 26613, word: "fifteen" },
{ id: 26614, word: "fifth" },
{ id: 26615, word: "fiftieth" },
{ id: 26616, word: "fifty" },
{ id: 26621, word: "figment" },
{ id: 26622, word: "figure" },
{ id: 26623, word: "figurine" },
{ id: 26624, word: "filing" },
{ id: 26625, word: "filled" },
{ id: 26626, word: "filler" },
{ id: 26631, word: "filling" },
{ id: 26632, word: "film" },
{ id: 26633, word: "filter" },
{ id: 26634, word: "filth" },
{ id: 26635, word: "filtrate" },
{ id: 26636, word: "finale" },
{ id: 26641, word: "finalist" },
{ id: 26642, word: "finalize" },
{ id: 26643, word: "finally" },
{ id: 26644, word: "finance" },
{ id: 26645, word: "financial" },
{ id: 26646, word: "finch" },
{ id: 26651, word: "fineness" },
{ id: 26652, word: "finer" },
{ id: 26653, word: "finicky" },
{ id: 26654, word: "finished" },
{ id: 26655, word: "finisher" },
{ id: 26656, word: "finishing" },
{ id: 26661, word: "finite" },
{ id: 26662, word: "finless" },
{ id: 26663, word: "finlike" },
{ id: 26664, word: "fiscally" },
{ id: 26665, word: "fit" },
{ id: 26666, word: "five" },
{ id: 31111, word: "flaccid" },
{ id: 31112, word: "flagman" },
{ id: 31113, word: "flagpole" },
{ id: 31114, word: "flagship" },
{ id: 31115, word: "flagstick" },
{ id: 31116, word: "flagstone" },
{ id: 31121, word: "flail" },
{ id: 31122, word: "flakily" },
{ id: 31123, word: "flaky" },
{ id: 31124, word: "flame" },
{ id: 31125, word: "flammable" },
{ id: 31126, word: "flanked" },
{ id: 31131, word: "flanking" },
{ id: 31132, word: "flannels" },
{ id: 31133, word: "flap" },
{ id: 31134, word: "flaring" },
{ id: 31135, word: "flashback" },
{ id: 31136, word: "flashbulb" },
{ id: 31141, word: "flashcard" },
{ id: 31142, word: "flashily" },
{ id: 31143, word: "flashing" },
{ id: 31144, word: "flashy" },
{ id: 31145, word: "flask" },
{ id: 31146, word: "flatbed" },
{ id: 31151, word: "flatfoot" },
{ id: 31152, word: "flatly" },
{ id: 31153, word: "flatness" },
{ id: 31154, word: "flatten" },
{ id: 31155, word: "flattered" },
{ id: 31156, word: "flatterer" },
{ id: 31161, word: "flattery" },
{ id: 31162, word: "flattop" },
{ id: 31163, word: "flatware" },
{ id: 31164, word: "flatworm" },
{ id: 31165, word: "flavored" },
{ id: 31166, word: "flavorful" },
{ id: 31211, word: "flavoring" },
{ id: 31212, word: "flaxseed" },
{ id: 31213, word: "fled" },
{ id: 31214, word: "fleshed" },
{ id: 31215, word: "fleshy" },
{ id: 31216, word: "flick" },
{ id: 31221, word: "flier" },
{ id: 31222, word: "flight" },
{ id: 31223, word: "flinch" },
{ id: 31224, word: "fling" },
{ id: 31225, word: "flint" },
{ id: 31226, word: "flip" },
{ id: 31231, word: "flirt" },
{ id: 31232, word: "float" },
{ id: 31233, word: "flock" },
{ id: 31234, word: "flogging" },
{ id: 31235, word: "flop" },
{ id: 31236, word: "floral" },
{ id: 31241, word: "florist" },
{ id: 31242, word: "floss" },
{ id: 31243, word: "flounder" },
{ id: 31244, word: "flyable" },
{ id: 31245, word: "flyaway" },
{ id: 31246, word: "flyer" },
{ id: 31251, word: "flying" },
{ id: 31252, word: "flyover" },
{ id: 31253, word: "flypaper" },
{ id: 31254, word: "foam" },
{ id: 31255, word: "foe" },
{ id: 31256, word: "fog" },
{ id: 31261, word: "foil" },
{ id: 31262, word: "folic" },
{ id: 31263, word: "folk" },
{ id: 31264, word: "follicle" },
{ id: 31265, word: "follow" },
{ id: 31266, word: "fondling" },
{ id: 31311, word: "fondly" },
{ id: 31312, word: "fondness" },
{ id: 31313, word: "fondue" },
{ id: 31314, word: "font" },
{ id: 31315, word: "food" },
{ id: 31316, word: "fool" },
{ id: 31321, word: "footage" },
{ id: 31322, word: "football" },
{ id: 31323, word: "footbath" },
{ id: 31324, word: "footboard" },
{ id: 31325, word: "footer" },
{ id: 31326, word: "footgear" },
{ id: 31331, word: "foothill" },
{ id: 31332, word: "foothold" },
{ id: 31333, word: "footing" },
{ id: 31334, word: "footless" },
{ id: 31335, word: "footman" },
{ id: 31336, word: "footnote" },
{ id: 31341, word: "footpad" },
{ id: 31342, word: "footpath" },
{ id: 31343, word: "footprint" },
{ id: 31344, word: "footrest" },
{ id: 31345, word: "footsie" },
{ id: 31346, word: "footsore" },
{ id: 31351, word: "footwear" },
{ id: 31352, word: "footwork" },
{ id: 31353, word: "fossil" },
{ id: 31354, word: "foster" },
{ id: 31355, word: "founder" },
{ id: 31356, word: "founding" },
{ id: 31361, word: "fountain" },
{ id: 31362, word: "fox" },
{ id: 31363, word: "foyer" },
{ id: 31364, word: "fraction" },
{ id: 31365, word: "fracture" },
{ id: 31366, word: "fragile" },
{ id: 31411, word: "fragility" },
{ id: 31412, word: "fragment" },
{ id: 31413, word: "fragrance" },
{ id: 31414, word: "fragrant" },
{ id: 31415, word: "frail" },
{ id: 31416, word: "frame" },
{ id: 31421, word: "framing" },
{ id: 31422, word: "frantic" },
{ id: 31423, word: "fraternal" },
{ id: 31424, word: "frayed" },
{ id: 31425, word: "fraying" },
{ id: 31426, word: "frays" },
{ id: 31431, word: "freckled" },
{ id: 31432, word: "freckles" },
{ id: 31433, word: "freebase" },
{ id: 31434, word: "freebee" },
{ id: 31435, word: "freebie" },
{ id: 31436, word: "freedom" },
{ id: 31441, word: "freefall" },
{ id: 31442, word: "freehand" },
{ id: 31443, word: "freeing" },
{ id: 31444, word: "freeload" },
{ id: 31445, word: "freely" },
{ id: 31446, word: "freemason" },
{ id: 31451, word: "freeness" },
{ id: 31452, word: "freestyle" },
{ id: 31453, word: "freeware" },
{ id: 31454, word: "freeway" },
{ id: 31455, word: "freewill" },
{ id: 31456, word: "freezable" },
{ id: 31461, word: "freezing" },
{ id: 31462, word: "freight" },
{ id: 31463, word: "french" },
{ id: 31464, word: "frenzied" },
{ id: 31465, word: "frenzy" },
{ id: 31466, word: "frequency" },
{ id: 31511, word: "frequent" },
{ id: 31512, word: "fresh" },
{ id: 31513, word: "fretful" },
{ id: 31514, word: "fretted" },
{ id: 31515, word: "friction" },
{ id: 31516, word: "friday" },
{ id: 31521, word: "fridge" },
{ id: 31522, word: "fried" },
{ id: 31523, word: "friend" },
{ id: 31524, word: "frighten" },
{ id: 31525, word: "frightful" },
{ id: 31526, word: "frigidity" },
{ id: 31531, word: "frigidly" },
{ id: 31532, word: "frill" },
{ id: 31533, word: "fringe" },
{ id: 31534, word: "frisbee" },
{ id: 31535, word: "frisk" },
{ id: 31536, word: "fritter" },
{ id: 31541, word: "frivolous" },
{ id: 31542, word: "frolic" },
{ id: 31543, word: "from" },
{ id: 31544, word: "front" },
{ id: 31545, word: "frostbite" },
{ id: 31546, word: "frosted" },
{ id: 31551, word: "frostily" },
{ id: 31552, word: "frosting" },
{ id: 31553, word: "frostlike" },
{ id: 31554, word: "frosty" },
{ id: 31555, word: "froth" },
{ id: 31556, word: "frown" },
{ id: 31561, word: "frozen" },
{ id: 31562, word: "fructose" },
{ id: 31563, word: "frugality" },
{ id: 31564, word: "frugally" },
{ id: 31565, word: "fruit" },
{ id: 31566, word: "frustrate" },
{ id: 31611, word: "frying" },
{ id: 31612, word: "gab" },
{ id: 31613, word: "gaffe" },
{ id: 31614, word: "gag" },
{ id: 31615, word: "gainfully" },
{ id: 31616, word: "gaining" },
{ id: 31621, word: "gains" },
{ id: 31622, word: "gala" },
{ id: 31623, word: "gallantly" },
{ id: 31624, word: "galleria" },
{ id: 31625, word: "gallery" },
{ id: 31626, word: "galley" },
{ id: 31631, word: "gallon" },
{ id: 31632, word: "gallows" },
{ id: 31633, word: "gallstone" },
{ id: 31634, word: "galore" },
{ id: 31635, word: "galvanize" },
{ id: 31636, word: "gambling" },
{ id: 31641, word: "game" },
{ id: 31642, word: "gaming" },
{ id: 31643, word: "gamma" },
{ id: 31644, word: "gander" },
{ id: 31645, word: "gangly" },
{ id: 31646, word: "gangrene" },
{ id: 31651, word: "gangway" },
{ id: 31652, word: "gap" },
{ id: 31653, word: "garage" },
{ id: 31654, word: "garbage" },
{ id: 31655, word: "garden" },
{ id: 31656, word: "gargle" },
{ id: 31661, word: "garland" },
{ id: 31662, word: "garlic" },
{ id: 31663, word: "garment" },
{ id: 31664, word: "garnet" },
{ id: 31665, word: "garnish" },
{ id: 31666, word: "garter" },
{ id: 32111, word: "gas" },
{ id: 32112, word: "gatherer" },
{ id: 32113, word: "gathering" },
{ id: 32114, word: "gating" },
{ id: 32115, word: "gauging" },
{ id: 32116, word: "gauntlet" },
{ id: 32121, word: "gauze" },
{ id: 32122, word: "gave" },
{ id: 32123, word: "gawk" },
{ id: 32124, word: "gazing" },
{ id: 32125, word: "gear" },
{ id: 32126, word: "gecko" },
{ id: 32131, word: "geek" },
{ id: 32132, word: "geiger" },
{ id: 32133, word: "gem" },
{ id: 32134, word: "gender" },
{ id: 32135, word: "generic" },
{ id: 32136, word: "generous" },
{ id: 32141, word: "genetics" },
{ id: 32142, word: "genre" },
{ id: 32143, word: "gentile" },
{ id: 32144, word: "gentleman" },
{ id: 32145, word: "gently" },
{ id: 32146, word: "gents" },
{ id: 32151, word: "geography" },
{ id: 32152, word: "geologic" },
{ id: 32153, word: "geologist" },
{ id: 32154, word: "geology" },
{ id: 32155, word: "geometric" },
{ id: 32156, word: "geometry" },
{ id: 32161, word: "geranium" },
{ id: 32162, word: "gerbil" },
{ id: 32163, word: "geriatric" },
{ id: 32164, word: "germicide" },
{ id: 32165, word: "germinate" },
{ id: 32166, word: "germless" },
{ id: 32211, word: "germproof" },
{ id: 32212, word: "gestate" },
{ id: 32213, word: "gestation" },
{ id: 32214, word: "gesture" },
{ id: 32215, word: "getaway" },
{ id: 32216, word: "getting" },
{ id: 32221, word: "getup" },
{ id: 32222, word: "giant" },
{ id: 32223, word: "gibberish" },
{ id: 32224, word: "giblet" },
{ id: 32225, word: "giddily" },
{ id: 32226, word: "giddiness" },
{ id: 32231, word: "giddy" },
{ id: 32232, word: "gift" },
{ id: 32233, word: "gigabyte" },
{ id: 32234, word: "gigahertz" },
{ id: 32235, word: "gigantic" },
{ id: 32236, word: "giggle" },
{ id: 32241, word: "giggling" },
{ id: 32242, word: "giggly" },
{ id: 32243, word: "gigolo" },
{ id: 32244, word: "gilled" },
{ id: 32245, word: "gills" },
{ id: 32246, word: "gimmick" },
{ id: 32251, word: "girdle" },
{ id: 32252, word: "giveaway" },
{ id: 32253, word: "given" },
{ id: 32254, word: "giver" },
{ id: 32255, word: "giving" },
{ id: 32256, word: "gizmo" },
{ id: 32261, word: "gizzard" },
{ id: 32262, word: "glacial" },
{ id: 32263, word: "glacier" },
{ id: 32264, word: "glade" },
{ id: 32265, word: "gladiator" },
{ id: 32266, word: "gladly" },
{ id: 32311, word: "glamorous" },
{ id: 32312, word: "glamour" },
{ id: 32313, word: "glance" },
{ id: 32314, word: "glancing" },
{ id: 32315, word: "glandular" },
{ id: 32316, word: "glare" },
{ id: 32321, word: "glaring" },
{ id: 32322, word: "glass" },
{ id: 32323, word: "glaucoma" },
{ id: 32324, word: "glazing" },
{ id: 32325, word: "gleaming" },
{ id: 32326, word: "gleeful" },
{ id: 32331, word: "glider" },
{ id: 32332, word: "gliding" },
{ id: 32333, word: "glimmer" },
{ id: 32334, word: "glimpse" },
{ id: 32335, word: "glisten" },
{ id: 32336, word: "glitch" },
{ id: 32341, word: "glitter" },
{ id: 32342, word: "glitzy" },
{ id: 32343, word: "gloater" },
{ id: 32344, word: "gloating" },
{ id: 32345, word: "gloomily" },
{ id: 32346, word: "gloomy" },
{ id: 32351, word: "glorified" },
{ id: 32352, word: "glorifier" },
{ id: 32353, word: "glorify" },
{ id: 32354, word: "glorious" },
{ id: 32355, word: "glory" },
{ id: 32356, word: "gloss" },
{ id: 32361, word: "glove" },
{ id: 32362, word: "glowing" },
{ id: 32363, word: "glowworm" },
{ id: 32364, word: "glucose" },
{ id: 32365, word: "glue" },
{ id: 32366, word: "gluten" },
{ id: 32411, word: "glutinous" },
{ id: 32412, word: "glutton" },
{ id: 32413, word: "gnarly" },
{ id: 32414, word: "gnat" },
{ id: 32415, word: "goal" },
{ id: 32416, word: "goatskin" },
{ id: 32421, word: "goes" },
{ id: 32422, word: "goggles" },
{ id: 32423, word: "going" },
{ id: 32424, word: "goldfish" },
{ id: 32425, word: "goldmine" },
{ id: 32426, word: "goldsmith" },
{ id: 32431, word: "golf" },
{ id: 32432, word: "goliath" },
{ id: 32433, word: "gonad" },
{ id: 32434, word: "gondola" },
{ id: 32435, word: "gone" },
{ id: 32436, word: "gong" },
{ id: 32441, word: "good" },
{ id: 32442, word: "gooey" },
{ id: 32443, word: "goofball" },
{ id: 32444, word: "goofiness" },
{ id: 32445, word: "goofy" },
{ id: 32446, word: "google" },
{ id: 32451, word: "goon" },
{ id: 32452, word: "gopher" },
{ id: 32453, word: "gore" },
{ id: 32454, word: "gorged" },
{ id: 32455, word: "gorgeous" },
{ id: 32456, word: "gory" },
{ id: 32461, word: "gosling" },
{ id: 32462, word: "gossip" },
{ id: 32463, word: "gothic" },
{ id: 32464, word: "gotten" },
{ id: 32465, word: "gout" },
{ id: 32466, word: "gown" },
{ id: 32511, word: "grab" },
{ id: 32512, word: "graceful" },
{ id: 32513, word: "graceless" },
{ id: 32514, word: "gracious" },
{ id: 32515, word: "gradation" },
{ id: 32516, word: "graded" },
{ id: 32521, word: "grader" },
{ id: 32522, word: "gradient" },
{ id: 32523, word: "grading" },
{ id: 32524, word: "gradually" },
{ id: 32525, word: "graduate" },
{ id: 32526, word: "graffiti" },
{ id: 32531, word: "grafted" },
{ id: 32532, word: "grafting" },
{ id: 32533, word: "grain" },
{ id: 32534, word: "granddad" },
{ id: 32535, word: "grandkid" },
{ id: 32536, word: "grandly" },
{ id: 32541, word: "grandma" },
{ id: 32542, word: "grandpa" },
{ id: 32543, word: "grandson" },
{ id: 32544, word: "granite" },
{ id: 32545, word: "granny" },
{ id: 32546, word: "granola" },
{ id: 32551, word: "grant" },
{ id: 32552, word: "granular" },
{ id: 32553, word: "grape" },
{ id: 32554, word: "graph" },
{ id: 32555, word: "grapple" },
{ id: 32556, word: "grappling" },
{ id: 32561, word: "grasp" },
{ id: 32562, word: "grass" },
{ id: 32563, word: "gratified" },
{ id: 32564, word: "gratify" },
{ id: 32565, word: "grating" },
{ id: 32566, word: "gratitude" },
{ id: 32611, word: "gratuity" },
{ id: 32612, word: "gravel" },
{ id: 32613, word: "graveness" },
{ id: 32614, word: "graves" },
{ id: 32615, word: "graveyard" },
{ id: 32616, word: "gravitate" },
{ id: 32621, word: "gravity" },
{ id: 32622, word: "gravy" },
{ id: 32623, word: "gray" },
{ id: 32624, word: "grazing" },
{ id: 32625, word: "greasily" },
{ id: 32626, word: "greedily" },
{ id: 32631, word: "greedless" },
{ id: 32632, word: "greedy" },
{ id: 32633, word: "green" },
{ id: 32634, word: "greeter" },
{ id: 32635, word: "greeting" },
{ id: 32636, word: "grew" },
{ id: 32641, word: "greyhound" },
{ id: 32642, word: "grid" },
{ id: 32643, word: "grief" },
{ id: 32644, word: "grievance" },
{ id: 32645, word: "grieving" },
{ id: 32646, word: "grievous" },
{ id: 32651, word: "grill" },
{ id: 32652, word: "grimace" },
{ id: 32653, word: "grimacing" },
{ id: 32654, word: "grime" },
{ id: 32655, word: "griminess" },
{ id: 32656, word: "grimy" },
{ id: 32661, word: "grinch" },
{ id: 32662, word: "grinning" },
{ id: 32663, word: "grip" },
{ id: 32664, word: "gristle" },
{ id: 32665, word: "grit" },
{ id: 32666, word: "groggily" },
{ id: 33111, word: "groggy" },
{ id: 33112, word: "groin" },
{ id: 33113, word: "groom" },
{ id: 33114, word: "groove" },
{ id: 33115, word: "grooving" },
{ id: 33116, word: "groovy" },
{ id: 33121, word: "grope" },
{ id: 33122, word: "ground" },
{ id: 33123, word: "grouped" },
{ id: 33124, word: "grout" },
{ id: 33125, word: "grove" },
{ id: 33126, word: "grower" },
{ id: 33131, word: "growing" },
{ id: 33132, word: "growl" },
{ id: 33133, word: "grub" },
{ id: 33134, word: "grudge" },
{ id: 33135, word: "grudging" },
{ id: 33136, word: "grueling" },
{ id: 33141, word: "gruffly" },
{ id: 33142, word: "grumble" },
{ id: 33143, word: "grumbling" },
{ id: 33144, word: "grumbly" },
{ id: 33145, word: "grumpily" },
{ id: 33146, word: "grunge" },
{ id: 33151, word: "grunt" },
{ id: 33152, word: "guacamole" },
{ id: 33153, word: "guidable" },
{ id: 33154, word: "guidance" },
{ id: 33155, word: "guide" },
{ id: 33156, word: "guiding" },
{ id: 33161, word: "guileless" },
{ id: 33162, word: "guise" },
{ id: 33163, word: "gulf" },
{ id: 33164, word: "gullible" },
{ id: 33165, word: "gully" },
{ id: 33166, word: "gulp" },
{ id: 33211, word: "gumball" },
{ id: 33212, word: "gumdrop" },
{ id: 33213, word: "gumminess" },
{ id: 33214, word: "gumming" },
{ id: 33215, word: "gummy" },
{ id: 33216, word: "gurgle" },
{ id: 33221, word: "gurgling" },
{ id: 33222, word: "guru" },
{ id: 33223, word: "gush" },
{ id: 33224, word: "gusto" },
{ id: 33225, word: "gusty" },
{ id: 33226, word: "gutless" },
{ id: 33231, word: "guts" },
{ id: 33232, word: "gutter" },
{ id: 33233, word: "guy" },
{ id: 33234, word: "guzzler" },
{ id: 33235, word: "gyration" },
{ id: 33236, word: "habitable" },
{ id: 33241, word: "habitant" },
{ id: 33242, word: "habitat" },
{ id: 33243, word: "habitual" },
{ id: 33244, word: "hacked" },
{ id: 33245, word: "hacker" },
{ id: 33246, word: "hacking" },
{ id: 33251, word: "hacksaw" },
{ id: 33252, word: "had" },
{ id: 33253, word: "haggler" },
{ id: 33254, word: "haiku" },
{ id: 33255, word: "half" },
{ id: 33256, word: "halogen" },
{ id: 33261, word: "halt" },
{ id: 33262, word: "halved" },
{ id: 33263, word: "halves" },
{ id: 33264, word: "hamburger" },
{ id: 33265, word: "hamlet" },
{ id: 33266, word: "hammock" },
{ id: 33311, word: "hamper" },
{ id: 33312, word: "hamster" },
{ id: 33313, word: "hamstring" },
{ id: 33314, word: "handbag" },
{ id: 33315, word: "handball" },
{ id: 33316, word: "handbook" },
{ id: 33321, word: "handbrake" },
{ id: 33322, word: "handcart" },
{ id: 33323, word: "handclap" },
{ id: 33324, word: "handclasp" },
{ id: 33325, word: "handcraft" },
{ id: 33326, word: "handcuff" },
{ id: 33331, word: "handed" },
{ id: 33332, word: "handful" },
{ id: 33333, word: "handgrip" },
{ id: 33334, word: "handgun" },
{ id: 33335, word: "handheld" },
{ id: 33336, word: "handiness" },
{ id: 33341, word: "handiwork" },
{ id: 33342, word: "handlebar" },
{ id: 33343, word: "handled" },
{ id: 33344, word: "handler" },
{ id: 33345, word: "handling" },
{ id: 33346, word: "handmade" },
{ id: 33351, word: "handoff" },
{ id: 33352, word: "handpick" },
{ id: 33353, word: "handprint" },
{ id: 33354, word: "handrail" },
{ id: 33355, word: "handsaw" },
{ id: 33356, word: "handset" },
{ id: 33361, word: "handsfree" },
{ id: 33362, word: "handshake" },
{ id: 33363, word: "handstand" },
{ id: 33364, word: "handwash" },
{ id: 33365, word: "handwork" },
{ id: 33366, word: "handwoven" },
{ id: 33411, word: "handwrite" },
{ id: 33412, word: "handyman" },
{ id: 33413, word: "hangnail" },
{ id: 33414, word: "hangout" },
{ id: 33415, word: "hangover" },
{ id: 33416, word: "hangup" },
{ id: 33421, word: "hankering" },
{ id: 33422, word: "hankie" },
{ id: 33423, word: "hanky" },
{ id: 33424, word: "haphazard" },
{ id: 33425, word: "happening" },
{ id: 33426, word: "happier" },
{ id: 33431, word: "happiest" },
{ id: 33432, word: "happily" },
{ id: 33433, word: "happiness" },
{ id: 33434, word: "happy" },
{ id: 33435, word: "harbor" },
{ id: 33436, word: "hardcopy" },
{ id: 33441, word: "hardcore" },
{ id: 33442, word: "hardcover" },
{ id: 33443, word: "harddisk" },
{ id: 33444, word: "hardened" },
{ id: 33445, word: "hardener" },
{ id: 33446, word: "hardening" },
{ id: 33451, word: "hardhat" },
{ id: 33452, word: "hardhead" },
{ id: 33453, word: "hardiness" },
{ id: 33454, word: "hardly" },
{ id: 33455, word: "hardness" },
{ id: 33456, word: "hardship" },
{ id: 33461, word: "hardware" },
{ id: 33462, word: "hardwired" },
{ id: 33463, word: "hardwood" },
{ id: 33464, word: "hardy" },
{ id: 33465, word: "harmful" },
{ id: 33466, word: "harmless" },
{ id: 33511, word: "harmonica" },
{ id: 33512, word: "harmonics" },
{ id: 33513, word: "harmonize" },
{ id: 33514, word: "harmony" },
{ id: 33515, word: "harness" },
{ id: 33516, word: "harpist" },
{ id: 33521, word: "harsh" },
{ id: 33522, word: "harvest" },
{ id: 33523, word: "hash" },
{ id: 33524, word: "hassle" },
{ id: 33525, word: "haste" },
{ id: 33526, word: "hastily" },
{ id: 33531, word: "hastiness" },
{ id: 33532, word: "hasty" },
{ id: 33533, word: "hatbox" },
{ id: 33534, word: "hatchback" },
{ id: 33535, word: "hatchery" },
{ id: 33536, word: "hatchet" },
{ id: 33541, word: "hatching" },
{ id: 33542, word: "hatchling" },
{ id: 33543, word: "hate" },
{ id: 33544, word: "hatless" },
{ id: 33545, word: "hatred" },
{ id: 33546, word: "haunt" },
{ id: 33551, word: "haven" },
{ id: 33552, word: "hazard" },
{ id: 33553, word: "hazelnut" },
{ id: 33554, word: "hazily" },
{ id: 33555, word: "haziness" },
{ id: 33556, word: "hazing" },
{ id: 33561, word: "hazy" },
{ id: 33562, word: "headache" },
{ id: 33563, word: "headband" },
{ id: 33564, word: "headboard" },
{ id: 33565, word: "headcount" },
{ id: 33566, word: "headdress" },
{ id: 33611, word: "headed" },
{ id: 33612, word: "header" },
{ id: 33613, word: "headfirst" },
{ id: 33614, word: "headgear" },
{ id: 33615, word: "heading" },
{ id: 33616, word: "headlamp" },
{ id: 33621, word: "headless" },
{ id: 33622, word: "headlock" },
{ id: 33623, word: "headphone" },
{ id: 33624, word: "headpiece" },
{ id: 33625, word: "headrest" },
{ id: 33626, word: "headroom" },
{ id: 33631, word: "headscarf" },
{ id: 33632, word: "headset" },
{ id: 33633, word: "headsman" },
{ id: 33634, word: "headstand" },
{ id: 33635, word: "headstone" },
{ id: 33636, word: "headway" },
{ id: 33641, word: "headwear" },
{ id: 33642, word: "heap" },
{ id: 33643, word: "heat" },
{ id: 33644, word: "heave" },
{ id: 33645, word: "heavily" },
{ id: 33646, word: "heaviness" },
{ id: 33651, word: "heaving" },
{ id: 33652, word: "hedge" },
{ id: 33653, word: "hedging" },
{ id: 33654, word: "heftiness" },
{ id: 33655, word: "hefty" },
{ id: 33656, word: "helium" },
{ id: 33661, word: "helmet" },
{ id: 33662, word: "helper" },
{ id: 33663, word: "helpful" },
{ id: 33664, word: "helping" },
{ id: 33665, word: "helpless" },
{ id: 33666, word: "helpline" },
{ id: 34111, word: "hemlock" },
{ id: 34112, word: "hemstitch" },
{ id: 34113, word: "hence" },
{ id: 34114, word: "henchman" },
{ id: 34115, word: "henna" },
{ id: 34116, word: "herald" },
{ id: 34121, word: "herbal" },
{ id: 34122, word: "herbicide" },
{ id: 34123, word: "herbs" },
{ id: 34124, word: "heritage" },
{ id: 34125, word: "hermit" },
{ id: 34126, word: "heroics" },
{ id: 34131, word: "heroism" },
{ id: 34132, word: "herring" },
{ id: 34133, word: "herself" },
{ id: 34134, word: "hertz" },
{ id: 34135, word: "hesitancy" },
{ id: 34136, word: "hesitant" },
{ id: 34141, word: "hesitate" },
{ id: 34142, word: "hexagon" },
{ id: 34143, word: "hexagram" },
{ id: 34144, word: "hubcap" },
{ id: 34145, word: "huddle" },
{ id: 34146, word: "huddling" },
{ id: 34151, word: "huff" },
{ id: 34152, word: "hug" },
{ id: 34153, word: "hula" },
{ id: 34154, word: "hulk" },
{ id: 34155, word: "hull" },
{ id: 34156, word: "human" },
{ id: 34161, word: "humble" },
{ id: 34162, word: "humbling" },
{ id: 34163, word: "humbly" },
{ id: 34164, word: "humid" },
{ id: 34165, word: "humiliate" },
{ id: 34166, word: "humility" },
{ id: 34211, word: "humming" },
{ id: 34212, word: "hummus" },
{ id: 34213, word: "humongous" },
{ id: 34214, word: "humorist" },
{ id: 34215, word: "humorless" },
{ id: 34216, word: "humorous" },
{ id: 34221, word: "humpback" },
{ id: 34222, word: "humped" },
{ id: 34223, word: "humvee" },
{ id: 34224, word: "hunchback" },
{ id: 34225, word: "hundredth" },
{ id: 34226, word: "hunger" },
{ id: 34231, word: "hungrily" },
{ id: 34232, word: "hungry" },
{ id: 34233, word: "hunk" },
{ id: 34234, word: "hunter" },
{ id: 34235, word: "hunting" },
{ id: 34236, word: "huntress" },
{ id: 34241, word: "huntsman" },
{ id: 34242, word: "hurdle" },
{ id: 34243, word: "hurled" },
{ id: 34244, word: "hurler" },
{ id: 34245, word: "hurling" },
{ id: 34246, word: "hurray" },
{ id: 34251, word: "hurricane" },
{ id: 34252, word: "hurried" },
{ id: 34253, word: "hurry" },
{ id: 34254, word: "hurt" },
{ id: 34255, word: "husband" },
{ id: 34256, word: "hush" },
{ id: 34261, word: "husked" },
{ id: 34262, word: "huskiness" },
{ id: 34263, word: "hut" },
{ id: 34264, word: "hybrid" },
{ id: 34265, word: "hydrant" },
{ id: 34266, word: "hydrated" },
{ id: 34311, word: "hydration" },
{ id: 34312, word: "hydrogen" },
{ id: 34313, word: "hydroxide" },
{ id: 34314, word: "hyperlink" },
{ id: 34315, word: "hypertext" },
{ id: 34316, word: "hyphen" },
{ id: 34321, word: "hypnoses" },
{ id: 34322, word: "hypnosis" },
{ id: 34323, word: "hypnotic" },
{ id: 34324, word: "hypnotism" },
{ id: 34325, word: "hypnotist" },
{ id: 34326, word: "hypnotize" },
{ id: 34331, word: "hypocrisy" },
{ id: 34332, word: "hypocrite" },
{ id: 34333, word: "ibuprofen" },
{ id: 34334, word: "ice" },
{ id: 34335, word: "iciness" },
{ id: 34336, word: "icing" },
{ id: 34341, word: "icky" },
{ id: 34342, word: "icon" },
{ id: 34343, word: "icy" },
{ id: 34344, word: "idealism" },
{ id: 34345, word: "idealist" },
{ id: 34346, word: "idealize" },
{ id: 34351, word: "ideally" },
{ id: 34352, word: "idealness" },
{ id: 34353, word: "identical" },
{ id: 34354, word: "identify" },
{ id: 34355, word: "identity" },
{ id: 34356, word: "ideology" },
{ id: 34361, word: "idiocy" },
{ id: 34362, word: "idiom" },
{ id: 34363, word: "idly" },
{ id: 34364, word: "igloo" },
{ id: 34365, word: "ignition" },
{ id: 34366, word: "ignore" },
{ id: 34411, word: "iguana" },
{ id: 34412, word: "illicitly" },
{ id: 34413, word: "illusion" },
{ id: 34414, word: "illusive" },
{ id: 34415, word: "image" },
{ id: 34416, word: "imaginary" },
{ id: 34421, word: "imagines" },
{ id: 34422, word: "imaging" },
{ id: 34423, word: "imbecile" },
{ id: 34424, word: "imitate" },
{ id: 34425, word: "imitation" },
{ id: 34426, word: "immature" },
{ id: 34431, word: "immerse" },
{ id: 34432, word: "immersion" },
{ id: 34433, word: "imminent" },
{ id: 34434, word: "immobile" },
{ id: 34435, word: "immodest" },
{ id: 34436, word: "immorally" },
{ id: 34441, word: "immortal" },
{ id: 34442, word: "immovable" },
{ id: 34443, word: "immovably" },
{ id: 34444, word: "immunity" },
{ id: 34445, word: "immunize" },
{ id: 34446, word: "impaired" },
{ id: 34451, word: "impale" },
{ id: 34452, word: "impart" },
{ id: 34453, word: "impatient" },
{ id: 34454, word: "impeach" },
{ id: 34455, word: "impeding" },
{ id: 34456, word: "impending" },
{ id: 34461, word: "imperfect" },
{ id: 34462, word: "imperial" },
{ id: 34463, word: "impish" },
{ id: 34464, word: "implant" },
{ id: 34465, word: "implement" },
{ id: 34466, word: "implicate" },
{ id: 34511, word: "implicit" },
{ id: 34512, word: "implode" },
{ id: 34513, word: "implosion" },
{ id: 34514, word: "implosive" },
{ id: 34515, word: "imply" },
{ id: 34516, word: "impolite" },
{ id: 34521, word: "important" },
{ id: 34522, word: "importer" },
{ id: 34523, word: "impose" },
{ id: 34524, word: "imposing" },
{ id: 34525, word: "impotence" },
{ id: 34526, word: "impotency" },
{ id: 34531, word: "impotent" },
{ id: 34532, word: "impound" },
{ id: 34533, word: "imprecise" },
{ id: 34534, word: "imprint" },
{ id: 34535, word: "imprison" },
{ id: 34536, word: "impromptu" },
{ id: 34541, word: "improper" },
{ id: 34542, word: "improve" },
{ id: 34543, word: "improving" },
{ id: 34544, word: "improvise" },
{ id: 34545, word: "imprudent" },
{ id: 34546, word: "impulse" },
{ id: 34551, word: "impulsive" },
{ id: 34552, word: "impure" },
{ id: 34553, word: "impurity" },
{ id: 34554, word: "iodine" },
{ id: 34555, word: "iodize" },
{ id: 34556, word: "ion" },
{ id: 34561, word: "ipad" },
{ id: 34562, word: "iphone" },
{ id: 34563, word: "ipod" },
{ id: 34564, word: "irate" },
{ id: 34565, word: "irk" },
{ id: 34566, word: "iron" },
{ id: 34611, word: "irregular" },
{ id: 34612, word: "irrigate" },
{ id: 34613, word: "irritable" },
{ id: 34614, word: "irritably" },
{ id: 34615, word: "irritant" },
{ id: 34616, word: "irritate" },
{ id: 34621, word: "islamic" },
{ id: 34622, word: "islamist" },
{ id: 34623, word: "isolated" },
{ id: 34624, word: "isolating" },
{ id: 34625, word: "isolation" },
{ id: 34626, word: "isotope" },
{ id: 34631, word: "issue" },
{ id: 34632, word: "issuing" },
{ id: 34633, word: "italicize" },
{ id: 34634, word: "italics" },
{ id: 34635, word: "item" },
{ id: 34636, word: "itinerary" },
{ id: 34641, word: "itunes" },
{ id: 34642, word: "ivory" },
{ id: 34643, word: "ivy" },
{ id: 34644, word: "jab" },
{ id: 34645, word: "jackal" },
{ id: 34646, word: "jacket" },
{ id: 34651, word: "jackknife" },
{ id: 34652, word: "jackpot" },
{ id: 34653, word: "jailbird" },
{ id: 34654, word: "jailbreak" },
{ id: 34655, word: "jailer" },
{ id: 34656, word: "jailhouse" },
{ id: 34661, word: "jalapeno" },
{ id: 34662, word: "jam" },
{ id: 34663, word: "janitor" },
{ id: 34664, word: "january" },
{ id: 34665, word: "jargon" },
{ id: 34666, word: "jarring" },
{ id: 35111, word: "jasmine" },
{ id: 35112, word: "jaundice" },
{ id: 35113, word: "jaunt" },
{ id: 35114, word: "java" },
{ id: 35115, word: "jawed" },
{ id: 35116, word: "jawless" },
{ id: 35121, word: "jawline" },
{ id: 35122, word: "jaws" },
{ id: 35123, word: "jaybird" },
{ id: 35124, word: "jaywalker" },
{ id: 35125, word: "jazz" },
{ id: 35126, word: "jeep" },
{ id: 35131, word: "jeeringly" },
{ id: 35132, word: "jellied" },
{ id: 35133, word: "jelly" },
{ id: 35134, word: "jersey" },
{ id: 35135, word: "jester" },
{ id: 35136, word: "jet" },
{ id: 35141, word: "jiffy" },
{ id: 35142, word: "jigsaw" },
{ id: 35143, word: "jimmy" },
{ id: 35144, word: "jingle" },
{ id: 35145, word: "jingling" },
{ id: 35146, word: "jinx" },
{ id: 35151, word: "jitters" },
{ id: 35152, word: "jittery" },
{ id: 35153, word: "job" },
{ id: 35154, word: "jockey" },
{ id: 35155, word: "jockstrap" },
{ id: 35156, word: "jogger" },
{ id: 35161, word: "jogging" },
{ id: 35162, word: "john" },
{ id: 35163, word: "joining" },
{ id: 35164, word: "jokester" },
{ id: 35165, word: "jokingly" },
{ id: 35166, word: "jolliness" },
{ id: 35211, word: "jolly" },
{ id: 35212, word: "jolt" },
{ id: 35213, word: "jot" },
{ id: 35214, word: "jovial" },
{ id: 35215, word: "joyfully" },
{ id: 35216, word: "joylessly" },
{ id: 35221, word: "joyous" },
{ id: 35222, word: "joyride" },
{ id: 35223, word: "joystick" },
{ id: 35224, word: "jubilance" },
{ id: 35225, word: "jubilant" },
{ id: 35226, word: "judge" },
{ id: 35231, word: "judgingly" },
{ id: 35232, word: "judicial" },
{ id: 35233, word: "judiciary" },
{ id: 35234, word: "judo" },
{ id: 35235, word: "juggle" },
{ id: 35236, word: "juggling" },
{ id: 35241, word: "jugular" },
{ id: 35242, word: "juice" },
{ id: 35243, word: "juiciness" },
{ id: 35244, word: "juicy" },
{ id: 35245, word: "jujitsu" },
{ id: 35246, word: "jukebox" },
{ id: 35251, word: "july" },
{ id: 35252, word: "jumble" },
{ id: 35253, word: "jumbo" },
{ id: 35254, word: "jump" },
{ id: 35255, word: "junction" },
{ id: 35256, word: "juncture" },
{ id: 35261, word: "june" },
{ id: 35262, word: "junior" },
{ id: 35263, word: "juniper" },
{ id: 35264, word: "junkie" },
{ id: 35265, word: "junkman" },
{ id: 35266, word: "junkyard" },
{ id: 35311, word: "jurist" },
{ id: 35312, word: "juror" },
{ id: 35313, word: "jury" },
{ id: 35314, word: "justice" },
{ id: 35315, word: "justifier" },
{ id: 35316, word: "justify" },
{ id: 35321, word: "justly" },
{ id: 35322, word: "justness" },
{ id: 35323, word: "juvenile" },
{ id: 35324, word: "kabob" },
{ id: 35325, word: "kangaroo" },
{ id: 35326, word: "karaoke" },
{ id: 35331, word: "karate" },
{ id: 35332, word: "karma" },
{ id: 35333, word: "kebab" },
{ id: 35334, word: "keenly" },
{ id: 35335, word: "keenness" },
{ id: 35336, word: "keep" },
{ id: 35341, word: "keg" },
{ id: 35342, word: "kelp" },
{ id: 35343, word: "kennel" },
{ id: 35344, word: "kept" },
{ id: 35345, word: "kerchief" },
{ id: 35346, word: "kerosene" },
{ id: 35351, word: "kettle" },
{ id: 35352, word: "kick" },
{ id: 35353, word: "kiln" },
{ id: 35354, word: "kilobyte" },
{ id: 35355, word: "kilogram" },
{ id: 35356, word: "kilometer" },
{ id: 35361, word: "kilowatt" },
{ id: 35362, word: "kilt" },
{ id: 35363, word: "kimono" },
{ id: 35364, word: "kindle" },
{ id: 35365, word: "kindling" },
{ id: 35366, word: "kindly" },
{ id: 35411, word: "kindness" },
{ id: 35412, word: "kindred" },
{ id: 35413, word: "kinetic" },
{ id: 35414, word: "kinfolk" },
{ id: 35415, word: "king" },
{ id: 35416, word: "kinship" },
{ id: 35421, word: "kinsman" },
{ id: 35422, word: "kinswoman" },
{ id: 35423, word: "kissable" },
{ id: 35424, word: "kisser" },
{ id: 35425, word: "kissing" },
{ id: 35426, word: "kitchen" },
{ id: 35431, word: "kite" },
{ id: 35432, word: "kitten" },
{ id: 35433, word: "kitty" },
{ id: 35434, word: "kiwi" },
{ id: 35435, word: "kleenex" },
{ id: 35436, word: "knapsack" },
{ id: 35441, word: "knee" },
{ id: 35442, word: "knelt" },
{ id: 35443, word: "knickers" },
{ id: 35444, word: "knoll" },
{ id: 35445, word: "koala" },
{ id: 35446, word: "kooky" },
{ id: 35451, word: "kosher" },
{ id: 35452, word: "krypton" },
{ id: 35453, word: "kudos" },
{ id: 35454, word: "kung" },
{ id: 35455, word: "labored" },
{ id: 35456, word: "laborer" },
{ id: 35461, word: "laboring" },
{ id: 35462, word: "laborious" },
{ id: 35463, word: "labrador" },
{ id: 35464, word: "ladder" },
{ id: 35465, word: "ladies" },
{ id: 35466, word: "ladle" },
{ id: 35511, word: "ladybug" },
{ id: 35512, word: "ladylike" },
{ id: 35513, word: "lagged" },
{ id: 35514, word: "lagging" },
{ id: 35515, word: "lagoon" },
{ id: 35516, word: "lair" },
{ id: 35521, word: "lake" },
{ id: 35522, word: "lance" },
{ id: 35523, word: "landed" },
{ id: 35524, word: "landfall" },
{ id: 35525, word: "landfill" },
{ id: 35526, word: "landing" },
{ id: 35531, word: "landlady" },
{ id: 35532, word: "landless" },
{ id: 35533, word: "landline" },
{ id: 35534, word: "landlord" },
{ id: 35535, word: "landmark" },
{ id: 35536, word: "landmass" },
{ id: 35541, word: "landmine" },
{ id: 35542, word: "landowner" },
{ id: 35543, word: "landscape" },
{ id: 35544, word: "landside" },
{ id: 35545, word: "landslide" },
{ id: 35546, word: "language" },
{ id: 35551, word: "lankiness" },
{ id: 35552, word: "lanky" },
{ id: 35553, word: "lantern" },
{ id: 35554, word: "lapdog" },
{ id: 35555, word: "lapel" },
{ id: 35556, word: "lapped" },
{ id: 35561, word: "lapping" },
{ id: 35562, word: "laptop" },
{ id: 35563, word: "lard" },
{ id: 35564, word: "large" },
{ id: 35565, word: "lark" },
{ id: 35566, word: "lash" },
{ id: 35611, word: "lasso" },
{ id: 35612, word: "last" },
{ id: 35613, word: "latch" },
{ id: 35614, word: "late" },
{ id: 35615, word: "lather" },
{ id: 35616, word: "latitude" },
{ id: 35621, word: "latrine" },
{ id: 35622, word: "latter" },
{ id: 35623, word: "latticed" },
{ id: 35624, word: "launch" },
{ id: 35625, word: "launder" },
{ id: 35626, word: "laundry" },
{ id: 35631, word: "laurel" },
{ id: 35632, word: "lavender" },
{ id: 35633, word: "lavish" },
{ id: 35634, word: "laxative" },
{ id: 35635, word: "lazily" },
{ id: 35636, word: "laziness" },
{ id: 35641, word: "lazy" },
{ id: 35642, word: "lecturer" },
{ id: 35643, word: "left" },
{ id: 35644, word: "legacy" },
{ id: 35645, word: "legal" },
{ id: 35646, word: "legend" },
{ id: 35651, word: "legged" },
{ id: 35652, word: "leggings" },
{ id: 35653, word: "legible" },
{ id: 35654, word: "legibly" },
{ id: 35655, word: "legislate" },
{ id: 35656, word: "lego" },
{ id: 35661, word: "legroom" },
{ id: 35662, word: "legume" },
{ id: 35663, word: "legwarmer" },
{ id: 35664, word: "legwork" },
{ id: 35665, word: "lemon" },
{ id: 35666, word: "lend" },
{ id: 36111, word: "length" },
{ id: 36112, word: "lens" },
{ id: 36113, word: "lent" },
{ id: 36114, word: "leotard" },
{ id: 36115, word: "lesser" },
{ id: 36116, word: "letdown" },
{ id: 36121, word: "lethargic" },
{ id: 36122, word: "lethargy" },
{ id: 36123, word: "letter" },
{ id: 36124, word: "lettuce" },
{ id: 36125, word: "level" },
{ id: 36126, word: "leverage" },
{ id: 36131, word: "levers" },
{ id: 36132, word: "levitate" },
{ id: 36133, word: "levitator" },
{ id: 36134, word: "liability" },
{ id: 36135, word: "liable" },
{ id: 36136, word: "liberty" },
{ id: 36141, word: "librarian" },
{ id: 36142, word: "library" },
{ id: 36143, word: "licking" },
{ id: 36144, word: "licorice" },
{ id: 36145, word: "lid" },
{ id: 36146, word: "life" },
{ id: 36151, word: "lifter" },
{ id: 36152, word: "lifting" },
{ id: 36153, word: "liftoff" },
{ id: 36154, word: "ligament" },
{ id: 36155, word: "likely" },
{ id: 36156, word: "likeness" },
{ id: 36161, word: "likewise" },
{ id: 36162, word: "liking" },
{ id: 36163, word: "lilac" },
{ id: 36164, word: "lilly" },
{ id: 36165, word: "lily" },
{ id: 36166, word: "limb" },
{ id: 36211, word: "limeade" },
{ id: 36212, word: "limelight" },
{ id: 36213, word: "limes" },
{ id: 36214, word: "limit" },
{ id: 36215, word: "limping" },
{ id: 36216, word: "limpness" },
{ id: 36221, word: "line" },
{ id: 36222, word: "lingo" },
{ id: 36223, word: "linguini" },
{ id: 36224, word: "linguist" },
{ id: 36225, word: "lining" },
{ id: 36226, word: "linked" },
{ id: 36231, word: "linoleum" },
{ id: 36232, word: "linseed" },
{ id: 36233, word: "lint" },
{ id: 36234, word: "lion" },
{ id: 36235, word: "lip" },
{ id: 36236, word: "liquefy" },
{ id: 36241, word: "liqueur" },
{ id: 36242, word: "liquid" },
{ id: 36243, word: "lisp" },
{ id: 36244, word: "list" },
{ id: 36245, word: "litigate" },
{ id: 36246, word: "litigator" },
{ id: 36251, word: "litmus" },
{ id: 36252, word: "litter" },
{ id: 36253, word: "little" },
{ id: 36254, word: "livable" },
{ id: 36255, word: "lived" },
{ id: 36256, word: "lively" },
{ id: 36261, word: "liver" },
{ id: 36262, word: "livestock" },
{ id: 36263, word: "lividly" },
{ id: 36264, word: "living" },
{ id: 36265, word: "lizard" },
{ id: 36266, word: "lubricant" },
{ id: 36311, word: "lubricate" },
{ id: 36312, word: "lucid" },
{ id: 36313, word: "luckily" },
{ id: 36314, word: "luckiness" },
{ id: 36315, word: "luckless" },
{ id: 36316, word: "lucrative" },
{ id: 36321, word: "ludicrous" },
{ id: 36322, word: "lugged" },
{ id: 36323, word: "lukewarm" },
{ id: 36324, word: "lullaby" },
{ id: 36325, word: "lumber" },
{ id: 36326, word: "luminance" },
{ id: 36331, word: "luminous" },
{ id: 36332, word: "lumpiness" },
{ id: 36333, word: "lumping" },
{ id: 36334, word: "lumpish" },
{ id: 36335, word: "lunacy" },
{ id: 36336, word: "lunar" },
{ id: 36341, word: "lunchbox" },
{ id: 36342, word: "luncheon" },
{ id: 36343, word: "lunchroom" },
{ id: 36344, word: "lunchtime" },
{ id: 36345, word: "lung" },
{ id: 36346, word: "lurch" },
{ id: 36351, word: "lure" },
{ id: 36352, word: "luridness" },
{ id: 36353, word: "lurk" },
{ id: 36354, word: "lushly" },
{ id: 36355, word: "lushness" },
{ id: 36356, word: "luster" },
{ id: 36361, word: "lustfully" },
{ id: 36362, word: "lustily" },
{ id: 36363, word: "lustiness" },
{ id: 36364, word: "lustrous" },
{ id: 36365, word: "lusty" },
{ id: 36366, word: "luxurious" },
{ id: 36411, word: "luxury" },
{ id: 36412, word: "lying" },
{ id: 36413, word: "lyrically" },
{ id: 36414, word: "lyricism" },
{ id: 36415, word: "lyricist" },
{ id: 36416, word: "lyrics" },
{ id: 36421, word: "macarena" },
{ id: 36422, word: "macaroni" },
{ id: 36423, word: "macaw" },
{ id: 36424, word: "mace" },
{ id: 36425, word: "machine" },
{ id: 36426, word: "machinist" },
{ id: 36431, word: "magazine" },
{ id: 36432, word: "magenta" },
{ id: 36433, word: "maggot" },
{ id: 36434, word: "magical" },
{ id: 36435, word: "magician" },
{ id: 36436, word: "magma" },
{ id: 36441, word: "magnesium" },
{ id: 36442, word: "magnetic" },
{ id: 36443, word: "magnetism" },
{ id: 36444, word: "magnetize" },
{ id: 36445, word: "magnifier" },
{ id: 36446, word: "magnify" },
{ id: 36451, word: "magnitude" },
{ id: 36452, word: "magnolia" },
{ id: 36453, word: "mahogany" },
{ id: 36454, word: "maimed" },
{ id: 36455, word: "majestic" },
{ id: 36456, word: "majesty" },
{ id: 36461, word: "majorette" },
{ id: 36462, word: "majority" },
{ id: 36463, word: "makeover" },
{ id: 36464, word: "maker" },
{ id: 36465, word: "makeshift" },
{ id: 36466, word: "making" },
{ id: 36511, word: "malformed" },
{ id: 36512, word: "malt" },
{ id: 36513, word: "mama" },
{ id: 36514, word: "mammal" },
{ id: 36515, word: "mammary" },
{ id: 36516, word: "mammogram" },
{ id: 36521, word: "manager" },
{ id: 36522, word: "managing" },
{ id: 36523, word: "manatee" },
{ id: 36524, word: "mandarin" },
{ id: 36525, word: "mandate" },
{ id: 36526, word: "mandatory" },
{ id: 36531, word: "mandolin" },
{ id: 36532, word: "manger" },
{ id: 36533, word: "mangle" },
{ id: 36534, word: "mango" },
{ id: 36535, word: "mangy" },
{ id: 36536, word: "manhandle" },
{ id: 36541, word: "manhole" },
{ id: 36542, word: "manhood" },
{ id: 36543, word: "manhunt" },
{ id: 36544, word: "manicotti" },
{ id: 36545, word: "manicure" },
{ id: 36546, word: "manifesto" },
{ id: 36551, word: "manila" },
{ id: 36552, word: "mankind" },
{ id: 36553, word: "manlike" },
{ id: 36554, word: "manliness" },
{ id: 36555, word: "manly" },
{ id: 36556, word: "manmade" },
{ id: 36561, word: "manned" },
{ id: 36562, word: "mannish" },
{ id: 36563, word: "manor" },
{ id: 36564, word: "manpower" },
{ id: 36565, word: "mantis" },
{ id: 36566, word: "mantra" },
{ id: 36611, word: "manual" },
{ id: 36612, word: "many" },
{ id: 36613, word: "map" },
{ id: 36614, word: "marathon" },
{ id: 36615, word: "marauding" },
{ id: 36616, word: "marbled" },
{ id: 36621, word: "marbles" },
{ id: 36622, word: "marbling" },
{ id: 36623, word: "march" },
{ id: 36624, word: "mardi" },
{ id: 36625, word: "margarine" },
{ id: 36626, word: "margarita" },
{ id: 36631, word: "margin" },
{ id: 36632, word: "marigold" },
{ id: 36633, word: "marina" },
{ id: 36634, word: "marine" },
{ id: 36635, word: "marital" },
{ id: 36636, word: "maritime" },
{ id: 36641, word: "marlin" },
{ id: 36642, word: "marmalade" },
{ id: 36643, word: "maroon" },
{ id: 36644, word: "married" },
{ id: 36645, word: "marrow" },
{ id: 36646, word: "marry" },
{ id: 36651, word: "marshland" },
{ id: 36652, word: "marshy" },
{ id: 36653, word: "marsupial" },
{ id: 36654, word: "marvelous" },
{ id: 36655, word: "marxism" },
{ id: 36656, word: "mascot" },
{ id: 36661, word: "masculine" },
{ id: 36662, word: "mashed" },
{ id: 36663, word: "mashing" },
{ id: 36664, word: "massager" },
{ id: 36665, word: "masses" },
{ id: 36666, word: "massive" },
{ id: 41111, word: "mastiff" },
{ id: 41112, word: "matador" },
{ id: 41113, word: "matchbook" },
{ id: 41114, word: "matchbox" },
{ id: 41115, word: "matcher" },
{ id: 41116, word: "matching" },
{ id: 41121, word: "matchless" },
{ id: 41122, word: "material" },
{ id: 41123, word: "maternal" },
{ id: 41124, word: "maternity" },
{ id: 41125, word: "math" },
{ id: 41126, word: "mating" },
{ id: 41131, word: "matriarch" },
{ id: 41132, word: "matrimony" },
{ id: 41133, word: "matrix" },
{ id: 41134, word: "matron" },
{ id: 41135, word: "matted" },
{ id: 41136, word: "matter" },
{ id: 41141, word: "maturely" },
{ id: 41142, word: "maturing" },
{ id: 41143, word: "maturity" },
{ id: 41144, word: "mauve" },
{ id: 41145, word: "maverick" },
{ id: 41146, word: "maximize" },
{ id: 41151, word: "maximum" },
{ id: 41152, word: "maybe" },
{ id: 41153, word: "mayday" },
{ id: 41154, word: "mayflower" },
{ id: 41155, word: "moaner" },
{ id: 41156, word: "moaning" },
{ id: 41161, word: "mobile" },
{ id: 41162, word: "mobility" },
{ id: 41163, word: "mobilize" },
{ id: 41164, word: "mobster" },
{ id: 41165, word: "mocha" },
{ id: 41166, word: "mocker" },
{ id: 41211, word: "mockup" },
{ id: 41212, word: "modified" },
{ id: 41213, word: "modify" },
{ id: 41214, word: "modular" },
{ id: 41215, word: "modulator" },
{ id: 41216, word: "module" },
{ id: 41221, word: "moisten" },
{ id: 41222, word: "moistness" },
{ id: 41223, word: "moisture" },
{ id: 41224, word: "molar" },
{ id: 41225, word: "molasses" },
{ id: 41226, word: "mold" },
{ id: 41231, word: "molecular" },
{ id: 41232, word: "molecule" },
{ id: 41233, word: "molehill" },
{ id: 41234, word: "mollusk" },
{ id: 41235, word: "mom" },
{ id: 41236, word: "monastery" },
{ id: 41241, word: "monday" },
{ id: 41242, word: "monetary" },
{ id: 41243, word: "monetize" },
{ id: 41244, word: "moneybags" },
{ id: 41245, word: "moneyless" },
{ id: 41246, word: "moneywise" },
{ id: 41251, word: "mongoose" },
{ id: 41252, word: "mongrel" },
{ id: 41253, word: "monitor" },
{ id: 41254, word: "monkhood" },
{ id: 41255, word: "monogamy" },
{ id: 41256, word: "monogram" },
{ id: 41261, word: "monologue" },
{ id: 41262, word: "monopoly" },
{ id: 41263, word: "monorail" },
{ id: 41264, word: "monotone" },
{ id: 41265, word: "monotype" },
{ id: 41266, word: "monoxide" },
{ id: 41311, word: "monsieur" },
{ id: 41312, word: "monsoon" },
{ id: 41313, word: "monstrous" },
{ id: 41314, word: "monthly" },
{ id: 41315, word: "monument" },
{ id: 41316, word: "moocher" },
{ id: 41321, word: "moodiness" },
{ id: 41322, word: "moody" },
{ id: 41323, word: "mooing" },
{ id: 41324, word: "moonbeam" },
{ id: 41325, word: "mooned" },
{ id: 41326, word: "moonlight" },
{ id: 41331, word: "moonlike" },
{ id: 41332, word: "moonlit" },
{ id: 41333, word: "moonrise" },
{ id: 41334, word: "moonscape" },
{ id: 41335, word: "moonshine" },
{ id: 41336, word: "moonstone" },
{ id: 41341, word: "moonwalk" },
{ id: 41342, word: "mop" },
{ id: 41343, word: "morale" },
{ id: 41344, word: "morality" },
{ id: 41345, word: "morally" },
{ id: 41346, word: "morbidity" },
{ id: 41351, word: "morbidly" },
{ id: 41352, word: "morphine" },
{ id: 41353, word: "morphing" },
{ id: 41354, word: "morse" },
{ id: 41355, word: "mortality" },
{ id: 41356, word: "mortally" },
{ id: 41361, word: "mortician" },
{ id: 41362, word: "mortified" },
{ id: 41363, word: "mortify" },
{ id: 41364, word: "mortuary" },
{ id: 41365, word: "mosaic" },
{ id: 41366, word: "mossy" },
{ id: 41411, word: "most" },
{ id: 41412, word: "mothball" },
{ id: 41413, word: "mothproof" },
{ id: 41414, word: "motion" },
{ id: 41415, word: "motivate" },
{ id: 41416, word: "motivator" },
{ id: 41421, word: "motive" },
{ id: 41422, word: "motocross" },
{ id: 41423, word: "motor" },
{ id: 41424, word: "motto" },
{ id: 41425, word: "mountable" },
{ id: 41426, word: "mountain" },
{ id: 41431, word: "mounted" },
{ id: 41432, word: "mounting" },
{ id: 41433, word: "mourner" },
{ id: 41434, word: "mournful" },
{ id: 41435, word: "mouse" },
{ id: 41436, word: "mousiness" },
{ id: 41441, word: "moustache" },
{ id: 41442, word: "mousy" },
{ id: 41443, word: "mouth" },
{ id: 41444, word: "movable" },
{ id: 41445, word: "move" },
{ id: 41446, word: "movie" },
{ id: 41451, word: "moving" },
{ id: 41452, word: "mower" },
{ id: 41453, word: "mowing" },
{ id: 41454, word: "much" },
{ id: 41455, word: "muck" },
{ id: 41456, word: "mud" },
{ id: 41461, word: "mug" },
{ id: 41462, word: "mulberry" },
{ id: 41463, word: "mulch" },
{ id: 41464, word: "mule" },
{ id: 41465, word: "mulled" },
{ id: 41466, word: "mullets" },
{ id: 41511, word: "multiple" },
{ id: 41512, word: "multiply" },
{ id: 41513, word: "multitask" },
{ id: 41514, word: "multitude" },
{ id: 41515, word: "mumble" },
{ id: 41516, word: "mumbling" },
{ id: 41521, word: "mumbo" },
{ id: 41522, word: "mummified" },
{ id: 41523, word: "mummify" },
{ id: 41524, word: "mummy" },
{ id: 41525, word: "mumps" },
{ id: 41526, word: "munchkin" },
{ id: 41531, word: "mundane" },
{ id: 41532, word: "municipal" },
{ id: 41533, word: "muppet" },
{ id: 41534, word: "mural" },
{ id: 41535, word: "murkiness" },
{ id: 41536, word: "murky" },
{ id: 41541, word: "murmuring" },
{ id: 41542, word: "muscular" },
{ id: 41543, word: "museum" },
{ id: 41544, word: "mushily" },
{ id: 41545, word: "mushiness" },
{ id: 41546, word: "mushroom" },
{ id: 41551, word: "mushy" },
{ id: 41552, word: "music" },
{ id: 41553, word: "musket" },
{ id: 41554, word: "muskiness" },
{ id: 41555, word: "musky" },
{ id: 41556, word: "mustang" },
{ id: 41561, word: "mustard" },
{ id: 41562, word: "muster" },
{ id: 41563, word: "mustiness" },
{ id: 41564, word: "musty" },
{ id: 41565, word: "mutable" },
{ id: 41566, word: "mutate" },
{ id: 41611, word: "mutation" },
{ id: 41612, word: "mute" },
{ id: 41613, word: "mutilated" },
{ id: 41614, word: "mutilator" },
{ id: 41615, word: "mutiny" },
{ id: 41616, word: "mutt" },
{ id: 41621, word: "mutual" },
{ id: 41622, word: "muzzle" },
{ id: 41623, word: "myself" },
{ id: 41624, word: "myspace" },
{ id: 41625, word: "mystified" },
{ id: 41626, word: "mystify" },
{ id: 41631, word: "myth" },
{ id: 41632, word: "nacho" },
{ id: 41633, word: "nag" },
{ id: 41634, word: "nail" },
{ id: 41635, word: "name" },
{ id: 41636, word: "naming" },
{ id: 41641, word: "nanny" },
{ id: 41642, word: "nanometer" },
{ id: 41643, word: "nape" },
{ id: 41644, word: "napkin" },
{ id: 41645, word: "napped" },
{ id: 41646, word: "napping" },
{ id: 41651, word: "nappy" },
{ id: 41652, word: "narrow" },
{ id: 41653, word: "nastily" },
{ id: 41654, word: "nastiness" },
{ id: 41655, word: "national" },
{ id: 41656, word: "native" },
{ id: 41661, word: "nativity" },
{ id: 41662, word: "natural" },
{ id: 41663, word: "nature" },
{ id: 41664, word: "naturist" },
{ id: 41665, word: "nautical" },
{ id: 41666, word: "navigate" },
{ id: 42111, word: "navigator" },
{ id: 42112, word: "navy" },
{ id: 42113, word: "nearby" },
{ id: 42114, word: "nearest" },
{ id: 42115, word: "nearly" },
{ id: 42116, word: "nearness" },
{ id: 42121, word: "neatly" },
{ id: 42122, word: "neatness" },
{ id: 42123, word: "nebula" },
{ id: 42124, word: "nebulizer" },
{ id: 42125, word: "nectar" },
{ id: 42126, word: "negate" },
{ id: 42131, word: "negation" },
{ id: 42132, word: "negative" },
{ id: 42133, word: "neglector" },
{ id: 42134, word: "negligee" },
{ id: 42135, word: "negligent" },
{ id: 42136, word: "negotiate" },
{ id: 42141, word: "nemeses" },
{ id: 42142, word: "nemesis" },
{ id: 42143, word: "neon" },
{ id: 42144, word: "nephew" },
{ id: 42145, word: "nerd" },
{ id: 42146, word: "nervous" },
{ id: 42151, word: "nervy" },
{ id: 42152, word: "nest" },
{ id: 42153, word: "net" },
{ id: 42154, word: "neurology" },
{ id: 42155, word: "neuron" },
{ id: 42156, word: "neurosis" },
{ id: 42161, word: "neurotic" },
{ id: 42162, word: "neuter" },
{ id: 42163, word: "neutron" },
{ id: 42164, word: "never" },
{ id: 42165, word: "next" },
{ id: 42166, word: "nibble" },
{ id: 42211, word: "nickname" },
{ id: 42212, word: "nicotine" },
{ id: 42213, word: "niece" },
{ id: 42214, word: "nifty" },
{ id: 42215, word: "nimble" },
{ id: 42216, word: "nimbly" },
{ id: 42221, word: "nineteen" },
{ id: 42222, word: "ninetieth" },
{ id: 42223, word: "ninja" },
{ id: 42224, word: "nintendo" },
{ id: 42225, word: "ninth" },
{ id: 42226, word: "nuclear" },
{ id: 42231, word: "nuclei" },
{ id: 42232, word: "nucleus" },
{ id: 42233, word: "nugget" },
{ id: 42234, word: "nullify" },
{ id: 42235, word: "number" },
{ id: 42236, word: "numbing" },
{ id: 42241, word: "numbly" },
{ id: 42242, word: "numbness" },
{ id: 42243, word: "numeral" },
{ id: 42244, word: "numerate" },
{ id: 42245, word: "numerator" },
{ id: 42246, word: "numeric" },
{ id: 42251, word: "numerous" },
{ id: 42252, word: "nuptials" },
{ id: 42253, word: "nursery" },
{ id: 42254, word: "nursing" },
{ id: 42255, word: "nurture" },
{ id: 42256, word: "nutcase" },
{ id: 42261, word: "nutlike" },
{ id: 42262, word: "nutmeg" },
{ id: 42263, word: "nutrient" },
{ id: 42264, word: "nutshell" },
{ id: 42265, word: "nuttiness" },
{ id: 42266, word: "nutty" },
{ id: 42311, word: "nuzzle" },
{ id: 42312, word: "nylon" },
{ id: 42313, word: "oaf" },
{ id: 42314, word: "oak" },
{ id: 42315, word: "oasis" },
{ id: 42316, word: "oat" },
{ id: 42321, word: "obedience" },
{ id: 42322, word: "obedient" },
{ id: 42323, word: "obituary" },
{ id: 42324, word: "object" },
{ id: 42325, word: "obligate" },
{ id: 42326, word: "obliged" },
{ id: 42331, word: "oblivion" },
{ id: 42332, word: "oblivious" },
{ id: 42333, word: "oblong" },
{ id: 42334, word: "obnoxious" },
{ id: 42335, word: "oboe" },
{ id: 42336, word: "obscure" },
{ id: 42341, word: "obscurity" },
{ id: 42342, word: "observant" },
{ id: 42343, word: "observer" },
{ id: 42344, word: "observing" },
{ id: 42345, word: "obsessed" },
{ id: 42346, word: "obsession" },
{ id: 42351, word: "obsessive" },
{ id: 42352, word: "obsolete" },
{ id: 42353, word: "obstacle" },
{ id: 42354, word: "obstinate" },
{ id: 42355, word: "obstruct" },
{ id: 42356, word: "obtain" },
{ id: 42361, word: "obtrusive" },
{ id: 42362, word: "obtuse" },
{ id: 42363, word: "obvious" },
{ id: 42364, word: "occultist" },
{ id: 42365, word: "occupancy" },
{ id: 42366, word: "occupant" },
{ id: 42411, word: "occupier" },
{ id: 42412, word: "occupy" },
{ id: 42413, word: "ocean" },
{ id: 42414, word: "ocelot" },
{ id: 42415, word: "octagon" },
{ id: 42416, word: "octane" },
{ id: 42421, word: "october" },
{ id: 42422, word: "octopus" },
{ id: 42423, word: "ogle" },
{ id: 42424, word: "oil" },
{ id: 42425, word: "oink" },
{ id: 42426, word: "ointment" },
{ id: 42431, word: "okay" },
{ id: 42432, word: "old" },
{ id: 42433, word: "olive" },
{ id: 42434, word: "olympics" },
{ id: 42435, word: "omega" },
{ id: 42436, word: "omen" },
{ id: 42441, word: "ominous" },
{ id: 42442, word: "omission" },
{ id: 42443, word: "omit" },
{ id: 42444, word: "omnivore" },
{ id: 42445, word: "onboard" },
{ id: 42446, word: "oncoming" },
{ id: 42451, word: "ongoing" },
{ id: 42452, word: "onion" },
{ id: 42453, word: "online" },
{ id: 42454, word: "onlooker" },
{ id: 42455, word: "only" },
{ id: 42456, word: "onscreen" },
{ id: 42461, word: "onset" },
{ id: 42462, word: "onshore" },
{ id: 42463, word: "onslaught" },
{ id: 42464, word: "onstage" },
{ id: 42465, word: "onto" },
{ id: 42466, word: "onward" },
{ id: 42511, word: "onyx" },
{ id: 42512, word: "oops" },
{ id: 42513, word: "ooze" },
{ id: 42514, word: "oozy" },
{ id: 42515, word: "opacity" },
{ id: 42516, word: "opal" },
{ id: 42521, word: "open" },
{ id: 42522, word: "operable" },
{ id: 42523, word: "operate" },
{ id: 42524, word: "operating" },
{ id: 42525, word: "operation" },
{ id: 42526, word: "operative" },
{ id: 42531, word: "operator" },
{ id: 42532, word: "opium" },
{ id: 42533, word: "opossum" },
{ id: 42534, word: "opponent" },
{ id: 42535, word: "oppose" },
{ id: 42536, word: "opposing" },
{ id: 42541, word: "opposite" },
{ id: 42542, word: "oppressed" },
{ id: 42543, word: "oppressor" },
{ id: 42544, word: "opt" },
{ id: 42545, word: "opulently" },
{ id: 42546, word: "osmosis" },
{ id: 42551, word: "other" },
{ id: 42552, word: "otter" },
{ id: 42553, word: "ouch" },
{ id: 42554, word: "ought" },
{ id: 42555, word: "ounce" },
{ id: 42556, word: "outage" },
{ id: 42561, word: "outback" },
{ id: 42562, word: "outbid" },
{ id: 42563, word: "outboard" },
{ id: 42564, word: "outbound" },
{ id: 42565, word: "outbreak" },
{ id: 42566, word: "outburst" },
{ id: 42611, word: "outcast" },
{ id: 42612, word: "outclass" },
{ id: 42613, word: "outcome" },
{ id: 42614, word: "outdated" },
{ id: 42615, word: "outdoors" },
{ id: 42616, word: "outer" },
{ id: 42621, word: "outfield" },
{ id: 42622, word: "outfit" },
{ id: 42623, word: "outflank" },
{ id: 42624, word: "outgoing" },
{ id: 42625, word: "outgrow" },
{ id: 42626, word: "outhouse" },
{ id: 42631, word: "outing" },
{ id: 42632, word: "outlast" },
{ id: 42633, word: "outlet" },
{ id: 42634, word: "outline" },
{ id: 42635, word: "outlook" },
{ id: 42636, word: "outlying" },
{ id: 42641, word: "outmatch" },
{ id: 42642, word: "outmost" },
{ id: 42643, word: "outnumber" },
{ id: 42644, word: "outplayed" },
{ id: 42645, word: "outpost" },
{ id: 42646, word: "outpour" },
{ id: 42651, word: "output" },
{ id: 42652, word: "outrage" },
{ id: 42653, word: "outrank" },
{ id: 42654, word: "outreach" },
{ id: 42655, word: "outright" },
{ id: 42656, word: "outscore" },
{ id: 42661, word: "outsell" },
{ id: 42662, word: "outshine" },
{ id: 42663, word: "outshoot" },
{ id: 42664, word: "outsider" },
{ id: 42665, word: "outskirts" },
{ id: 42666, word: "outsmart" },
{ id: 43111, word: "outsource" },
{ id: 43112, word: "outspoken" },
{ id: 43113, word: "outtakes" },
{ id: 43114, word: "outthink" },
{ id: 43115, word: "outward" },
{ id: 43116, word: "outweigh" },
{ id: 43121, word: "outwit" },
{ id: 43122, word: "oval" },
{ id: 43123, word: "ovary" },
{ id: 43124, word: "oven" },
{ id: 43125, word: "overact" },
{ id: 43126, word: "overall" },
{ id: 43131, word: "overarch" },
{ id: 43132, word: "overbid" },
{ id: 43133, word: "overbill" },
{ id: 43134, word: "overbite" },
{ id: 43135, word: "overblown" },
{ id: 43136, word: "overboard" },
{ id: 43141, word: "overbook" },
{ id: 43142, word: "overbuilt" },
{ id: 43143, word: "overcast" },
{ id: 43144, word: "overcoat" },
{ id: 43145, word: "overcome" },
{ id: 43146, word: "overcook" },
{ id: 43151, word: "overcrowd" },
{ id: 43152, word: "overdraft" },
{ id: 43153, word: "overdrawn" },
{ id: 43154, word: "overdress" },
{ id: 43155, word: "overdrive" },
{ id: 43156, word: "overdue" },
{ id: 43161, word: "overeager" },
{ id: 43162, word: "overeater" },
{ id: 43163, word: "overexert" },
{ id: 43164, word: "overfed" },
{ id: 43165, word: "overfeed" },
{ id: 43166, word: "overfill" },
{ id: 43211, word: "overflow" },
{ id: 43212, word: "overfull" },
{ id: 43213, word: "overgrown" },
{ id: 43214, word: "overhand" },
{ id: 43215, word: "overhang" },
{ id: 43216, word: "overhaul" },
{ id: 43221, word: "overhead" },
{ id: 43222, word: "overhear" },
{ id: 43223, word: "overheat" },
{ id: 43224, word: "overhung" },
{ id: 43225, word: "overjoyed" },
{ id: 43226, word: "overkill" },
{ id: 43231, word: "overlabor" },
{ id: 43232, word: "overlaid" },
{ id: 43233, word: "overlap" },
{ id: 43234, word: "overlay" },
{ id: 43235, word: "overload" },
{ id: 43236, word: "overlook" },
{ id: 43241, word: "overlord" },
{ id: 43242, word: "overlying" },
{ id: 43243, word: "overnight" },
{ id: 43244, word: "overpass" },
{ id: 43245, word: "overpay" },
{ id: 43246, word: "overplant" },
{ id: 43251, word: "overplay" },
{ id: 43252, word: "overpower" },
{ id: 43253, word: "overprice" },
{ id: 43254, word: "overrate" },
{ id: 43255, word: "overreach" },
{ id: 43256, word: "overreact" },
{ id: 43261, word: "override" },
{ id: 43262, word: "overripe" },
{ id: 43263, word: "overrule" },
{ id: 43264, word: "overrun" },
{ id: 43265, word: "overshoot" },
{ id: 43266, word: "overshot" },
{ id: 43311, word: "oversight" },
{ id: 43312, word: "oversized" },
{ id: 43313, word: "oversleep" },
{ id: 43314, word: "oversold" },
{ id: 43315, word: "overspend" },
{ id: 43316, word: "overstate" },
{ id: 43321, word: "overstay" },
{ id: 43322, word: "overstep" },
{ id: 43323, word: "overstock" },
{ id: 43324, word: "overstuff" },
{ id: 43325, word: "oversweet" },
{ id: 43326, word: "overtake" },
{ id: 43331, word: "overthrow" },
{ id: 43332, word: "overtime" },
{ id: 43333, word: "overtly" },
{ id: 43334, word: "overtone" },
{ id: 43335, word: "overture" },
{ id: 43336, word: "overturn" },
{ id: 43341, word: "overuse" },
{ id: 43342, word: "overvalue" },
{ id: 43343, word: "overview" },
{ id: 43344, word: "overwrite" },
{ id: 43345, word: "owl" },
{ id: 43346, word: "oxford" },
{ id: 43351, word: "oxidant" },
{ id: 43352, word: "oxidation" },
{ id: 43353, word: "oxidize" },
{ id: 43354, word: "oxidizing" },
{ id: 43355, word: "oxygen" },
{ id: 43356, word: "oxymoron" },
{ id: 43361, word: "oyster" },
{ id: 43362, word: "ozone" },
{ id: 43363, word: "paced" },
{ id: 43364, word: "pacemaker" },
{ id: 43365, word: "pacific" },
{ id: 43366, word: "pacifier" },
{ id: 43411, word: "pacifism" },
{ id: 43412, word: "pacifist" },
{ id: 43413, word: "pacify" },
{ id: 43414, word: "padded" },
{ id: 43415, word: "padding" },
{ id: 43416, word: "paddle" },
{ id: 43421, word: "paddling" },
{ id: 43422, word: "padlock" },
{ id: 43423, word: "pagan" },
{ id: 43424, word: "pager" },
{ id: 43425, word: "paging" },
{ id: 43426, word: "pajamas" },
{ id: 43431, word: "palace" },
{ id: 43432, word: "palatable" },
{ id: 43433, word: "palm" },
{ id: 43434, word: "palpable" },
{ id: 43435, word: "palpitate" },
{ id: 43436, word: "paltry" },
{ id: 43441, word: "pampered" },
{ id: 43442, word: "pamperer" },
{ id: 43443, word: "pampers" },
{ id: 43444, word: "pamphlet" },
{ id: 43445, word: "panama" },
{ id: 43446, word: "pancake" },
{ id: 43451, word: "pancreas" },
{ id: 43452, word: "panda" },
{ id: 43453, word: "pandemic" },
{ id: 43454, word: "pang" },
{ id: 43455, word: "panhandle" },
{ id: 43456, word: "panic" },
{ id: 43461, word: "panning" },
{ id: 43462, word: "panorama" },
{ id: 43463, word: "panoramic" },
{ id: 43464, word: "panther" },
{ id: 43465, word: "pantomime" },
{ id: 43466, word: "pantry" },
{ id: 43511, word: "pants" },
{ id: 43512, word: "pantyhose" },
{ id: 43513, word: "paparazzi" },
{ id: 43514, word: "papaya" },
{ id: 43515, word: "paper" },
{ id: 43516, word: "paprika" },
{ id: 43521, word: "papyrus" },
{ id: 43522, word: "parabola" },
{ id: 43523, word: "parachute" },
{ id: 43524, word: "parade" },
{ id: 43525, word: "paradox" },
{ id: 43526, word: "paragraph" },
{ id: 43531, word: "parakeet" },
{ id: 43532, word: "paralegal" },
{ id: 43533, word: "paralyses" },
{ id: 43534, word: "paralysis" },
{ id: 43535, word: "paralyze" },
{ id: 43536, word: "paramedic" },
{ id: 43541, word: "parameter" },
{ id: 43542, word: "paramount" },
{ id: 43543, word: "parasail" },
{ id: 43544, word: "parasite" },
{ id: 43545, word: "parasitic" },
{ id: 43546, word: "parcel" },
{ id: 43551, word: "parched" },
{ id: 43552, word: "parchment" },
{ id: 43553, word: "pardon" },
{ id: 43554, word: "parish" },
{ id: 43555, word: "parka" },
{ id: 43556, word: "parking" },
{ id: 43561, word: "parkway" },
{ id: 43562, word: "parlor" },
{ id: 43563, word: "parmesan" },
{ id: 43564, word: "parole" },
{ id: 43565, word: "parrot" },
{ id: 43566, word: "parsley" },
{ id: 43611, word: "parsnip" },
{ id: 43612, word: "partake" },
{ id: 43613, word: "parted" },
{ id: 43614, word: "parting" },
{ id: 43615, word: "partition" },
{ id: 43616, word: "partly" },
{ id: 43621, word: "partner" },
{ id: 43622, word: "partridge" },
{ id: 43623, word: "party" },
{ id: 43624, word: "passable" },
{ id: 43625, word: "passably" },
{ id: 43626, word: "passage" },
{ id: 43631, word: "passcode" },
{ id: 43632, word: "passenger" },
{ id: 43633, word: "passerby" },
{ id: 43634, word: "passing" },
{ id: 43635, word: "passion" },
{ id: 43636, word: "passive" },
{ id: 43641, word: "passivism" },
{ id: 43642, word: "passover" },
{ id: 43643, word: "passport" },
{ id: 43644, word: "password" },
{ id: 43645, word: "pasta" },
{ id: 43646, word: "pasted" },
{ id: 43651, word: "pastel" },
{ id: 43652, word: "pastime" },
{ id: 43653, word: "pastor" },
{ id: 43654, word: "pastrami" },
{ id: 43655, word: "pasture" },
{ id: 43656, word: "pasty" },
{ id: 43661, word: "patchwork" },
{ id: 43662, word: "patchy" },
{ id: 43663, word: "paternal" },
{ id: 43664, word: "paternity" },
{ id: 43665, word: "path" },
{ id: 43666, word: "patience" },
{ id: 44111, word: "patient" },
{ id: 44112, word: "patio" },
{ id: 44113, word: "patriarch" },
{ id: 44114, word: "patriot" },
{ id: 44115, word: "patrol" },
{ id: 44116, word: "patronage" },
{ id: 44121, word: "patronize" },
{ id: 44122, word: "pauper" },
{ id: 44123, word: "pavement" },
{ id: 44124, word: "paver" },
{ id: 44125, word: "pavestone" },
{ id: 44126, word: "pavilion" },
{ id: 44131, word: "paving" },
{ id: 44132, word: "pawing" },
{ id: 44133, word: "payable" },
{ id: 44134, word: "payback" },
{ id: 44135, word: "paycheck" },
{ id: 44136, word: "payday" },
{ id: 44141, word: "payee" },
{ id: 44142, word: "payer" },
{ id: 44143, word: "paying" },
{ id: 44144, word: "payment" },
{ id: 44145, word: "payphone" },
{ id: 44146, word: "payroll" },
{ id: 44151, word: "pebble" },
{ id: 44152, word: "pebbly" },
{ id: 44153, word: "pecan" },
{ id: 44154, word: "pectin" },
{ id: 44155, word: "peculiar" },
{ id: 44156, word: "peddling" },
{ id: 44161, word: "pediatric" },
{ id: 44162, word: "pedicure" },
{ id: 44163, word: "pedigree" },
{ id: 44164, word: "pedometer" },
{ id: 44165, word: "pegboard" },
{ id: 44166, word: "pelican" },
{ id: 44211, word: "pellet" },
{ id: 44212, word: "pelt" },
{ id: 44213, word: "pelvis" },
{ id: 44214, word: "penalize" },
{ id: 44215, word: "penalty" },
{ id: 44216, word: "pencil" },
{ id: 44221, word: "pendant" },
{ id: 44222, word: "pending" },
{ id: 44223, word: "penholder" },
{ id: 44224, word: "penknife" },
{ id: 44225, word: "pennant" },
{ id: 44226, word: "penniless" },
{ id: 44231, word: "penny" },
{ id: 44232, word: "penpal" },
{ id: 44233, word: "pension" },
{ id: 44234, word: "pentagon" },
{ id: 44235, word: "pentagram" },
{ id: 44236, word: "pep" },
{ id: 44241, word: "perceive" },
{ id: 44242, word: "percent" },
{ id: 44243, word: "perch" },
{ id: 44244, word: "percolate" },
{ id: 44245, word: "perennial" },
{ id: 44246, word: "perfected" },
{ id: 44251, word: "perfectly" },
{ id: 44252, word: "perfume" },
{ id: 44253, word: "periscope" },
{ id: 44254, word: "perish" },
{ id: 44255, word: "perjurer" },
{ id: 44256, word: "perjury" },
{ id: 44261, word: "perkiness" },
{ id: 44262, word: "perky" },
{ id: 44263, word: "perm" },
{ id: 44264, word: "peroxide" },
{ id: 44265, word: "perpetual" },
{ id: 44266, word: "perplexed" },
{ id: 44311, word: "persecute" },
{ id: 44312, word: "persevere" },
{ id: 44313, word: "persuaded" },
{ id: 44314, word: "persuader" },
{ id: 44315, word: "pesky" },
{ id: 44316, word: "peso" },
{ id: 44321, word: "pessimism" },
{ id: 44322, word: "pessimist" },
{ id: 44323, word: "pester" },
{ id: 44324, word: "pesticide" },
{ id: 44325, word: "petal" },
{ id: 44326, word: "petite" },
{ id: 44331, word: "petition" },
{ id: 44332, word: "petri" },
{ id: 44333, word: "petroleum" },
{ id: 44334, word: "petted" },
{ id: 44335, word: "petticoat" },
{ id: 44336, word: "pettiness" },
{ id: 44341, word: "petty" },
{ id: 44342, word: "petunia" },
{ id: 44343, word: "phantom" },
{ id: 44344, word: "phobia" },
{ id: 44345, word: "phoenix" },
{ id: 44346, word: "phonebook" },
{ id: 44351, word: "phoney" },
{ id: 44352, word: "phonics" },
{ id: 44353, word: "phoniness" },
{ id: 44354, word: "phony" },
{ id: 44355, word: "phosphate" },
{ id: 44356, word: "photo" },
{ id: 44361, word: "phrase" },
{ id: 44362, word: "phrasing" },
{ id: 44363, word: "placard" },
{ id: 44364, word: "placate" },
{ id: 44365, word: "placidly" },
{ id: 44366, word: "plank" },
{ id: 44411, word: "planner" },
{ id: 44412, word: "plant" },
{ id: 44413, word: "plasma" },
{ id: 44414, word: "plaster" },
{ id: 44415, word: "plastic" },
{ id: 44416, word: "plated" },
{ id: 44421, word: "platform" },
{ id: 44422, word: "plating" },
{ id: 44423, word: "platinum" },
{ id: 44424, word: "platonic" },
{ id: 44425, word: "platter" },
{ id: 44426, word: "platypus" },
{ id: 44431, word: "plausible" },
{ id: 44432, word: "plausibly" },
{ id: 44433, word: "playable" },
{ id: 44434, word: "playback" },
{ id: 44435, word: "player" },
{ id: 44436, word: "playful" },
{ id: 44441, word: "playgroup" },
{ id: 44442, word: "playhouse" },
{ id: 44443, word: "playing" },
{ id: 44444, word: "playlist" },
{ id: 44445, word: "playmaker" },
{ id: 44446, word: "playmate" },
{ id: 44451, word: "playoff" },
{ id: 44452, word: "playpen" },
{ id: 44453, word: "playroom" },
{ id: 44454, word: "playset" },
{ id: 44455, word: "plaything" },
{ id: 44456, word: "playtime" },
{ id: 44461, word: "plaza" },
{ id: 44462, word: "pleading" },
{ id: 44463, word: "pleat" },
{ id: 44464, word: "pledge" },
{ id: 44465, word: "plentiful" },
{ id: 44466, word: "plenty" },
{ id: 44511, word: "plethora" },
{ id: 44512, word: "plexiglas" },
{ id: 44513, word: "pliable" },
{ id: 44514, word: "plod" },
{ id: 44515, word: "plop" },
{ id: 44516, word: "plot" },
{ id: 44521, word: "plow" },
{ id: 44522, word: "ploy" },
{ id: 44523, word: "pluck" },
{ id: 44524, word: "plug" },
{ id: 44525, word: "plunder" },
{ id: 44526, word: "plunging" },
{ id: 44531, word: "plural" },
{ id: 44532, word: "plus" },
{ id: 44533, word: "plutonium" },
{ id: 44534, word: "plywood" },
{ id: 44535, word: "poach" },
{ id: 44536, word: "pod" },
{ id: 44541, word: "poem" },
{ id: 44542, word: "poet" },
{ id: 44543, word: "pogo" },
{ id: 44544, word: "pointed" },
{ id: 44545, word: "pointer" },
{ id: 44546, word: "pointing" },
{ id: 44551, word: "pointless" },
{ id: 44552, word: "pointy" },
{ id: 44553, word: "poise" },
{ id: 44554, word: "poison" },
{ id: 44555, word: "poker" },
{ id: 44556, word: "poking" },
{ id: 44561, word: "polar" },
{ id: 44562, word: "police" },
{ id: 44563, word: "policy" },
{ id: 44564, word: "polio" },
{ id: 44565, word: "polish" },
{ id: 44566, word: "politely" },
{ id: 44611, word: "polka" },
{ id: 44612, word: "polo" },
{ id: 44613, word: "polyester" },
{ id: 44614, word: "polygon" },
{ id: 44615, word: "polygraph" },
{ id: 44616, word: "polymer" },
{ id: 44621, word: "poncho" },
{ id: 44622, word: "pond" },
{ id: 44623, word: "pony" },
{ id: 44624, word: "popcorn" },
{ id: 44625, word: "pope" },
{ id: 44626, word: "poplar" },
{ id: 44631, word: "popper" },
{ id: 44632, word: "poppy" },
{ id: 44633, word: "popsicle" },
{ id: 44634, word: "populace" },
{ id: 44635, word: "popular" },
{ id: 44636, word: "populate" },
{ id: 44641, word: "porcupine" },
{ id: 44642, word: "pork" },
{ id: 44643, word: "porous" },
{ id: 44644, word: "porridge" },
{ id: 44645, word: "portable" },
{ id: 44646, word: "portal" },
{ id: 44651, word: "portfolio" },
{ id: 44652, word: "porthole" },
{ id: 44653, word: "portion" },
{ id: 44654, word: "portly" },
{ id: 44655, word: "portside" },
{ id: 44656, word: "poser" },
{ id: 44661, word: "posh" },
{ id: 44662, word: "posing" },
{ id: 44663, word: "possible" },
{ id: 44664, word: "possibly" },
{ id: 44665, word: "possum" },
{ id: 44666, word: "postage" },
{ id: 45111, word: "postal" },
{ id: 45112, word: "postbox" },
{ id: 45113, word: "postcard" },
{ id: 45114, word: "posted" },
{ id: 45115, word: "poster" },
{ id: 45116, word: "posting" },
{ id: 45121, word: "postnasal" },
{ id: 45122, word: "posture" },
{ id: 45123, word: "postwar" },
{ id: 45124, word: "pouch" },
{ id: 45125, word: "pounce" },
{ id: 45126, word: "pouncing" },
{ id: 45131, word: "pound" },
{ id: 45132, word: "pouring" },
{ id: 45133, word: "pout" },
{ id: 45134, word: "powdered" },
{ id: 45135, word: "powdering" },
{ id: 45136, word: "powdery" },
{ id: 45141, word: "power" },
{ id: 45142, word: "powwow" },
{ id: 45143, word: "pox" },
{ id: 45144, word: "praising" },
{ id: 45145, word: "prance" },
{ id: 45146, word: "prancing" },
{ id: 45151, word: "pranker" },
{ id: 45152, word: "prankish" },
{ id: 45153, word: "prankster" },
{ id: 45154, word: "prayer" },
{ id: 45155, word: "praying" },
{ id: 45156, word: "preacher" },
{ id: 45161, word: "preaching" },
{ id: 45162, word: "preachy" },
{ id: 45163, word: "preamble" },
{ id: 45164, word: "precinct" },
{ id: 45165, word: "precise" },
{ id: 45166, word: "precision" },
{ id: 45211, word: "precook" },
{ id: 45212, word: "precut" },
{ id: 45213, word: "predator" },
{ id: 45214, word: "predefine" },
{ id: 45215, word: "predict" },
{ id: 45216, word: "preface" },
{ id: 45221, word: "prefix" },
{ id: 45222, word: "preflight" },
{ id: 45223, word: "preformed" },
{ id: 45224, word: "pregame" },
{ id: 45225, word: "pregnancy" },
{ id: 45226, word: "pregnant" },
{ id: 45231, word: "preheated" },
{ id: 45232, word: "prelaunch" },
{ id: 45233, word: "prelaw" },
{ id: 45234, word: "prelude" },
{ id: 45235, word: "premiere" },
{ id: 45236, word: "premises" },
{ id: 45241, word: "premium" },
{ id: 45242, word: "prenatal" },
{ id: 45243, word: "preoccupy" },
{ id: 45244, word: "preorder" },
{ id: 45245, word: "prepaid" },
{ id: 45246, word: "prepay" },
{ id: 45251, word: "preplan" },
{ id: 45252, word: "preppy" },
{ id: 45253, word: "preschool" },
{ id: 45254, word: "prescribe" },
{ id: 45255, word: "preseason" },
{ id: 45256, word: "preset" },
{ id: 45261, word: "preshow" },
{ id: 45262, word: "president" },
{ id: 45263, word: "presoak" },
{ id: 45264, word: "press" },
{ id: 45265, word: "presume" },
{ id: 45266, word: "presuming" },
{ id: 45311, word: "preteen" },
{ id: 45312, word: "pretended" },
{ id: 45313, word: "pretender" },
{ id: 45314, word: "pretense" },
{ id: 45315, word: "pretext" },
{ id: 45316, word: "pretty" },
{ id: 45321, word: "pretzel" },
{ id: 45322, word: "prevail" },
{ id: 45323, word: "prevalent" },
{ id: 45324, word: "prevent" },
{ id: 45325, word: "preview" },
{ id: 45326, word: "previous" },
{ id: 45331, word: "prewar" },
{ id: 45332, word: "prewashed" },
{ id: 45333, word: "prideful" },
{ id: 45334, word: "pried" },
{ id: 45335, word: "primal" },
{ id: 45336, word: "primarily" },
{ id: 45341, word: "primary" },
{ id: 45342, word: "primate" },
{ id: 45343, word: "primer" },
{ id: 45344, word: "primp" },
{ id: 45345, word: "princess" },
{ id: 45346, word: "print" },
{ id: 45351, word: "prior" },
{ id: 45352, word: "prism" },
{ id: 45353, word: "prison" },
{ id: 45354, word: "prissy" },
{ id: 45355, word: "pristine" },
{ id: 45356, word: "privacy" },
{ id: 45361, word: "private" },
{ id: 45362, word: "privatize" },
{ id: 45363, word: "prize" },
{ id: 45364, word: "proactive" },
{ id: 45365, word: "probable" },
{ id: 45366, word: "probably" },
{ id: 45411, word: "probation" },
{ id: 45412, word: "probe" },
{ id: 45413, word: "probing" },
{ id: 45414, word: "probiotic" },
{ id: 45415, word: "problem" },
{ id: 45416, word: "procedure" },
{ id: 45421, word: "process" },
{ id: 45422, word: "proclaim" },
{ id: 45423, word: "procreate" },
{ id: 45424, word: "procurer" },
{ id: 45425, word: "prodigal" },
{ id: 45426, word: "prodigy" },
{ id: 45431, word: "produce" },
{ id: 45432, word: "product" },
{ id: 45433, word: "profane" },
{ id: 45434, word: "profanity" },
{ id: 45435, word: "professed" },
{ id: 45436, word: "professor" },
{ id: 45441, word: "profile" },
{ id: 45442, word: "profound" },
{ id: 45443, word: "profusely" },
{ id: 45444, word: "progeny" },
{ id: 45445, word: "prognosis" },
{ id: 45446, word: "program" },
{ id: 45451, word: "progress" },
{ id: 45452, word: "projector" },
{ id: 45453, word: "prologue" },
{ id: 45454, word: "prolonged" },
{ id: 45455, word: "promenade" },
{ id: 45456, word: "prominent" },
{ id: 45461, word: "promoter" },
{ id: 45462, word: "promotion" },
{ id: 45463, word: "prompter" },
{ id: 45464, word: "promptly" },
{ id: 45465, word: "prone" },
{ id: 45466, word: "prong" },
{ id: 45511, word: "pronounce" },
{ id: 45512, word: "pronto" },
{ id: 45513, word: "proofing" },
{ id: 45514, word: "proofread" },
{ id: 45515, word: "proofs" },
{ id: 45516, word: "propeller" },
{ id: 45521, word: "properly" },
{ id: 45522, word: "property" },
{ id: 45523, word: "proponent" },
{ id: 45524, word: "proposal" },
{ id: 45525, word: "propose" },
{ id: 45526, word: "props" },
{ id: 45531, word: "prorate" },
{ id: 45532, word: "protector" },
{ id: 45533, word: "protegee" },
{ id: 45534, word: "proton" },
{ id: 45535, word: "prototype" },
{ id: 45536, word: "protozoan" },
{ id: 45541, word: "protract" },
{ id: 45542, word: "protrude" },
{ id: 45543, word: "proud" },
{ id: 45544, word: "provable" },
{ id: 45545, word: "proved" },
{ id: 45546, word: "proven" },
{ id: 45551, word: "provided" },
{ id: 45552, word: "provider" },
{ id: 45553, word: "providing" },
{ id: 45554, word: "province" },
{ id: 45555, word: "proving" },
{ id: 45556, word: "provoke" },
{ id: 45561, word: "provoking" },
{ id: 45562, word: "provolone" },
{ id: 45563, word: "prowess" },
{ id: 45564, word: "prowler" },
{ id: 45565, word: "prowling" },
{ id: 45566, word: "proximity" },
{ id: 45611, word: "proxy" },
{ id: 45612, word: "prozac" },
{ id: 45613, word: "prude" },
{ id: 45614, word: "prudishly" },
{ id: 45615, word: "prune" },
{ id: 45616, word: "pruning" },
{ id: 45621, word: "pry" },
{ id: 45622, word: "psychic" },
{ id: 45623, word: "public" },
{ id: 45624, word: "publisher" },
{ id: 45625, word: "pucker" },
{ id: 45626, word: "pueblo" },
{ id: 45631, word: "pug" },
{ id: 45632, word: "pull" },
{ id: 45633, word: "pulmonary" },
{ id: 45634, word: "pulp" },
{ id: 45635, word: "pulsate" },
{ id: 45636, word: "pulse" },
{ id: 45641, word: "pulverize" },
{ id: 45642, word: "puma" },
{ id: 45643, word: "pumice" },
{ id: 45644, word: "pummel" },
{ id: 45645, word: "punch" },
{ id: 45646, word: "punctual" },
{ id: 45651, word: "punctuate" },
{ id: 45652, word: "punctured" },
{ id: 45653, word: "pungent" },
{ id: 45654, word: "punisher" },
{ id: 45655, word: "punk" },
{ id: 45656, word: "pupil" },
{ id: 45661, word: "puppet" },
{ id: 45662, word: "puppy" },
{ id: 45663, word: "purchase" },
{ id: 45664, word: "pureblood" },
{ id: 45665, word: "purebred" },
{ id: 45666, word: "purely" },
{ id: 46111, word: "pureness" },
{ id: 46112, word: "purgatory" },
{ id: 46113, word: "purge" },
{ id: 46114, word: "purging" },
{ id: 46115, word: "purifier" },
{ id: 46116, word: "purify" },
{ id: 46121, word: "purist" },
{ id: 46122, word: "puritan" },
{ id: 46123, word: "purity" },
{ id: 46124, word: "purple" },
{ id: 46125, word: "purplish" },
{ id: 46126, word: "purposely" },
{ id: 46131, word: "purr" },
{ id: 46132, word: "purse" },
{ id: 46133, word: "pursuable" },
{ id: 46134, word: "pursuant" },
{ id: 46135, word: "pursuit" },
{ id: 46136, word: "purveyor" },
{ id: 46141, word: "pushcart" },
{ id: 46142, word: "pushchair" },
{ id: 46143, word: "pusher" },
{ id: 46144, word: "pushiness" },
{ id: 46145, word: "pushing" },
{ id: 46146, word: "pushover" },
{ id: 46151, word: "pushpin" },
{ id: 46152, word: "pushup" },
{ id: 46153, word: "pushy" },
{ id: 46154, word: "putdown" },
{ id: 46155, word: "putt" },
{ id: 46156, word: "puzzle" },
{ id: 46161, word: "puzzling" },
{ id: 46162, word: "pyramid" },
{ id: 46163, word: "pyromania" },
{ id: 46164, word: "python" },
{ id: 46165, word: "quack" },
{ id: 46166, word: "quadrant" },
{ id: 46211, word: "quail" },
{ id: 46212, word: "quaintly" },
{ id: 46213, word: "quake" },
{ id: 46214, word: "quaking" },
{ id: 46215, word: "qualified" },
{ id: 46216, word: "qualifier" },
{ id: 46221, word: "qualify" },
{ id: 46222, word: "quality" },
{ id: 46223, word: "qualm" },
{ id: 46224, word: "quantum" },
{ id: 46225, word: "quarrel" },
{ id: 46226, word: "quarry" },
{ id: 46231, word: "quartered" },
{ id: 46232, word: "quarterly" },
{ id: 46233, word: "quarters" },
{ id: 46234, word: "quartet" },
{ id: 46235, word: "quench" },
{ id: 46236, word: "query" },
{ id: 46241, word: "quicken" },
{ id: 46242, word: "quickly" },
{ id: 46243, word: "quickness" },
{ id: 46244, word: "quicksand" },
{ id: 46245, word: "quickstep" },
{ id: 46246, word: "quiet" },
{ id: 46251, word: "quill" },
{ id: 46252, word: "quilt" },
{ id: 46253, word: "quintet" },
{ id: 46254, word: "quintuple" },
{ id: 46255, word: "quirk" },
{ id: 46256, word: "quit" },
{ id: 46261, word: "quiver" },
{ id: 46262, word: "quizzical" },
{ id: 46263, word: "quotable" },
{ id: 46264, word: "quotation" },
{ id: 46265, word: "quote" },
{ id: 46266, word: "rabid" },
{ id: 46311, word: "race" },
{ id: 46312, word: "racing" },
{ id: 46313, word: "racism" },
{ id: 46314, word: "rack" },
{ id: 46315, word: "racoon" },
{ id: 46316, word: "radar" },
{ id: 46321, word: "radial" },
{ id: 46322, word: "radiance" },
{ id: 46323, word: "radiantly" },
{ id: 46324, word: "radiated" },
{ id: 46325, word: "radiation" },
{ id: 46326, word: "radiator" },
{ id: 46331, word: "radio" },
{ id: 46332, word: "radish" },
{ id: 46333, word: "raffle" },
{ id: 46334, word: "raft" },
{ id: 46335, word: "rage" },
{ id: 46336, word: "ragged" },
{ id: 46341, word: "raging" },
{ id: 46342, word: "ragweed" },
{ id: 46343, word: "raider" },
{ id: 46344, word: "railcar" },
{ id: 46345, word: "railing" },
{ id: 46346, word: "railroad" },
{ id: 46351, word: "railway" },
{ id: 46352, word: "raisin" },
{ id: 46353, word: "rake" },
{ id: 46354, word: "raking" },
{ id: 46355, word: "rally" },
{ id: 46356, word: "ramble" },
{ id: 46361, word: "rambling" },
{ id: 46362, word: "ramp" },
{ id: 46363, word: "ramrod" },
{ id: 46364, word: "ranch" },
{ id: 46365, word: "rancidity" },
{ id: 46366, word: "random" },
{ id: 46411, word: "ranged" },
{ id: 46412, word: "ranger" },
{ id: 46413, word: "ranging" },
{ id: 46414, word: "ranked" },
{ id: 46415, word: "ranking" },
{ id: 46416, word: "ransack" },
{ id: 46421, word: "ranting" },
{ id: 46422, word: "rants" },
{ id: 46423, word: "rare" },
{ id: 46424, word: "rarity" },
{ id: 46425, word: "rascal" },
{ id: 46426, word: "rash" },
{ id: 46431, word: "rasping" },
{ id: 46432, word: "ravage" },
{ id: 46433, word: "raven" },
{ id: 46434, word: "ravine" },
{ id: 46435, word: "raving" },
{ id: 46436, word: "ravioli" },
{ id: 46441, word: "ravishing" },
{ id: 46442, word: "reabsorb" },
{ id: 46443, word: "reach" },
{ id: 46444, word: "reacquire" },
{ id: 46445, word: "reaction" },
{ id: 46446, word: "reactive" },
{ id: 46451, word: "reactor" },
{ id: 46452, word: "reaffirm" },
{ id: 46453, word: "ream" },
{ id: 46454, word: "reanalyze" },
{ id: 46455, word: "reappear" },
{ id: 46456, word: "reapply" },
{ id: 46461, word: "reappoint" },
{ id: 46462, word: "reapprove" },
{ id: 46463, word: "rearrange" },
{ id: 46464, word: "rearview" },
{ id: 46465, word: "reason" },
{ id: 46466, word: "reassign" },
{ id: 46511, word: "reassure" },
{ id: 46512, word: "reattach" },
{ id: 46513, word: "reawake" },
{ id: 46514, word: "rebalance" },
{ id: 46515, word: "rebate" },
{ id: 46516, word: "rebel" },
{ id: 46521, word: "rebirth" },
{ id: 46522, word: "reboot" },
{ id: 46523, word: "reborn" },
{ id: 46524, word: "rebound" },
{ id: 46525, word: "rebuff" },
{ id: 46526, word: "rebuild" },
{ id: 46531, word: "rebuilt" },
{ id: 46532, word: "reburial" },
{ id: 46533, word: "rebuttal" },
{ id: 46534, word: "recall" },
{ id: 46535, word: "recant" },
{ id: 46536, word: "recapture" },
{ id: 46541, word: "recast" },
{ id: 46542, word: "recede" },
{ id: 46543, word: "recent" },
{ id: 46544, word: "recess" },
{ id: 46545, word: "recharger" },
{ id: 46546, word: "recipient" },
{ id: 46551, word: "recital" },
{ id: 46552, word: "recite" },
{ id: 46553, word: "reckless" },
{ id: 46554, word: "reclaim" },
{ id: 46555, word: "recliner" },
{ id: 46556, word: "reclining" },
{ id: 46561, word: "recluse" },
{ id: 46562, word: "reclusive" },
{ id: 46563, word: "recognize" },
{ id: 46564, word: "recoil" },
{ id: 46565, word: "recollect" },
{ id: 46566, word: "recolor" },
{ id: 46611, word: "reconcile" },
{ id: 46612, word: "reconfirm" },
{ id: 46613, word: "reconvene" },
{ id: 46614, word: "recopy" },
{ id: 46615, word: "record" },
{ id: 46616, word: "recount" },
{ id: 46621, word: "recoup" },
{ id: 46622, word: "recovery" },
{ id: 46623, word: "recreate" },
{ id: 46624, word: "rectal" },
{ id: 46625, word: "rectangle" },
{ id: 46626, word: "rectified" },
{ id: 46631, word: "rectify" },
{ id: 46632, word: "recycled" },
{ id: 46633, word: "recycler" },
{ id: 46634, word: "recycling" },
{ id: 46635, word: "reemerge" },
{ id: 46636, word: "reenact" },
{ id: 46641, word: "reenter" },
{ id: 46642, word: "reentry" },
{ id: 46643, word: "reexamine" },
{ id: 46644, word: "referable" },
{ id: 46645, word: "referee" },
{ id: 46646, word: "reference" },
{ id: 46651, word: "refill" },
{ id: 46652, word: "refinance" },
{ id: 46653, word: "refined" },
{ id: 46654, word: "refinery" },
{ id: 46655, word: "refining" },
{ id: 46656, word: "refinish" },
{ id: 46661, word: "reflected" },
{ id: 46662, word: "reflector" },
{ id: 46663, word: "reflex" },
{ id: 46664, word: "reflux" },
{ id: 46665, word: "refocus" },
{ id: 46666, word: "refold" },
{ id: 51111, word: "reforest" },
{ id: 51112, word: "reformat" },
{ id: 51113, word: "reformed" },
{ id: 51114, word: "reformer" },
{ id: 51115, word: "reformist" },
{ id: 51116, word: "refract" },
{ id: 51121, word: "refrain" },
{ id: 51122, word: "refreeze" },
{ id: 51123, word: "refresh" },
{ id: 51124, word: "refried" },
{ id: 51125, word: "refueling" },
{ id: 51126, word: "refund" },
{ id: 51131, word: "refurbish" },
{ id: 51132, word: "refurnish" },
{ id: 51133, word: "refusal" },
{ id: 51134, word: "refuse" },
{ id: 51135, word: "refusing" },
{ id: 51136, word: "refutable" },
{ id: 51141, word: "refute" },
{ id: 51142, word: "regain" },
{ id: 51143, word: "regalia" },
{ id: 51144, word: "regally" },
{ id: 51145, word: "reggae" },
{ id: 51146, word: "regime" },
{ id: 51151, word: "region" },
{ id: 51152, word: "register" },
{ id: 51153, word: "registrar" },
{ id: 51154, word: "registry" },
{ id: 51155, word: "regress" },
{ id: 51156, word: "regretful" },
{ id: 51161, word: "regroup" },
{ id: 51162, word: "regular" },
{ id: 51163, word: "regulate" },
{ id: 51164, word: "regulator" },
{ id: 51165, word: "rehab" },
{ id: 51166, word: "reheat" },
{ id: 51211, word: "rehire" },
{ id: 51212, word: "rehydrate" },
{ id: 51213, word: "reimburse" },
{ id: 51214, word: "reissue" },
{ id: 51215, word: "reiterate" },
{ id: 51216, word: "rejoice" },
{ id: 51221, word: "rejoicing" },
{ id: 51222, word: "rejoin" },
{ id: 51223, word: "rekindle" },
{ id: 51224, word: "relapse" },
{ id: 51225, word: "relapsing" },
{ id: 51226, word: "relatable" },
{ id: 51231, word: "related" },
{ id: 51232, word: "relation" },
{ id: 51233, word: "relative" },
{ id: 51234, word: "relax" },
{ id: 51235, word: "relay" },
{ id: 51236, word: "relearn" },
{ id: 51241, word: "release" },
{ id: 51242, word: "relenting" },
{ id: 51243, word: "reliable" },
{ id: 51244, word: "reliably" },
{ id: 51245, word: "reliance" },
{ id: 51246, word: "reliant" },
{ id: 51251, word: "relic" },
{ id: 51252, word: "relieve" },
{ id: 51253, word: "relieving" },
{ id: 51254, word: "relight" },
{ id: 51255, word: "relish" },
{ id: 51256, word: "relive" },
{ id: 51261, word: "reload" },
{ id: 51262, word: "relocate" },
{ id: 51263, word: "relock" },
{ id: 51264, word: "reluctant" },
{ id: 51265, word: "rely" },
{ id: 51266, word: "remake" },
{ id: 51311, word: "remark" },
{ id: 51312, word: "remarry" },
{ id: 51313, word: "rematch" },
{ id: 51314, word: "remedial" },
{ id: 51315, word: "remedy" },
{ id: 51316, word: "remember" },
{ id: 51321, word: "reminder" },
{ id: 51322, word: "remindful" },
{ id: 51323, word: "remission" },
{ id: 51324, word: "remix" },
{ id: 51325, word: "remnant" },
{ id: 51326, word: "remodeler" },
{ id: 51331, word: "remold" },
{ id: 51332, word: "remorse" },
{ id: 51333, word: "remote" },
{ id: 51334, word: "removable" },
{ id: 51335, word: "removal" },
{ id: 51336, word: "removed" },
{ id: 51341, word: "remover" },
{ id: 51342, word: "removing" },
{ id: 51343, word: "rename" },
{ id: 51344, word: "renderer" },
{ id: 51345, word: "rendering" },
{ id: 51346, word: "rendition" },
{ id: 51351, word: "renegade" },
{ id: 51352, word: "renewable" },
{ id: 51353, word: "renewably" },
{ id: 51354, word: "renewal" },
{ id: 51355, word: "renewed" },
{ id: 51356, word: "renounce" },
{ id: 51361, word: "renovate" },
{ id: 51362, word: "renovator" },
{ id: 51363, word: "rentable" },
{ id: 51364, word: "rental" },
{ id: 51365, word: "rented" },
{ id: 51366, word: "renter" },
{ id: 51411, word: "reoccupy" },
{ id: 51412, word: "reoccur" },
{ id: 51413, word: "reopen" },
{ id: 51414, word: "reorder" },
{ id: 51415, word: "repackage" },
{ id: 51416, word: "repacking" },
{ id: 51421, word: "repaint" },
{ id: 51422, word: "repair" },
{ id: 51423, word: "repave" },
{ id: 51424, word: "repaying" },
{ id: 51425, word: "repayment" },
{ id: 51426, word: "repeal" },
{ id: 51431, word: "repeated" },
{ id: 51432, word: "repeater" },
{ id: 51433, word: "repent" },
{ id: 51434, word: "rephrase" },
{ id: 51435, word: "replace" },
{ id: 51436, word: "replay" },
{ id: 51441, word: "replica" },
{ id: 51442, word: "reply" },
{ id: 51443, word: "reporter" },
{ id: 51444, word: "repose" },
{ id: 51445, word: "repossess" },
{ id: 51446, word: "repost" },
{ id: 51451, word: "repressed" },
{ id: 51452, word: "reprimand" },
{ id: 51453, word: "reprint" },
{ id: 51454, word: "reprise" },
{ id: 51455, word: "reproach" },
{ id: 51456, word: "reprocess" },
{ id: 51461, word: "reproduce" },
{ id: 51462, word: "reprogram" },
{ id: 51463, word: "reps" },
{ id: 51464, word: "reptile" },
{ id: 51465, word: "reptilian" },
{ id: 51466, word: "repugnant" },
{ id: 51511, word: "repulsion" },
{ id: 51512, word: "repulsive" },
{ id: 51513, word: "repurpose" },
{ id: 51514, word: "reputable" },
{ id: 51515, word: "reputably" },
{ id: 51516, word: "request" },
{ id: 51521, word: "require" },
{ id: 51522, word: "requisite" },
{ id: 51523, word: "reroute" },
{ id: 51524, word: "rerun" },
{ id: 51525, word: "resale" },
{ id: 51526, word: "resample" },
{ id: 51531, word: "rescuer" },
{ id: 51532, word: "reseal" },
{ id: 51533, word: "research" },
{ id: 51534, word: "reselect" },
{ id: 51535, word: "reseller" },
{ id: 51536, word: "resemble" },
{ id: 51541, word: "resend" },
{ id: 51542, word: "resent" },
{ id: 51543, word: "reset" },
{ id: 51544, word: "reshape" },
{ id: 51545, word: "reshoot" },
{ id: 51546, word: "reshuffle" },
{ id: 51551, word: "residence" },
{ id: 51552, word: "residency" },
{ id: 51553, word: "resident" },
{ id: 51554, word: "residual" },
{ id: 51555, word: "residue" },
{ id: 51556, word: "resigned" },
{ id: 51561, word: "resilient" },
{ id: 51562, word: "resistant" },
{ id: 51563, word: "resisting" },
{ id: 51564, word: "resize" },
{ id: 51565, word: "resolute" },
{ id: 51566, word: "resolved" },
{ id: 51611, word: "resonant" },
{ id: 51612, word: "resonate" },
{ id: 51613, word: "resort" },
{ id: 51614, word: "resource" },
{ id: 51615, word: "respect" },
{ id: 51616, word: "resubmit" },
{ id: 51621, word: "result" },
{ id: 51622, word: "resume" },
{ id: 51623, word: "resupply" },
{ id: 51624, word: "resurface" },
{ id: 51625, word: "resurrect" },
{ id: 51626, word: "retail" },
{ id: 51631, word: "retainer" },
{ id: 51632, word: "retaining" },
{ id: 51633, word: "retake" },
{ id: 51634, word: "retaliate" },
{ id: 51635, word: "retention" },
{ id: 51636, word: "rethink" },
{ id: 51641, word: "retinal" },
{ id: 51642, word: "retired" },
{ id: 51643, word: "retiree" },
{ id: 51644, word: "retiring" },
{ id: 51645, word: "retold" },
{ id: 51646, word: "retool" },
{ id: 51651, word: "retorted" },
{ id: 51652, word: "retouch" },
{ id: 51653, word: "retrace" },
{ id: 51654, word: "retract" },
{ id: 51655, word: "retrain" },
{ id: 51656, word: "retread" },
{ id: 51661, word: "retreat" },
{ id: 51662, word: "retrial" },
{ id: 51663, word: "retrieval" },
{ id: 51664, word: "retriever" },
{ id: 51665, word: "retry" },
{ id: 51666, word: "return" },
{ id: 52111, word: "retying" },
{ id: 52112, word: "retype" },
{ id: 52113, word: "reunion" },
{ id: 52114, word: "reunite" },
{ id: 52115, word: "reusable" },
{ id: 52116, word: "reuse" },
{ id: 52121, word: "reveal" },
{ id: 52122, word: "reveler" },
{ id: 52123, word: "revenge" },
{ id: 52124, word: "revenue" },
{ id: 52125, word: "reverb" },
{ id: 52126, word: "revered" },
{ id: 52131, word: "reverence" },
{ id: 52132, word: "reverend" },
{ id: 52133, word: "reversal" },
{ id: 52134, word: "reverse" },
{ id: 52135, word: "reversing" },
{ id: 52136, word: "reversion" },
{ id: 52141, word: "revert" },
{ id: 52142, word: "revisable" },
{ id: 52143, word: "revise" },
{ id: 52144, word: "revision" },
{ id: 52145, word: "revisit" },
{ id: 52146, word: "revivable" },
{ id: 52151, word: "revival" },
{ id: 52152, word: "reviver" },
{ id: 52153, word: "reviving" },
{ id: 52154, word: "revocable" },
{ id: 52155, word: "revoke" },
{ id: 52156, word: "revolt" },
{ id: 52161, word: "revolver" },
{ id: 52162, word: "revolving" },
{ id: 52163, word: "reward" },
{ id: 52164, word: "rewash" },
{ id: 52165, word: "rewind" },
{ id: 52166, word: "rewire" },
{ id: 52211, word: "reword" },
{ id: 52212, word: "rework" },
{ id: 52213, word: "rewrap" },
{ id: 52214, word: "rewrite" },
{ id: 52215, word: "rhyme" },
{ id: 52216, word: "ribbon" },
{ id: 52221, word: "ribcage" },
{ id: 52222, word: "rice" },
{ id: 52223, word: "riches" },
{ id: 52224, word: "richly" },
{ id: 52225, word: "richness" },
{ id: 52226, word: "rickety" },
{ id: 52231, word: "ricotta" },
{ id: 52232, word: "riddance" },
{ id: 52233, word: "ridden" },
{ id: 52234, word: "ride" },
{ id: 52235, word: "riding" },
{ id: 52236, word: "rifling" },
{ id: 52241, word: "rift" },
{ id: 52242, word: "rigging" },
{ id: 52243, word: "rigid" },
{ id: 52244, word: "rigor" },
{ id: 52245, word: "rimless" },
{ id: 52246, word: "rimmed" },
{ id: 52251, word: "rind" },
{ id: 52252, word: "rink" },
{ id: 52253, word: "rinse" },
{ id: 52254, word: "rinsing" },
{ id: 52255, word: "riot" },
{ id: 52256, word: "ripcord" },
{ id: 52261, word: "ripeness" },
{ id: 52262, word: "ripening" },
{ id: 52263, word: "ripping" },
{ id: 52264, word: "ripple" },
{ id: 52265, word: "rippling" },
{ id: 52266, word: "riptide" },
{ id: 52311, word: "rise" },
{ id: 52312, word: "rising" },
{ id: 52313, word: "risk" },
{ id: 52314, word: "risotto" },
{ id: 52315, word: "ritalin" },
{ id: 52316, word: "ritzy" },
{ id: 52321, word: "rival" },
{ id: 52322, word: "riverbank" },
{ id: 52323, word: "riverbed" },
{ id: 52324, word: "riverboat" },
{ id: 52325, word: "riverside" },
{ id: 52326, word: "riveter" },
{ id: 52331, word: "riveting" },
{ id: 52332, word: "roamer" },
{ id: 52333, word: "roaming" },
{ id: 52334, word: "roast" },
{ id: 52335, word: "robbing" },
{ id: 52336, word: "robe" },
{ id: 52341, word: "robin" },
{ id: 52342, word: "robotics" },
{ id: 52343, word: "robust" },
{ id: 52344, word: "rockband" },
{ id: 52345, word: "rocker" },
{ id: 52346, word: "rocket" },
{ id: 52351, word: "rockfish" },
{ id: 52352, word: "rockiness" },
{ id: 52353, word: "rocking" },
{ id: 52354, word: "rocklike" },
{ id: 52355, word: "rockslide" },
{ id: 52356, word: "rockstar" },
{ id: 52361, word: "rocky" },
{ id: 52362, word: "rogue" },
{ id: 52363, word: "roman" },
{ id: 52364, word: "romp" },
{ id: 52365, word: "rope" },
{ id: 52366, word: "roping" },
{ id: 52411, word: "roster" },
{ id: 52412, word: "rosy" },
{ id: 52413, word: "rotten" },
{ id: 52414, word: "rotting" },
{ id: 52415, word: "rotunda" },
{ id: 52416, word: "roulette" },
{ id: 52421, word: "rounding" },
{ id: 52422, word: "roundish" },
{ id: 52423, word: "roundness" },
{ id: 52424, word: "roundup" },
{ id: 52425, word: "roundworm" },
{ id: 52426, word: "routine" },
{ id: 52431, word: "routing" },
{ id: 52432, word: "rover" },
{ id: 52433, word: "roving" },
{ id: 52434, word: "royal" },
{ id: 52435, word: "rubbed" },
{ id: 52436, word: "rubber" },
{ id: 52441, word: "rubbing" },
{ id: 52442, word: "rubble" },
{ id: 52443, word: "rubdown" },
{ id: 52444, word: "ruby" },
{ id: 52445, word: "ruckus" },
{ id: 52446, word: "rudder" },
{ id: 52451, word: "rug" },
{ id: 52452, word: "ruined" },
{ id: 52453, word: "rule" },
{ id: 52454, word: "rumble" },
{ id: 52455, word: "rumbling" },
{ id: 52456, word: "rummage" },
{ id: 52461, word: "rumor" },
{ id: 52462, word: "runaround" },
{ id: 52463, word: "rundown" },
{ id: 52464, word: "runner" },
{ id: 52465, word: "running" },
{ id: 52466, word: "runny" },
{ id: 52511, word: "runt" },
{ id: 52512, word: "runway" },
{ id: 52513, word: "rupture" },
{ id: 52514, word: "rural" },
{ id: 52515, word: "ruse" },
{ id: 52516, word: "rush" },
{ id: 52521, word: "rust" },
{ id: 52522, word: "rut" },
{ id: 52523, word: "sabbath" },
{ id: 52524, word: "sabotage" },
{ id: 52525, word: "sacrament" },
{ id: 52526, word: "sacred" },
{ id: 52531, word: "sacrifice" },
{ id: 52532, word: "sadden" },
{ id: 52533, word: "saddlebag" },
{ id: 52534, word: "saddled" },
{ id: 52535, word: "saddling" },
{ id: 52536, word: "sadly" },
{ id: 52541, word: "sadness" },
{ id: 52542, word: "safari" },
{ id: 52543, word: "safeguard" },
{ id: 52544, word: "safehouse" },
{ id: 52545, word: "safely" },
{ id: 52546, word: "safeness" },
{ id: 52551, word: "saffron" },
{ id: 52552, word: "saga" },
{ id: 52553, word: "sage" },
{ id: 52554, word: "sagging" },
{ id: 52555, word: "saggy" },
{ id: 52556, word: "said" },
{ id: 52561, word: "saint" },
{ id: 52562, word: "sake" },
{ id: 52563, word: "salad" },
{ id: 52564, word: "salami" },
{ id: 52565, word: "salaried" },
{ id: 52566, word: "salary" },
{ id: 52611, word: "saline" },
{ id: 52612, word: "salon" },
{ id: 52613, word: "saloon" },
{ id: 52614, word: "salsa" },
{ id: 52615, word: "salt" },
{ id: 52616, word: "salutary" },
{ id: 52621, word: "salute" },
{ id: 52622, word: "salvage" },
{ id: 52623, word: "salvaging" },
{ id: 52624, word: "salvation" },
{ id: 52625, word: "same" },
{ id: 52626, word: "sample" },
{ id: 52631, word: "sampling" },
{ id: 52632, word: "sanction" },
{ id: 52633, word: "sanctity" },
{ id: 52634, word: "sanctuary" },
{ id: 52635, word: "sandal" },
{ id: 52636, word: "sandbag" },
{ id: 52641, word: "sandbank" },
{ id: 52642, word: "sandbar" },
{ id: 52643, word: "sandblast" },
{ id: 52644, word: "sandbox" },
{ id: 52645, word: "sanded" },
{ id: 52646, word: "sandfish" },
{ id: 52651, word: "sanding" },
{ id: 52652, word: "sandlot" },
{ id: 52653, word: "sandpaper" },
{ id: 52654, word: "sandpit" },
{ id: 52655, word: "sandstone" },
{ id: 52656, word: "sandstorm" },
{ id: 52661, word: "sandworm" },
{ id: 52662, word: "sandy" },
{ id: 52663, word: "sanitary" },
{ id: 52664, word: "sanitizer" },
{ id: 52665, word: "sank" },
{ id: 52666, word: "santa" },
{ id: 53111, word: "sapling" },
{ id: 53112, word: "sappiness" },
{ id: 53113, word: "sappy" },
{ id: 53114, word: "sarcasm" },
{ id: 53115, word: "sarcastic" },
{ id: 53116, word: "sardine" },
{ id: 53121, word: "sash" },
{ id: 53122, word: "sasquatch" },
{ id: 53123, word: "sassy" },
{ id: 53124, word: "satchel" },
{ id: 53125, word: "satiable" },
{ id: 53126, word: "satin" },
{ id: 53131, word: "satirical" },
{ id: 53132, word: "satisfied" },
{ id: 53133, word: "satisfy" },
{ id: 53134, word: "saturate" },
{ id: 53135, word: "saturday" },
{ id: 53136, word: "sauciness" },
{ id: 53141, word: "saucy" },
{ id: 53142, word: "sauna" },
{ id: 53143, word: "savage" },
{ id: 53144, word: "savanna" },
{ id: 53145, word: "saved" },
{ id: 53146, word: "savings" },
{ id: 53151, word: "savior" },
{ id: 53152, word: "savor" },
{ id: 53153, word: "saxophone" },
{ id: 53154, word: "say" },
{ id: 53155, word: "scabbed" },
{ id: 53156, word: "scabby" },
{ id: 53161, word: "scalded" },
{ id: 53162, word: "scalding" },
{ id: 53163, word: "scale" },
{ id: 53164, word: "scaling" },
{ id: 53165, word: "scallion" },
{ id: 53166, word: "scallop" },
{ id: 53211, word: "scalping" },
{ id: 53212, word: "scam" },
{ id: 53213, word: "scandal" },
{ id: 53214, word: "scanner" },
{ id: 53215, word: "scanning" },
{ id: 53216, word: "scant" },
{ id: 53221, word: "scapegoat" },
{ id: 53222, word: "scarce" },
{ id: 53223, word: "scarcity" },
{ id: 53224, word: "scarecrow" },
{ id: 53225, word: "scared" },
{ id: 53226, word: "scarf" },
{ id: 53231, word: "scarily" },
{ id: 53232, word: "scariness" },
{ id: 53233, word: "scarring" },
{ id: 53234, word: "scary" },
{ id: 53235, word: "scavenger" },
{ id: 53236, word: "scenic" },
{ id: 53241, word: "schedule" },
{ id: 53242, word: "schematic" },
{ id: 53243, word: "scheme" },
{ id: 53244, word: "scheming" },
{ id: 53245, word: "schilling" },
{ id: 53246, word: "schnapps" },
{ id: 53251, word: "scholar" },
{ id: 53252, word: "science" },
{ id: 53253, word: "scientist" },
{ id: 53254, word: "scion" },
{ id: 53255, word: "scoff" },
{ id: 53256, word: "scolding" },
{ id: 53261, word: "scone" },
{ id: 53262, word: "scoop" },
{ id: 53263, word: "scooter" },
{ id: 53264, word: "scope" },
{ id: 53265, word: "scorch" },
{ id: 53266, word: "scorebook" },
{ id: 53311, word: "scorecard" },
{ id: 53312, word: "scored" },
{ id: 53313, word: "scoreless" },
{ id: 53314, word: "scorer" },
{ id: 53315, word: "scoring" },
{ id: 53316, word: "scorn" },
{ id: 53321, word: "scorpion" },
{ id: 53322, word: "scotch" },
{ id: 53323, word: "scoundrel" },
{ id: 53324, word: "scoured" },
{ id: 53325, word: "scouring" },
{ id: 53326, word: "scouting" },
{ id: 53331, word: "scouts" },
{ id: 53332, word: "scowling" },
{ id: 53333, word: "scrabble" },
{ id: 53334, word: "scraggly" },
{ id: 53335, word: "scrambled" },
{ id: 53336, word: "scrambler" },
{ id: 53341, word: "scrap" },
{ id: 53342, word: "scratch" },
{ id: 53343, word: "scrawny" },
{ id: 53344, word: "screen" },
{ id: 53345, word: "scribble" },
{ id: 53346, word: "scribe" },
{ id: 53351, word: "scribing" },
{ id: 53352, word: "scrimmage" },
{ id: 53353, word: "script" },
{ id: 53354, word: "scroll" },
{ id: 53355, word: "scrooge" },
{ id: 53356, word: "scrounger" },
{ id: 53361, word: "scrubbed" },
{ id: 53362, word: "scrubber" },
{ id: 53363, word: "scruffy" },
{ id: 53364, word: "scrunch" },
{ id: 53365, word: "scrutiny" },
{ id: 53366, word: "scuba" },
{ id: 53411, word: "scuff" },
{ id: 53412, word: "sculptor" },
{ id: 53413, word: "sculpture" },
{ id: 53414, word: "scurvy" },
{ id: 53415, word: "scuttle" },
{ id: 53416, word: "secluded" },
{ id: 53421, word: "secluding" },
{ id: 53422, word: "seclusion" },
{ id: 53423, word: "second" },
{ id: 53424, word: "secrecy" },
{ id: 53425, word: "secret" },
{ id: 53426, word: "sectional" },
{ id: 53431, word: "sector" },
{ id: 53432, word: "secular" },
{ id: 53433, word: "securely" },
{ id: 53434, word: "security" },
{ id: 53435, word: "sedan" },
{ id: 53436, word: "sedate" },
{ id: 53441, word: "sedation" },
{ id: 53442, word: "sedative" },
{ id: 53443, word: "sediment" },
{ id: 53444, word: "seduce" },
{ id: 53445, word: "seducing" },
{ id: 53446, word: "segment" },
{ id: 53451, word: "seismic" },
{ id: 53452, word: "seizing" },
{ id: 53453, word: "seldom" },
{ id: 53454, word: "selected" },
{ id: 53455, word: "selection" },
{ id: 53456, word: "selective" },
{ id: 53461, word: "selector" },
{ id: 53462, word: "self" },
{ id: 53463, word: "seltzer" },
{ id: 53464, word: "semantic" },
{ id: 53465, word: "semester" },
{ id: 53466, word: "semicolon" },
{ id: 53511, word: "semifinal" },
{ id: 53512, word: "seminar" },
{ id: 53513, word: "semisoft" },
{ id: 53514, word: "semisweet" },
{ id: 53515, word: "senate" },
{ id: 53516, word: "senator" },
{ id: 53521, word: "send" },
{ id: 53522, word: "senior" },
{ id: 53523, word: "senorita" },
{ id: 53524, word: "sensation" },
{ id: 53525, word: "sensitive" },
{ id: 53526, word: "sensitize" },
{ id: 53531, word: "sensually" },
{ id: 53532, word: "sensuous" },
{ id: 53533, word: "sepia" },
{ id: 53534, word: "september" },
{ id: 53535, word: "septic" },
{ id: 53536, word: "septum" },
{ id: 53541, word: "sequel" },
{ id: 53542, word: "sequence" },
{ id: 53543, word: "sequester" },
{ id: 53544, word: "series" },
{ id: 53545, word: "sermon" },
{ id: 53546, word: "serotonin" },
{ id: 53551, word: "serpent" },
{ id: 53552, word: "serrated" },
{ id: 53553, word: "serve" },
{ id: 53554, word: "service" },
{ id: 53555, word: "serving" },
{ id: 53556, word: "sesame" },
{ id: 53561, word: "sessions" },
{ id: 53562, word: "setback" },
{ id: 53563, word: "setting" },
{ id: 53564, word: "settle" },
{ id: 53565, word: "settling" },
{ id: 53566, word: "setup" },
{ id: 53611, word: "sevenfold" },
{ id: 53612, word: "seventeen" },
{ id: 53613, word: "seventh" },
{ id: 53614, word: "seventy" },
{ id: 53615, word: "severity" },
{ id: 53616, word: "shabby" },
{ id: 53621, word: "shack" },
{ id: 53622, word: "shaded" },
{ id: 53623, word: "shadily" },
{ id: 53624, word: "shadiness" },
{ id: 53625, word: "shading" },
{ id: 53626, word: "shadow" },
{ id: 53631, word: "shady" },
{ id: 53632, word: "shaft" },
{ id: 53633, word: "shakable" },
{ id: 53634, word: "shakily" },
{ id: 53635, word: "shakiness" },
{ id: 53636, word: "shaking" },
{ id: 53641, word: "shaky" },
{ id: 53642, word: "shale" },
{ id: 53643, word: "shallot" },
{ id: 53644, word: "shallow" },
{ id: 53645, word: "shame" },
{ id: 53646, word: "shampoo" },
{ id: 53651, word: "shamrock" },
{ id: 53652, word: "shank" },
{ id: 53653, word: "shanty" },
{ id: 53654, word: "shape" },
{ id: 53655, word: "shaping" },
{ id: 53656, word: "share" },
{ id: 53661, word: "sharpener" },
{ id: 53662, word: "sharper" },
{ id: 53663, word: "sharpie" },
{ id: 53664, word: "sharply" },
{ id: 53665, word: "sharpness" },
{ id: 53666, word: "shawl" },
{ id: 54111, word: "sheath" },
{ id: 54112, word: "shed" },
{ id: 54113, word: "sheep" },
{ id: 54114, word: "sheet" },
{ id: 54115, word: "shelf" },
{ id: 54116, word: "shell" },
{ id: 54121, word: "shelter" },
{ id: 54122, word: "shelve" },
{ id: 54123, word: "shelving" },
{ id: 54124, word: "sherry" },
{ id: 54125, word: "shield" },
{ id: 54126, word: "shifter" },
{ id: 54131, word: "shifting" },
{ id: 54132, word: "shiftless" },
{ id: 54133, word: "shifty" },
{ id: 54134, word: "shimmer" },
{ id: 54135, word: "shimmy" },
{ id: 54136, word: "shindig" },
{ id: 54141, word: "shine" },
{ id: 54142, word: "shingle" },
{ id: 54143, word: "shininess" },
{ id: 54144, word: "shining" },
{ id: 54145, word: "shiny" },
{ id: 54146, word: "ship" },
{ id: 54151, word: "shirt" },
{ id: 54152, word: "shivering" },
{ id: 54153, word: "shock" },
{ id: 54154, word: "shone" },
{ id: 54155, word: "shoplift" },
{ id: 54156, word: "shopper" },
{ id: 54161, word: "shopping" },
{ id: 54162, word: "shoptalk" },
{ id: 54163, word: "shore" },
{ id: 54164, word: "shortage" },
{ id: 54165, word: "shortcake" },
{ id: 54166, word: "shortcut" },
{ id: 54211, word: "shorten" },
{ id: 54212, word: "shorter" },
{ id: 54213, word: "shorthand" },
{ id: 54214, word: "shortlist" },
{ id: 54215, word: "shortly" },
{ id: 54216, word: "shortness" },
{ id: 54221, word: "shorts" },
{ id: 54222, word: "shortwave" },
{ id: 54223, word: "shorty" },
{ id: 54224, word: "shout" },
{ id: 54225, word: "shove" },
{ id: 54226, word: "showbiz" },
{ id: 54231, word: "showcase" },
{ id: 54232, word: "showdown" },
{ id: 54233, word: "shower" },
{ id: 54234, word: "showgirl" },
{ id: 54235, word: "showing" },
{ id: 54236, word: "showman" },
{ id: 54241, word: "shown" },
{ id: 54242, word: "showoff" },
{ id: 54243, word: "showpiece" },
{ id: 54244, word: "showplace" },
{ id: 54245, word: "showroom" },
{ id: 54246, word: "showy" },
{ id: 54251, word: "shrank" },
{ id: 54252, word: "shrapnel" },
{ id: 54253, word: "shredder" },
{ id: 54254, word: "shredding" },
{ id: 54255, word: "shrewdly" },
{ id: 54256, word: "shriek" },
{ id: 54261, word: "shrill" },
{ id: 54262, word: "shrimp" },
{ id: 54263, word: "shrine" },
{ id: 54264, word: "shrink" },
{ id: 54265, word: "shrivel" },
{ id: 54266, word: "shrouded" },
{ id: 54311, word: "shrubbery" },
{ id: 54312, word: "shrubs" },
{ id: 54313, word: "shrug" },
{ id: 54314, word: "shrunk" },
{ id: 54315, word: "shucking" },
{ id: 54316, word: "shudder" },
{ id: 54321, word: "shuffle" },
{ id: 54322, word: "shuffling" },
{ id: 54323, word: "shun" },
{ id: 54324, word: "shush" },
{ id: 54325, word: "shut" },
{ id: 54326, word: "shy" },
{ id: 54331, word: "siamese" },
{ id: 54332, word: "siberian" },
{ id: 54333, word: "sibling" },
{ id: 54334, word: "siding" },
{ id: 54335, word: "sierra" },
{ id: 54336, word: "siesta" },
{ id: 54341, word: "sift" },
{ id: 54342, word: "sighing" },
{ id: 54343, word: "silenced" },
{ id: 54344, word: "silencer" },
{ id: 54345, word: "silent" },
{ id: 54346, word: "silica" },
{ id: 54351, word: "silicon" },
{ id: 54352, word: "silk" },
{ id: 54353, word: "silliness" },
{ id: 54354, word: "silly" },
{ id: 54355, word: "silo" },
{ id: 54356, word: "silt" },
{ id: 54361, word: "silver" },
{ id: 54362, word: "similarly" },
{ id: 54363, word: "simile" },
{ id: 54364, word: "simmering" },
{ id: 54365, word: "simple" },
{ id: 54366, word: "simplify" },
{ id: 54411, word: "simply" },
{ id: 54412, word: "sincere" },
{ id: 54413, word: "sincerity" },
{ id: 54414, word: "singer" },
{ id: 54415, word: "singing" },
{ id: 54416, word: "single" },
{ id: 54421, word: "singular" },
{ id: 54422, word: "sinister" },
{ id: 54423, word: "sinless" },
{ id: 54424, word: "sinner" },
{ id: 54425, word: "sinuous" },
{ id: 54426, word: "sip" },
{ id: 54431, word: "siren" },
{ id: 54432, word: "sister" },
{ id: 54433, word: "sitcom" },
{ id: 54434, word: "sitter" },
{ id: 54435, word: "sitting" },
{ id: 54436, word: "situated" },
{ id: 54441, word: "situation" },
{ id: 54442, word: "sixfold" },
{ id: 54443, word: "sixteen" },
{ id: 54444, word: "sixth" },
{ id: 54445, word: "sixties" },
{ id: 54446, word: "sixtieth" },
{ id: 54451, word: "sixtyfold" },
{ id: 54452, word: "sizable" },
{ id: 54453, word: "sizably" },
{ id: 54454, word: "size" },
{ id: 54455, word: "sizing" },
{ id: 54456, word: "sizzle" },
{ id: 54461, word: "sizzling" },
{ id: 54462, word: "skater" },
{ id: 54463, word: "skating" },
{ id: 54464, word: "skedaddle" },
{ id: 54465, word: "skeletal" },
{ id: 54466, word: "skeleton" },
{ id: 54511, word: "skeptic" },
{ id: 54512, word: "sketch" },
{ id: 54513, word: "skewed" },
{ id: 54514, word: "skewer" },
{ id: 54515, word: "skid" },
{ id: 54516, word: "skied" },
{ id: 54521, word: "skier" },
{ id: 54522, word: "skies" },
{ id: 54523, word: "skiing" },
{ id: 54524, word: "skilled" },
{ id: 54525, word: "skillet" },
{ id: 54526, word: "skillful" },
{ id: 54531, word: "skimmed" },
{ id: 54532, word: "skimmer" },
{ id: 54533, word: "skimming" },
{ id: 54534, word: "skimpily" },
{ id: 54535, word: "skincare" },
{ id: 54536, word: "skinhead" },
{ id: 54541, word: "skinless" },
{ id: 54542, word: "skinning" },
{ id: 54543, word: "skinny" },
{ id: 54544, word: "skintight" },
{ id: 54545, word: "skipper" },
{ id: 54546, word: "skipping" },
{ id: 54551, word: "skirmish" },
{ id: 54552, word: "skirt" },
{ id: 54553, word: "skittle" },
{ id: 54554, word: "skydiver" },
{ id: 54555, word: "skylight" },
{ id: 54556, word: "skyline" },
{ id: 54561, word: "skype" },
{ id: 54562, word: "skyrocket" },
{ id: 54563, word: "skyward" },
{ id: 54564, word: "slab" },
{ id: 54565, word: "slacked" },
{ id: 54566, word: "slacker" },
{ id: 54611, word: "slacking" },
{ id: 54612, word: "slackness" },
{ id: 54613, word: "slacks" },
{ id: 54614, word: "slain" },
{ id: 54615, word: "slam" },
{ id: 54616, word: "slander" },
{ id: 54621, word: "slang" },
{ id: 54622, word: "slapping" },
{ id: 54623, word: "slapstick" },
{ id: 54624, word: "slashed" },
{ id: 54625, word: "slashing" },
{ id: 54626, word: "slate" },
{ id: 54631, word: "slather" },
{ id: 54632, word: "slaw" },
{ id: 54633, word: "sled" },
{ id: 54634, word: "sleek" },
{ id: 54635, word: "sleep" },
{ id: 54636, word: "sleet" },
{ id: 54641, word: "sleeve" },
{ id: 54642, word: "slept" },
{ id: 54643, word: "sliceable" },
{ id: 54644, word: "sliced" },
{ id: 54645, word: "slicer" },
{ id: 54646, word: "slicing" },
{ id: 54651, word: "slick" },
{ id: 54652, word: "slider" },
{ id: 54653, word: "slideshow" },
{ id: 54654, word: "sliding" },
{ id: 54655, word: "slighted" },
{ id: 54656, word: "slighting" },
{ id: 54661, word: "slightly" },
{ id: 54662, word: "slimness" },
{ id: 54663, word: "slimy" },
{ id: 54664, word: "slinging" },
{ id: 54665, word: "slingshot" },
{ id: 54666, word: "slinky" },
{ id: 55111, word: "slip" },
{ id: 55112, word: "slit" },
{ id: 55113, word: "sliver" },
{ id: 55114, word: "slobbery" },
{ id: 55115, word: "slogan" },
{ id: 55116, word: "sloped" },
{ id: 55121, word: "sloping" },
{ id: 55122, word: "sloppily" },
{ id: 55123, word: "sloppy" },
{ id: 55124, word: "slot" },
{ id: 55125, word: "slouching" },
{ id: 55126, word: "slouchy" },
{ id: 55131, word: "sludge" },
{ id: 55132, word: "slug" },
{ id: 55133, word: "slum" },
{ id: 55134, word: "slurp" },
{ id: 55135, word: "slush" },
{ id: 55136, word: "sly" },
{ id: 55141, word: "small" },
{ id: 55142, word: "smartly" },
{ id: 55143, word: "smartness" },
{ id: 55144, word: "smasher" },
{ id: 55145, word: "smashing" },
{ id: 55146, word: "smashup" },
{ id: 55151, word: "smell" },
{ id: 55152, word: "smelting" },
{ id: 55153, word: "smile" },
{ id: 55154, word: "smilingly" },
{ id: 55155, word: "smirk" },
{ id: 55156, word: "smite" },
{ id: 55161, word: "smith" },
{ id: 55162, word: "smitten" },
{ id: 55163, word: "smock" },
{ id: 55164, word: "smog" },
{ id: 55165, word: "smoked" },
{ id: 55166, word: "smokeless" },
{ id: 55211, word: "smokiness" },
{ id: 55212, word: "smoking" },
{ id: 55213, word: "smoky" },
{ id: 55214, word: "smolder" },
{ id: 55215, word: "smooth" },
{ id: 55216, word: "smother" },
{ id: 55221, word: "smudge" },
{ id: 55222, word: "smudgy" },
{ id: 55223, word: "smuggler" },
{ id: 55224, word: "smuggling" },
{ id: 55225, word: "smugly" },
{ id: 55226, word: "smugness" },
{ id: 55231, word: "snack" },
{ id: 55232, word: "snagged" },
{ id: 55233, word: "snaking" },
{ id: 55234, word: "snap" },
{ id: 55235, word: "snare" },
{ id: 55236, word: "snarl" },
{ id: 55241, word: "snazzy" },
{ id: 55242, word: "sneak" },
{ id: 55243, word: "sneer" },
{ id: 55244, word: "sneeze" },
{ id: 55245, word: "sneezing" },
{ id: 55246, word: "snide" },
{ id: 55251, word: "sniff" },
{ id: 55252, word: "snippet" },
{ id: 55253, word: "snipping" },
{ id: 55254, word: "snitch" },
{ id: 55255, word: "snooper" },
{ id: 55256, word: "snooze" },
{ id: 55261, word: "snore" },
{ id: 55262, word: "snoring" },
{ id: 55263, word: "snorkel" },
{ id: 55264, word: "snort" },
{ id: 55265, word: "snout" },
{ id: 55266, word: "snowbird" },
{ id: 55311, word: "snowboard" },
{ id: 55312, word: "snowbound" },
{ id: 55313, word: "snowcap" },
{ id: 55314, word: "snowdrift" },
{ id: 55315, word: "snowdrop" },
{ id: 55316, word: "snowfall" },
{ id: 55321, word: "snowfield" },
{ id: 55322, word: "snowflake" },
{ id: 55323, word: "snowiness" },
{ id: 55324, word: "snowless" },
{ id: 55325, word: "snowman" },
{ id: 55326, word: "snowplow" },
{ id: 55331, word: "snowshoe" },
{ id: 55332, word: "snowstorm" },
{ id: 55333, word: "snowsuit" },
{ id: 55334, word: "snowy" },
{ id: 55335, word: "snub" },
{ id: 55336, word: "snuff" },
{ id: 55341, word: "snuggle" },
{ id: 55342, word: "snugly" },
{ id: 55343, word: "snugness" },
{ id: 55344, word: "speak" },
{ id: 55345, word: "spearfish" },
{ id: 55346, word: "spearhead" },
{ id: 55351, word: "spearman" },
{ id: 55352, word: "spearmint" },
{ id: 55353, word: "species" },
{ id: 55354, word: "specimen" },
{ id: 55355, word: "specked" },
{ id: 55356, word: "speckled" },
{ id: 55361, word: "specks" },
{ id: 55362, word: "spectacle" },
{ id: 55363, word: "spectator" },
{ id: 55364, word: "spectrum" },
{ id: 55365, word: "speculate" },
{ id: 55366, word: "speech" },
{ id: 55411, word: "speed" },
{ id: 55412, word: "spellbind" },
{ id: 55413, word: "speller" },
{ id: 55414, word: "spelling" },
{ id: 55415, word: "spendable" },
{ id: 55416, word: "spender" },
{ id: 55421, word: "spending" },
{ id: 55422, word: "spent" },
{ id: 55423, word: "spew" },
{ id: 55424, word: "sphere" },
{ id: 55425, word: "spherical" },
{ id: 55426, word: "sphinx" },
{ id: 55431, word: "spider" },
{ id: 55432, word: "spied" },
{ id: 55433, word: "spiffy" },
{ id: 55434, word: "spill" },
{ id: 55435, word: "spilt" },
{ id: 55436, word: "spinach" },
{ id: 55441, word: "spinal" },
{ id: 55442, word: "spindle" },
{ id: 55443, word: "spinner" },
{ id: 55444, word: "spinning" },
{ id: 55445, word: "spinout" },
{ id: 55446, word: "spinster" },
{ id: 55451, word: "spiny" },
{ id: 55452, word: "spiral" },
{ id: 55453, word: "spirited" },
{ id: 55454, word: "spiritism" },
{ id: 55455, word: "spirits" },
{ id: 55456, word: "spiritual" },
{ id: 55461, word: "splashed" },
{ id: 55462, word: "splashing" },
{ id: 55463, word: "splashy" },
{ id: 55464, word: "splatter" },
{ id: 55465, word: "spleen" },
{ id: 55466, word: "splendid" },
{ id: 55511, word: "splendor" },
{ id: 55512, word: "splice" },
{ id: 55513, word: "splicing" },
{ id: 55514, word: "splinter" },
{ id: 55515, word: "splotchy" },
{ id: 55516, word: "splurge" },
{ id: 55521, word: "spoilage" },
{ id: 55522, word: "spoiled" },
{ id: 55523, word: "spoiler" },
{ id: 55524, word: "spoiling" },
{ id: 55525, word: "spoils" },
{ id: 55526, word: "spoken" },
{ id: 55531, word: "spokesman" },
{ id: 55532, word: "sponge" },
{ id: 55533, word: "spongy" },
{ id: 55534, word: "sponsor" },
{ id: 55535, word: "spoof" },
{ id: 55536, word: "spookily" },
{ id: 55541, word: "spooky" },
{ id: 55542, word: "spool" },
{ id: 55543, word: "spoon" },
{ id: 55544, word: "spore" },
{ id: 55545, word: "sporting" },
{ id: 55546, word: "sports" },
{ id: 55551, word: "sporty" },
{ id: 55552, word: "spotless" },
{ id: 55553, word: "spotlight" },
{ id: 55554, word: "spotted" },
{ id: 55555, word: "spotter" },
{ id: 55556, word: "spotting" },
{ id: 55561, word: "spotty" },
{ id: 55562, word: "spousal" },
{ id: 55563, word: "spouse" },
{ id: 55564, word: "spout" },
{ id: 55565, word: "sprain" },
{ id: 55566, word: "sprang" },
{ id: 55611, word: "sprawl" },
{ id: 55612, word: "spray" },
{ id: 55613, word: "spree" },
{ id: 55614, word: "sprig" },
{ id: 55615, word: "spring" },
{ id: 55616, word: "sprinkled" },
{ id: 55621, word: "sprinkler" },
{ id: 55622, word: "sprint" },
{ id: 55623, word: "sprite" },
{ id: 55624, word: "sprout" },
{ id: 55625, word: "spruce" },
{ id: 55626, word: "sprung" },
{ id: 55631, word: "spry" },
{ id: 55632, word: "spud" },
{ id: 55633, word: "spur" },
{ id: 55634, word: "sputter" },
{ id: 55635, word: "spyglass" },
{ id: 55636, word: "squabble" },
{ id: 55641, word: "squad" },
{ id: 55642, word: "squall" },
{ id: 55643, word: "squander" },
{ id: 55644, word: "squash" },
{ id: 55645, word: "squatted" },
{ id: 55646, word: "squatter" },
{ id: 55651, word: "squatting" },
{ id: 55652, word: "squeak" },
{ id: 55653, word: "squealer" },
{ id: 55654, word: "squealing" },
{ id: 55655, word: "squeamish" },
{ id: 55656, word: "squeegee" },
{ id: 55661, word: "squeeze" },
{ id: 55662, word: "squeezing" },
{ id: 55663, word: "squid" },
{ id: 55664, word: "squiggle" },
{ id: 55665, word: "squiggly" },
{ id: 55666, word: "squint" },
{ id: 56111, word: "squire" },
{ id: 56112, word: "squirt" },
{ id: 56113, word: "squishier" },
{ id: 56114, word: "squishy" },
{ id: 56115, word: "stability" },
{ id: 56116, word: "stabilize" },
{ id: 56121, word: "stable" },
{ id: 56122, word: "stack" },
{ id: 56123, word: "stadium" },
{ id: 56124, word: "staff" },
{ id: 56125, word: "stage" },
{ id: 56126, word: "staging" },
{ id: 56131, word: "stagnant" },
{ id: 56132, word: "stagnate" },
{ id: 56133, word: "stainable" },
{ id: 56134, word: "stained" },
{ id: 56135, word: "staining" },
{ id: 56136, word: "stainless" },
{ id: 56141, word: "stalemate" },
{ id: 56142, word: "staleness" },
{ id: 56143, word: "stalling" },
{ id: 56144, word: "stallion" },
{ id: 56145, word: "stamina" },
{ id: 56146, word: "stammer" },
{ id: 56151, word: "stamp" },
{ id: 56152, word: "stand" },
{ id: 56153, word: "stank" },
{ id: 56154, word: "staple" },
{ id: 56155, word: "stapling" },
{ id: 56156, word: "starboard" },
{ id: 56161, word: "starch" },
{ id: 56162, word: "stardom" },
{ id: 56163, word: "stardust" },
{ id: 56164, word: "starfish" },
{ id: 56165, word: "stargazer" },
{ id: 56166, word: "staring" },
{ id: 56211, word: "stark" },
{ id: 56212, word: "starless" },
{ id: 56213, word: "starlet" },
{ id: 56214, word: "starlight" },
{ id: 56215, word: "starlit" },
{ id: 56216, word: "starring" },
{ id: 56221, word: "starry" },
{ id: 56222, word: "starship" },
{ id: 56223, word: "starter" },
{ id: 56224, word: "starting" },
{ id: 56225, word: "startle" },
{ id: 56226, word: "startling" },
{ id: 56231, word: "startup" },
{ id: 56232, word: "starved" },
{ id: 56233, word: "starving" },
{ id: 56234, word: "stash" },
{ id: 56235, word: "state" },
{ id: 56236, word: "static" },
{ id: 56241, word: "statistic" },
{ id: 56242, word: "statue" },
{ id: 56243, word: "stature" },
{ id: 56244, word: "status" },
{ id: 56245, word: "statute" },
{ id: 56246, word: "statutory" },
{ id: 56251, word: "staunch" },
{ id: 56252, word: "stays" },
{ id: 56253, word: "steadfast" },
{ id: 56254, word: "steadier" },
{ id: 56255, word: "steadily" },
{ id: 56256, word: "steadying" },
{ id: 56261, word: "steam" },
{ id: 56262, word: "steed" },
{ id: 56263, word: "steep" },
{ id: 56264, word: "steerable" },
{ id: 56265, word: "steering" },
{ id: 56266, word: "steersman" },
{ id: 56311, word: "stegosaur" },
{ id: 56312, word: "stellar" },
{ id: 56313, word: "stem" },
{ id: 56314, word: "stench" },
{ id: 56315, word: "stencil" },
{ id: 56316, word: "step" },
{ id: 56321, word: "stereo" },
{ id: 56322, word: "sterile" },
{ id: 56323, word: "sterility" },
{ id: 56324, word: "sterilize" },
{ id: 56325, word: "sterling" },
{ id: 56326, word: "sternness" },
{ id: 56331, word: "sternum" },
{ id: 56332, word: "stew" },
{ id: 56333, word: "stick" },
{ id: 56334, word: "stiffen" },
{ id: 56335, word: "stiffly" },
{ id: 56336, word: "stiffness" },
{ id: 56341, word: "stifle" },
{ id: 56342, word: "stifling" },
{ id: 56343, word: "stillness" },
{ id: 56344, word: "stilt" },
{ id: 56345, word: "stimulant" },
{ id: 56346, word: "stimulate" },
{ id: 56351, word: "stimuli" },
{ id: 56352, word: "stimulus" },
{ id: 56353, word: "stinger" },
{ id: 56354, word: "stingily" },
{ id: 56355, word: "stinging" },
{ id: 56356, word: "stingray" },
{ id: 56361, word: "stingy" },
{ id: 56362, word: "stinking" },
{ id: 56363, word: "stinky" },
{ id: 56364, word: "stipend" },
{ id: 56365, word: "stipulate" },
{ id: 56366, word: "stir" },
{ id: 56411, word: "stitch" },
{ id: 56412, word: "stock" },
{ id: 56413, word: "stoic" },
{ id: 56414, word: "stoke" },
{ id: 56415, word: "stole" },
{ id: 56416, word: "stomp" },
{ id: 56421, word: "stonewall" },
{ id: 56422, word: "stoneware" },
{ id: 56423, word: "stonework" },
{ id: 56424, word: "stoning" },
{ id: 56425, word: "stony" },
{ id: 56426, word: "stood" },
{ id: 56431, word: "stooge" },
{ id: 56432, word: "stool" },
{ id: 56433, word: "stoop" },
{ id: 56434, word: "stoplight" },
{ id: 56435, word: "stoppable" },
{ id: 56436, word: "stoppage" },
{ id: 56441, word: "stopped" },
{ id: 56442, word: "stopper" },
{ id: 56443, word: "stopping" },
{ id: 56444, word: "stopwatch" },
{ id: 56445, word: "storable" },
{ id: 56446, word: "storage" },
{ id: 56451, word: "storeroom" },
{ id: 56452, word: "storewide" },
{ id: 56453, word: "storm" },
{ id: 56454, word: "stout" },
{ id: 56455, word: "stove" },
{ id: 56456, word: "stowaway" },
{ id: 56461, word: "stowing" },
{ id: 56462, word: "straddle" },
{ id: 56463, word: "straggler" },
{ id: 56464, word: "strained" },
{ id: 56465, word: "strainer" },
{ id: 56466, word: "straining" },
{ id: 56511, word: "strangely" },
{ id: 56512, word: "stranger" },
{ id: 56513, word: "strangle" },
{ id: 56514, word: "strategic" },
{ id: 56515, word: "strategy" },
{ id: 56516, word: "stratus" },
{ id: 56521, word: "straw" },
{ id: 56522, word: "stray" },
{ id: 56523, word: "streak" },
{ id: 56524, word: "stream" },
{ id: 56525, word: "street" },
{ id: 56526, word: "strength" },
{ id: 56531, word: "strenuous" },
{ id: 56532, word: "strep" },
{ id: 56533, word: "stress" },
{ id: 56534, word: "stretch" },
{ id: 56535, word: "strewn" },
{ id: 56536, word: "stricken" },
{ id: 56541, word: "strict" },
{ id: 56542, word: "stride" },
{ id: 56543, word: "strife" },
{ id: 56544, word: "strike" },
{ id: 56545, word: "striking" },
{ id: 56546, word: "strive" },
{ id: 56551, word: "striving" },
{ id: 56552, word: "strobe" },
{ id: 56553, word: "strode" },
{ id: 56554, word: "stroller" },
{ id: 56555, word: "strongbox" },
{ id: 56556, word: "strongly" },
{ id: 56561, word: "strongman" },
{ id: 56562, word: "struck" },
{ id: 56563, word: "structure" },
{ id: 56564, word: "strudel" },
{ id: 56565, word: "struggle" },
{ id: 56566, word: "strum" },
{ id: 56611, word: "strung" },
{ id: 56612, word: "strut" },
{ id: 56613, word: "stubbed" },
{ id: 56614, word: "stubble" },
{ id: 56615, word: "stubbly" },
{ id: 56616, word: "stubborn" },
{ id: 56621, word: "stucco" },
{ id: 56622, word: "stuck" },
{ id: 56623, word: "student" },
{ id: 56624, word: "studied" },
{ id: 56625, word: "studio" },
{ id: 56626, word: "study" },
{ id: 56631, word: "stuffed" },
{ id: 56632, word: "stuffing" },
{ id: 56633, word: "stuffy" },
{ id: 56634, word: "stumble" },
{ id: 56635, word: "stumbling" },
{ id: 56636, word: "stump" },
{ id: 56641, word: "stung" },
{ id: 56642, word: "stunned" },
{ id: 56643, word: "stunner" },
{ id: 56644, word: "stunning" },
{ id: 56645, word: "stunt" },
{ id: 56646, word: "stupor" },
{ id: 56651, word: "sturdily" },
{ id: 56652, word: "sturdy" },
{ id: 56653, word: "styling" },
{ id: 56654, word: "stylishly" },
{ id: 56655, word: "stylist" },
{ id: 56656, word: "stylized" },
{ id: 56661, word: "stylus" },
{ id: 56662, word: "suave" },
{ id: 56663, word: "subarctic" },
{ id: 56664, word: "subatomic" },
{ id: 56665, word: "subdivide" },
{ id: 56666, word: "subdued" },
{ id: 61111, word: "subduing" },
{ id: 61112, word: "subfloor" },
{ id: 61113, word: "subgroup" },
{ id: 61114, word: "subheader" },
{ id: 61115, word: "subject" },
{ id: 61116, word: "sublease" },
{ id: 61121, word: "sublet" },
{ id: 61122, word: "sublevel" },
{ id: 61123, word: "sublime" },
{ id: 61124, word: "submarine" },
{ id: 61125, word: "submerge" },
{ id: 61126, word: "submersed" },
{ id: 61131, word: "submitter" },
{ id: 61132, word: "subpanel" },
{ id: 61133, word: "subpar" },
{ id: 61134, word: "subplot" },
{ id: 61135, word: "subprime" },
{ id: 61136, word: "subscribe" },
<|fim▁hole|> { id: 61144, word: "subsiding" },
{ id: 61145, word: "subsidize" },
{ id: 61146, word: "subsidy" },
{ id: 61151, word: "subsoil" },
{ id: 61152, word: "subsonic" },
{ id: 61153, word: "substance" },
{ id: 61154, word: "subsystem" },
{ id: 61155, word: "subtext" },
{ id: 61156, word: "subtitle" },
{ id: 61161, word: "subtly" },
{ id: 61162, word: "subtotal" },
{ id: 61163, word: "subtract" },
{ id: 61164, word: "subtype" },
{ id: 61165, word: "suburb" },
{ id: 61166, word: "subway" },
{ id: 61211, word: "subwoofer" },
{ id: 61212, word: "subzero" },
{ id: 61213, word: "succulent" },
{ id: 61214, word: "such" },
{ id: 61215, word: "suction" },
{ id: 61216, word: "sudden" },
{ id: 61221, word: "sudoku" },
{ id: 61222, word: "suds" },
{ id: 61223, word: "sufferer" },
{ id: 61224, word: "suffering" },
{ id: 61225, word: "suffice" },
{ id: 61226, word: "suffix" },
{ id: 61231, word: "suffocate" },
{ id: 61232, word: "suffrage" },
{ id: 61233, word: "sugar" },
{ id: 61234, word: "suggest" },
{ id: 61235, word: "suing" },
{ id: 61236, word: "suitable" },
{ id: 61241, word: "suitably" },
{ id: 61242, word: "suitcase" },
{ id: 61243, word: "suitor" },
{ id: 61244, word: "sulfate" },
{ id: 61245, word: "sulfide" },
{ id: 61246, word: "sulfite" },
{ id: 61251, word: "sulfur" },
{ id: 61252, word: "sulk" },
{ id: 61253, word: "sullen" },
{ id: 61254, word: "sulphate" },
{ id: 61255, word: "sulphuric" },
{ id: 61256, word: "sultry" },
{ id: 61261, word: "superbowl" },
{ id: 61262, word: "superglue" },
{ id: 61263, word: "superhero" },
{ id: 61264, word: "superior" },
{ id: 61265, word: "superjet" },
{ id: 61266, word: "superman" },
{ id: 61311, word: "supermom" },
{ id: 61312, word: "supernova" },
{ id: 61313, word: "supervise" },
{ id: 61314, word: "supper" },
{ id: 61315, word: "supplier" },
{ id: 61316, word: "supply" },
{ id: 61321, word: "support" },
{ id: 61322, word: "supremacy" },
{ id: 61323, word: "supreme" },
{ id: 61324, word: "surcharge" },
{ id: 61325, word: "surely" },
{ id: 61326, word: "sureness" },
{ id: 61331, word: "surface" },
{ id: 61332, word: "surfacing" },
{ id: 61333, word: "surfboard" },
{ id: 61334, word: "surfer" },
{ id: 61335, word: "surgery" },
{ id: 61336, word: "surgical" },
{ id: 61341, word: "surging" },
{ id: 61342, word: "surname" },
{ id: 61343, word: "surpass" },
{ id: 61344, word: "surplus" },
{ id: 61345, word: "surprise" },
{ id: 61346, word: "surreal" },
{ id: 61351, word: "surrender" },
{ id: 61352, word: "surrogate" },
{ id: 61353, word: "surround" },
{ id: 61354, word: "survey" },
{ id: 61355, word: "survival" },
{ id: 61356, word: "survive" },
{ id: 61361, word: "surviving" },
{ id: 61362, word: "survivor" },
{ id: 61363, word: "sushi" },
{ id: 61364, word: "suspect" },
{ id: 61365, word: "suspend" },
{ id: 61366, word: "suspense" },
{ id: 61411, word: "sustained" },
{ id: 61412, word: "sustainer" },
{ id: 61413, word: "swab" },
{ id: 61414, word: "swaddling" },
{ id: 61415, word: "swagger" },
{ id: 61416, word: "swampland" },
{ id: 61421, word: "swan" },
{ id: 61422, word: "swapping" },
{ id: 61423, word: "swarm" },
{ id: 61424, word: "sway" },
{ id: 61425, word: "swear" },
{ id: 61426, word: "sweat" },
{ id: 61431, word: "sweep" },
{ id: 61432, word: "swell" },
{ id: 61433, word: "swept" },
{ id: 61434, word: "swerve" },
{ id: 61435, word: "swifter" },
{ id: 61436, word: "swiftly" },
{ id: 61441, word: "swiftness" },
{ id: 61442, word: "swimmable" },
{ id: 61443, word: "swimmer" },
{ id: 61444, word: "swimming" },
{ id: 61445, word: "swimsuit" },
{ id: 61446, word: "swimwear" },
{ id: 61451, word: "swinger" },
{ id: 61452, word: "swinging" },
{ id: 61453, word: "swipe" },
{ id: 61454, word: "swirl" },
{ id: 61455, word: "switch" },
{ id: 61456, word: "swivel" },
{ id: 61461, word: "swizzle" },
{ id: 61462, word: "swooned" },
{ id: 61463, word: "swoop" },
{ id: 61464, word: "swoosh" },
{ id: 61465, word: "swore" },
{ id: 61466, word: "sworn" },
{ id: 61511, word: "swung" },
{ id: 61512, word: "sycamore" },
{ id: 61513, word: "sympathy" },
{ id: 61514, word: "symphonic" },
{ id: 61515, word: "symphony" },
{ id: 61516, word: "symptom" },
{ id: 61521, word: "synapse" },
{ id: 61522, word: "syndrome" },
{ id: 61523, word: "synergy" },
{ id: 61524, word: "synopses" },
{ id: 61525, word: "synopsis" },
{ id: 61526, word: "synthesis" },
{ id: 61531, word: "synthetic" },
{ id: 61532, word: "syrup" },
{ id: 61533, word: "system" },
{ id: 61534, word: "t-shirt" },
{ id: 61535, word: "tabasco" },
{ id: 61536, word: "tabby" },
{ id: 61541, word: "tableful" },
{ id: 61542, word: "tables" },
{ id: 61543, word: "tablet" },
{ id: 61544, word: "tableware" },
{ id: 61545, word: "tabloid" },
{ id: 61546, word: "tackiness" },
{ id: 61551, word: "tacking" },
{ id: 61552, word: "tackle" },
{ id: 61553, word: "tackling" },
{ id: 61554, word: "tacky" },
{ id: 61555, word: "taco" },
{ id: 61556, word: "tactful" },
{ id: 61561, word: "tactical" },
{ id: 61562, word: "tactics" },
{ id: 61563, word: "tactile" },
{ id: 61564, word: "tactless" },
{ id: 61565, word: "tadpole" },
{ id: 61566, word: "taekwondo" },
{ id: 61611, word: "tag" },
{ id: 61612, word: "tainted" },
{ id: 61613, word: "take" },
{ id: 61614, word: "taking" },
{ id: 61615, word: "talcum" },
{ id: 61616, word: "talisman" },
{ id: 61621, word: "tall" },
{ id: 61622, word: "talon" },
{ id: 61623, word: "tamale" },
{ id: 61624, word: "tameness" },
{ id: 61625, word: "tamer" },
{ id: 61626, word: "tamper" },
{ id: 61631, word: "tank" },
{ id: 61632, word: "tanned" },
{ id: 61633, word: "tannery" },
{ id: 61634, word: "tanning" },
{ id: 61635, word: "tantrum" },
{ id: 61636, word: "tapeless" },
{ id: 61641, word: "tapered" },
{ id: 61642, word: "tapering" },
{ id: 61643, word: "tapestry" },
{ id: 61644, word: "tapioca" },
{ id: 61645, word: "tapping" },
{ id: 61646, word: "taps" },
{ id: 61651, word: "tarantula" },
{ id: 61652, word: "target" },
{ id: 61653, word: "tarmac" },
{ id: 61654, word: "tarnish" },
{ id: 61655, word: "tarot" },
{ id: 61656, word: "tartar" },
{ id: 61661, word: "tartly" },
{ id: 61662, word: "tartness" },
{ id: 61663, word: "task" },
{ id: 61664, word: "tassel" },
{ id: 61665, word: "taste" },
{ id: 61666, word: "tastiness" },
{ id: 62111, word: "tasting" },
{ id: 62112, word: "tasty" },
{ id: 62113, word: "tattered" },
{ id: 62114, word: "tattle" },
{ id: 62115, word: "tattling" },
{ id: 62116, word: "tattoo" },
{ id: 62121, word: "taunt" },
{ id: 62122, word: "tavern" },
{ id: 62123, word: "thank" },
{ id: 62124, word: "that" },
{ id: 62125, word: "thaw" },
{ id: 62126, word: "theater" },
{ id: 62131, word: "theatrics" },
{ id: 62132, word: "thee" },
{ id: 62133, word: "theft" },
{ id: 62134, word: "theme" },
{ id: 62135, word: "theology" },
{ id: 62136, word: "theorize" },
{ id: 62141, word: "thermal" },
{ id: 62142, word: "thermos" },
{ id: 62143, word: "thesaurus" },
{ id: 62144, word: "these" },
{ id: 62145, word: "thesis" },
{ id: 62146, word: "thespian" },
{ id: 62151, word: "thicken" },
{ id: 62152, word: "thicket" },
{ id: 62153, word: "thickness" },
{ id: 62154, word: "thieving" },
{ id: 62155, word: "thievish" },
{ id: 62156, word: "thigh" },
{ id: 62161, word: "thimble" },
{ id: 62162, word: "thing" },
{ id: 62163, word: "think" },
{ id: 62164, word: "thinly" },
{ id: 62165, word: "thinner" },
{ id: 62166, word: "thinness" },
{ id: 62211, word: "thinning" },
{ id: 62212, word: "thirstily" },
{ id: 62213, word: "thirsting" },
{ id: 62214, word: "thirsty" },
{ id: 62215, word: "thirteen" },
{ id: 62216, word: "thirty" },
{ id: 62221, word: "thong" },
{ id: 62222, word: "thorn" },
{ id: 62223, word: "those" },
{ id: 62224, word: "thousand" },
{ id: 62225, word: "thrash" },
{ id: 62226, word: "thread" },
{ id: 62231, word: "threaten" },
{ id: 62232, word: "threefold" },
{ id: 62233, word: "thrift" },
{ id: 62234, word: "thrill" },
{ id: 62235, word: "thrive" },
{ id: 62236, word: "thriving" },
{ id: 62241, word: "throat" },
{ id: 62242, word: "throbbing" },
{ id: 62243, word: "throng" },
{ id: 62244, word: "throttle" },
{ id: 62245, word: "throwaway" },
{ id: 62246, word: "throwback" },
{ id: 62251, word: "thrower" },
{ id: 62252, word: "throwing" },
{ id: 62253, word: "thud" },
{ id: 62254, word: "thumb" },
{ id: 62255, word: "thumping" },
{ id: 62256, word: "thursday" },
{ id: 62261, word: "thus" },
{ id: 62262, word: "thwarting" },
{ id: 62263, word: "thyself" },
{ id: 62264, word: "tiara" },
{ id: 62265, word: "tibia" },
{ id: 62266, word: "tidal" },
{ id: 62311, word: "tidbit" },
{ id: 62312, word: "tidiness" },
{ id: 62313, word: "tidings" },
{ id: 62314, word: "tidy" },
{ id: 62315, word: "tiger" },
{ id: 62316, word: "tighten" },
{ id: 62321, word: "tightly" },
{ id: 62322, word: "tightness" },
{ id: 62323, word: "tightrope" },
{ id: 62324, word: "tightwad" },
{ id: 62325, word: "tigress" },
{ id: 62326, word: "tile" },
{ id: 62331, word: "tiling" },
{ id: 62332, word: "till" },
{ id: 62333, word: "tilt" },
{ id: 62334, word: "timid" },
{ id: 62335, word: "timing" },
{ id: 62336, word: "timothy" },
{ id: 62341, word: "tinderbox" },
{ id: 62342, word: "tinfoil" },
{ id: 62343, word: "tingle" },
{ id: 62344, word: "tingling" },
{ id: 62345, word: "tingly" },
{ id: 62346, word: "tinker" },
{ id: 62351, word: "tinkling" },
{ id: 62352, word: "tinsel" },
{ id: 62353, word: "tinsmith" },
{ id: 62354, word: "tint" },
{ id: 62355, word: "tinwork" },
{ id: 62356, word: "tiny" },
{ id: 62361, word: "tipoff" },
{ id: 62362, word: "tipped" },
{ id: 62363, word: "tipper" },
{ id: 62364, word: "tipping" },
{ id: 62365, word: "tiptoeing" },
{ id: 62366, word: "tiptop" },
{ id: 62411, word: "tiring" },
{ id: 62412, word: "tissue" },
{ id: 62413, word: "trace" },
{ id: 62414, word: "tracing" },
{ id: 62415, word: "track" },
{ id: 62416, word: "traction" },
{ id: 62421, word: "tractor" },
{ id: 62422, word: "trade" },
{ id: 62423, word: "trading" },
{ id: 62424, word: "tradition" },
{ id: 62425, word: "traffic" },
{ id: 62426, word: "tragedy" },
{ id: 62431, word: "trailing" },
{ id: 62432, word: "trailside" },
{ id: 62433, word: "train" },
{ id: 62434, word: "traitor" },
{ id: 62435, word: "trance" },
{ id: 62436, word: "tranquil" },
{ id: 62441, word: "transfer" },
{ id: 62442, word: "transform" },
{ id: 62443, word: "translate" },
{ id: 62444, word: "transpire" },
{ id: 62445, word: "transport" },
{ id: 62446, word: "transpose" },
{ id: 62451, word: "trapdoor" },
{ id: 62452, word: "trapeze" },
{ id: 62453, word: "trapezoid" },
{ id: 62454, word: "trapped" },
{ id: 62455, word: "trapper" },
{ id: 62456, word: "trapping" },
{ id: 62461, word: "traps" },
{ id: 62462, word: "trash" },
{ id: 62463, word: "travel" },
{ id: 62464, word: "traverse" },
{ id: 62465, word: "travesty" },
{ id: 62466, word: "tray" },
{ id: 62511, word: "treachery" },
{ id: 62512, word: "treading" },
{ id: 62513, word: "treadmill" },
{ id: 62514, word: "treason" },
{ id: 62515, word: "treat" },
{ id: 62516, word: "treble" },
{ id: 62521, word: "tree" },
{ id: 62522, word: "trekker" },
{ id: 62523, word: "tremble" },
{ id: 62524, word: "trembling" },
{ id: 62525, word: "tremor" },
{ id: 62526, word: "trench" },
{ id: 62531, word: "trend" },
{ id: 62532, word: "trespass" },
{ id: 62533, word: "triage" },
{ id: 62534, word: "trial" },
{ id: 62535, word: "triangle" },
{ id: 62536, word: "tribesman" },
{ id: 62541, word: "tribunal" },
{ id: 62542, word: "tribune" },
{ id: 62543, word: "tributary" },
{ id: 62544, word: "tribute" },
{ id: 62545, word: "triceps" },
{ id: 62546, word: "trickery" },
{ id: 62551, word: "trickily" },
{ id: 62552, word: "tricking" },
{ id: 62553, word: "trickle" },
{ id: 62554, word: "trickster" },
{ id: 62555, word: "tricky" },
{ id: 62556, word: "tricolor" },
{ id: 62561, word: "tricycle" },
{ id: 62562, word: "trident" },
{ id: 62563, word: "tried" },
{ id: 62564, word: "trifle" },
{ id: 62565, word: "trifocals" },
{ id: 62566, word: "trillion" },
{ id: 62611, word: "trilogy" },
{ id: 62612, word: "trimester" },
{ id: 62613, word: "trimmer" },
{ id: 62614, word: "trimming" },
{ id: 62615, word: "trimness" },
{ id: 62616, word: "trinity" },
{ id: 62621, word: "trio" },
{ id: 62622, word: "tripod" },
{ id: 62623, word: "tripping" },
{ id: 62624, word: "triumph" },
{ id: 62625, word: "trivial" },
{ id: 62626, word: "trodden" },
{ id: 62631, word: "trolling" },
{ id: 62632, word: "trombone" },
{ id: 62633, word: "trophy" },
{ id: 62634, word: "tropical" },
{ id: 62635, word: "tropics" },
{ id: 62636, word: "trouble" },
{ id: 62641, word: "troubling" },
{ id: 62642, word: "trough" },
{ id: 62643, word: "trousers" },
{ id: 62644, word: "trout" },
{ id: 62645, word: "trowel" },
{ id: 62646, word: "truce" },
{ id: 62651, word: "truck" },
{ id: 62652, word: "truffle" },
{ id: 62653, word: "trump" },
{ id: 62654, word: "trunks" },
{ id: 62655, word: "trustable" },
{ id: 62656, word: "trustee" },
{ id: 62661, word: "trustful" },
{ id: 62662, word: "trusting" },
{ id: 62663, word: "trustless" },
{ id: 62664, word: "truth" },
{ id: 62665, word: "try" },
{ id: 62666, word: "tubby" },
{ id: 63111, word: "tubeless" },
{ id: 63112, word: "tubular" },
{ id: 63113, word: "tucking" },
{ id: 63114, word: "tuesday" },
{ id: 63115, word: "tug" },
{ id: 63116, word: "tuition" },
{ id: 63121, word: "tulip" },
{ id: 63122, word: "tumble" },
{ id: 63123, word: "tumbling" },
{ id: 63124, word: "tummy" },
{ id: 63125, word: "turban" },
{ id: 63126, word: "turbine" },
{ id: 63131, word: "turbofan" },
{ id: 63132, word: "turbojet" },
{ id: 63133, word: "turbulent" },
{ id: 63134, word: "turf" },
{ id: 63135, word: "turkey" },
{ id: 63136, word: "turmoil" },
{ id: 63141, word: "turret" },
{ id: 63142, word: "turtle" },
{ id: 63143, word: "tusk" },
{ id: 63144, word: "tutor" },
{ id: 63145, word: "tutu" },
{ id: 63146, word: "tux" },
{ id: 63151, word: "tweak" },
{ id: 63152, word: "tweed" },
{ id: 63153, word: "tweet" },
{ id: 63154, word: "tweezers" },
{ id: 63155, word: "twelve" },
{ id: 63156, word: "twentieth" },
{ id: 63161, word: "twenty" },
{ id: 63162, word: "twerp" },
{ id: 63163, word: "twice" },
{ id: 63164, word: "twiddle" },
{ id: 63165, word: "twiddling" },
{ id: 63166, word: "twig" },
{ id: 63211, word: "twilight" },
{ id: 63212, word: "twine" },
{ id: 63213, word: "twins" },
{ id: 63214, word: "twirl" },
{ id: 63215, word: "twistable" },
{ id: 63216, word: "twisted" },
{ id: 63221, word: "twister" },
{ id: 63222, word: "twisting" },
{ id: 63223, word: "twisty" },
{ id: 63224, word: "twitch" },
{ id: 63225, word: "twitter" },
{ id: 63226, word: "tycoon" },
{ id: 63231, word: "tying" },
{ id: 63232, word: "tyke" },
{ id: 63233, word: "udder" },
{ id: 63234, word: "ultimate" },
{ id: 63235, word: "ultimatum" },
{ id: 63236, word: "ultra" },
{ id: 63241, word: "umbilical" },
{ id: 63242, word: "umbrella" },
{ id: 63243, word: "umpire" },
{ id: 63244, word: "unabashed" },
{ id: 63245, word: "unable" },
{ id: 63246, word: "unadorned" },
{ id: 63251, word: "unadvised" },
{ id: 63252, word: "unafraid" },
{ id: 63253, word: "unaired" },
{ id: 63254, word: "unaligned" },
{ id: 63255, word: "unaltered" },
{ id: 63256, word: "unarmored" },
{ id: 63261, word: "unashamed" },
{ id: 63262, word: "unaudited" },
{ id: 63263, word: "unawake" },
{ id: 63264, word: "unaware" },
{ id: 63265, word: "unbaked" },
{ id: 63266, word: "unbalance" },
{ id: 63311, word: "unbeaten" },
{ id: 63312, word: "unbend" },
{ id: 63313, word: "unbent" },
{ id: 63314, word: "unbiased" },
{ id: 63315, word: "unbitten" },
{ id: 63316, word: "unblended" },
{ id: 63321, word: "unblessed" },
{ id: 63322, word: "unblock" },
{ id: 63323, word: "unbolted" },
{ id: 63324, word: "unbounded" },
{ id: 63325, word: "unboxed" },
{ id: 63326, word: "unbraided" },
{ id: 63331, word: "unbridle" },
{ id: 63332, word: "unbroken" },
{ id: 63333, word: "unbuckled" },
{ id: 63334, word: "unbundle" },
{ id: 63335, word: "unburned" },
{ id: 63336, word: "unbutton" },
{ id: 63341, word: "uncanny" },
{ id: 63342, word: "uncapped" },
{ id: 63343, word: "uncaring" },
{ id: 63344, word: "uncertain" },
{ id: 63345, word: "unchain" },
{ id: 63346, word: "unchanged" },
{ id: 63351, word: "uncharted" },
{ id: 63352, word: "uncheck" },
{ id: 63353, word: "uncivil" },
{ id: 63354, word: "unclad" },
{ id: 63355, word: "unclaimed" },
{ id: 63356, word: "unclamped" },
{ id: 63361, word: "unclasp" },
{ id: 63362, word: "uncle" },
{ id: 63363, word: "unclip" },
{ id: 63364, word: "uncloak" },
{ id: 63365, word: "unclog" },
{ id: 63366, word: "unclothed" },
{ id: 63411, word: "uncoated" },
{ id: 63412, word: "uncoiled" },
{ id: 63413, word: "uncolored" },
{ id: 63414, word: "uncombed" },
{ id: 63415, word: "uncommon" },
{ id: 63416, word: "uncooked" },
{ id: 63421, word: "uncork" },
{ id: 63422, word: "uncorrupt" },
{ id: 63423, word: "uncounted" },
{ id: 63424, word: "uncouple" },
{ id: 63425, word: "uncouth" },
{ id: 63426, word: "uncover" },
{ id: 63431, word: "uncross" },
{ id: 63432, word: "uncrown" },
{ id: 63433, word: "uncrushed" },
{ id: 63434, word: "uncured" },
{ id: 63435, word: "uncurious" },
{ id: 63436, word: "uncurled" },
{ id: 63441, word: "uncut" },
{ id: 63442, word: "undamaged" },
{ id: 63443, word: "undated" },
{ id: 63444, word: "undaunted" },
{ id: 63445, word: "undead" },
{ id: 63446, word: "undecided" },
{ id: 63451, word: "undefined" },
{ id: 63452, word: "underage" },
{ id: 63453, word: "underarm" },
{ id: 63454, word: "undercoat" },
{ id: 63455, word: "undercook" },
{ id: 63456, word: "undercut" },
{ id: 63461, word: "underdog" },
{ id: 63462, word: "underdone" },
{ id: 63463, word: "underfed" },
{ id: 63464, word: "underfeed" },
{ id: 63465, word: "underfoot" },
{ id: 63466, word: "undergo" },
{ id: 63511, word: "undergrad" },
{ id: 63512, word: "underhand" },
{ id: 63513, word: "underline" },
{ id: 63514, word: "underling" },
{ id: 63515, word: "undermine" },
{ id: 63516, word: "undermost" },
{ id: 63521, word: "underpaid" },
{ id: 63522, word: "underpass" },
{ id: 63523, word: "underpay" },
{ id: 63524, word: "underrate" },
{ id: 63525, word: "undertake" },
{ id: 63526, word: "undertone" },
{ id: 63531, word: "undertook" },
{ id: 63532, word: "undertow" },
{ id: 63533, word: "underuse" },
{ id: 63534, word: "underwear" },
{ id: 63535, word: "underwent" },
{ id: 63536, word: "underwire" },
{ id: 63541, word: "undesired" },
{ id: 63542, word: "undiluted" },
{ id: 63543, word: "undivided" },
{ id: 63544, word: "undocked" },
{ id: 63545, word: "undoing" },
{ id: 63546, word: "undone" },
{ id: 63551, word: "undrafted" },
{ id: 63552, word: "undress" },
{ id: 63553, word: "undrilled" },
{ id: 63554, word: "undusted" },
{ id: 63555, word: "undying" },
{ id: 63556, word: "unearned" },
{ id: 63561, word: "unearth" },
{ id: 63562, word: "unease" },
{ id: 63563, word: "uneasily" },
{ id: 63564, word: "uneasy" },
{ id: 63565, word: "uneatable" },
{ id: 63566, word: "uneaten" },
{ id: 63611, word: "unedited" },
{ id: 63612, word: "unelected" },
{ id: 63613, word: "unending" },
{ id: 63614, word: "unengaged" },
{ id: 63615, word: "unenvied" },
{ id: 63616, word: "unequal" },
{ id: 63621, word: "unethical" },
{ id: 63622, word: "uneven" },
{ id: 63623, word: "unexpired" },
{ id: 63624, word: "unexposed" },
{ id: 63625, word: "unfailing" },
{ id: 63626, word: "unfair" },
{ id: 63631, word: "unfasten" },
{ id: 63632, word: "unfazed" },
{ id: 63633, word: "unfeeling" },
{ id: 63634, word: "unfiled" },
{ id: 63635, word: "unfilled" },
{ id: 63636, word: "unfitted" },
{ id: 63641, word: "unfitting" },
{ id: 63642, word: "unfixable" },
{ id: 63643, word: "unfixed" },
{ id: 63644, word: "unflawed" },
{ id: 63645, word: "unfocused" },
{ id: 63646, word: "unfold" },
{ id: 63651, word: "unfounded" },
{ id: 63652, word: "unframed" },
{ id: 63653, word: "unfreeze" },
{ id: 63654, word: "unfrosted" },
{ id: 63655, word: "unfrozen" },
{ id: 63656, word: "unfunded" },
{ id: 63661, word: "unglazed" },
{ id: 63662, word: "ungloved" },
{ id: 63663, word: "unglue" },
{ id: 63664, word: "ungodly" },
{ id: 63665, word: "ungraded" },
{ id: 63666, word: "ungreased" },
{ id: 64111, word: "unguarded" },
{ id: 64112, word: "unguided" },
{ id: 64113, word: "unhappily" },
{ id: 64114, word: "unhappy" },
{ id: 64115, word: "unharmed" },
{ id: 64116, word: "unhealthy" },
{ id: 64121, word: "unheard" },
{ id: 64122, word: "unhearing" },
{ id: 64123, word: "unheated" },
{ id: 64124, word: "unhelpful" },
{ id: 64125, word: "unhidden" },
{ id: 64126, word: "unhinge" },
{ id: 64131, word: "unhitched" },
{ id: 64132, word: "unholy" },
{ id: 64133, word: "unhook" },
{ id: 64134, word: "unicorn" },
{ id: 64135, word: "unicycle" },
{ id: 64136, word: "unified" },
{ id: 64141, word: "unifier" },
{ id: 64142, word: "uniformed" },
{ id: 64143, word: "uniformly" },
{ id: 64144, word: "unify" },
{ id: 64145, word: "unimpeded" },
{ id: 64146, word: "uninjured" },
{ id: 64151, word: "uninstall" },
{ id: 64152, word: "uninsured" },
{ id: 64153, word: "uninvited" },
{ id: 64154, word: "union" },
{ id: 64155, word: "uniquely" },
{ id: 64156, word: "unisexual" },
{ id: 64161, word: "unison" },
{ id: 64162, word: "unissued" },
{ id: 64163, word: "unit" },
{ id: 64164, word: "universal" },
{ id: 64165, word: "universe" },
{ id: 64166, word: "unjustly" },
{ id: 64211, word: "unkempt" },
{ id: 64212, word: "unkind" },
{ id: 64213, word: "unknotted" },
{ id: 64214, word: "unknowing" },
{ id: 64215, word: "unknown" },
{ id: 64216, word: "unlaced" },
{ id: 64221, word: "unlatch" },
{ id: 64222, word: "unlawful" },
{ id: 64223, word: "unleaded" },
{ id: 64224, word: "unlearned" },
{ id: 64225, word: "unleash" },
{ id: 64226, word: "unless" },
{ id: 64231, word: "unleveled" },
{ id: 64232, word: "unlighted" },
{ id: 64233, word: "unlikable" },
{ id: 64234, word: "unlimited" },
{ id: 64235, word: "unlined" },
{ id: 64236, word: "unlinked" },
{ id: 64241, word: "unlisted" },
{ id: 64242, word: "unlit" },
{ id: 64243, word: "unlivable" },
{ id: 64244, word: "unloaded" },
{ id: 64245, word: "unloader" },
{ id: 64246, word: "unlocked" },
{ id: 64251, word: "unlocking" },
{ id: 64252, word: "unlovable" },
{ id: 64253, word: "unloved" },
{ id: 64254, word: "unlovely" },
{ id: 64255, word: "unloving" },
{ id: 64256, word: "unluckily" },
{ id: 64261, word: "unlucky" },
{ id: 64262, word: "unmade" },
{ id: 64263, word: "unmanaged" },
{ id: 64264, word: "unmanned" },
{ id: 64265, word: "unmapped" },
{ id: 64266, word: "unmarked" },
{ id: 64311, word: "unmasked" },
{ id: 64312, word: "unmasking" },
{ id: 64313, word: "unmatched" },
{ id: 64314, word: "unmindful" },
{ id: 64315, word: "unmixable" },
{ id: 64316, word: "unmixed" },
{ id: 64321, word: "unmolded" },
{ id: 64322, word: "unmoral" },
{ id: 64323, word: "unmovable" },
{ id: 64324, word: "unmoved" },
{ id: 64325, word: "unmoving" },
{ id: 64326, word: "unnamable" },
{ id: 64331, word: "unnamed" },
{ id: 64332, word: "unnatural" },
{ id: 64333, word: "unneeded" },
{ id: 64334, word: "unnerve" },
{ id: 64335, word: "unnerving" },
{ id: 64336, word: "unnoticed" },
{ id: 64341, word: "unopened" },
{ id: 64342, word: "unopposed" },
{ id: 64343, word: "unpack" },
{ id: 64344, word: "unpadded" },
{ id: 64345, word: "unpaid" },
{ id: 64346, word: "unpainted" },
{ id: 64351, word: "unpaired" },
{ id: 64352, word: "unpaved" },
{ id: 64353, word: "unpeeled" },
{ id: 64354, word: "unpicked" },
{ id: 64355, word: "unpiloted" },
{ id: 64356, word: "unpinned" },
{ id: 64361, word: "unplanned" },
{ id: 64362, word: "unplanted" },
{ id: 64363, word: "unpleased" },
{ id: 64364, word: "unpledged" },
{ id: 64365, word: "unplowed" },
{ id: 64366, word: "unplug" },
{ id: 64411, word: "unpopular" },
{ id: 64412, word: "unproven" },
{ id: 64413, word: "unquote" },
{ id: 64414, word: "unranked" },
{ id: 64415, word: "unrated" },
{ id: 64416, word: "unraveled" },
{ id: 64421, word: "unreached" },
{ id: 64422, word: "unread" },
{ id: 64423, word: "unreal" },
{ id: 64424, word: "unreeling" },
{ id: 64425, word: "unrefined" },
{ id: 64426, word: "unrelated" },
{ id: 64431, word: "unrented" },
{ id: 64432, word: "unrest" },
{ id: 64433, word: "unretired" },
{ id: 64434, word: "unrevised" },
{ id: 64435, word: "unrigged" },
{ id: 64436, word: "unripe" },
{ id: 64441, word: "unrivaled" },
{ id: 64442, word: "unroasted" },
{ id: 64443, word: "unrobed" },
{ id: 64444, word: "unroll" },
{ id: 64445, word: "unruffled" },
{ id: 64446, word: "unruly" },
{ id: 64451, word: "unrushed" },
{ id: 64452, word: "unsaddle" },
{ id: 64453, word: "unsafe" },
{ id: 64454, word: "unsaid" },
{ id: 64455, word: "unsalted" },
{ id: 64456, word: "unsaved" },
{ id: 64461, word: "unsavory" },
{ id: 64462, word: "unscathed" },
{ id: 64463, word: "unscented" },
{ id: 64464, word: "unscrew" },
{ id: 64465, word: "unsealed" },
{ id: 64466, word: "unseated" },
{ id: 64511, word: "unsecured" },
{ id: 64512, word: "unseeing" },
{ id: 64513, word: "unseemly" },
{ id: 64514, word: "unseen" },
{ id: 64515, word: "unselect" },
{ id: 64516, word: "unselfish" },
{ id: 64521, word: "unsent" },
{ id: 64522, word: "unsettled" },
{ id: 64523, word: "unshackle" },
{ id: 64524, word: "unshaken" },
{ id: 64525, word: "unshaved" },
{ id: 64526, word: "unshaven" },
{ id: 64531, word: "unsheathe" },
{ id: 64532, word: "unshipped" },
{ id: 64533, word: "unsightly" },
{ id: 64534, word: "unsigned" },
{ id: 64535, word: "unskilled" },
{ id: 64536, word: "unsliced" },
{ id: 64541, word: "unsmooth" },
{ id: 64542, word: "unsnap" },
{ id: 64543, word: "unsocial" },
{ id: 64544, word: "unsoiled" },
{ id: 64545, word: "unsold" },
{ id: 64546, word: "unsolved" },
{ id: 64551, word: "unsorted" },
{ id: 64552, word: "unspoiled" },
{ id: 64553, word: "unspoken" },
{ id: 64554, word: "unstable" },
{ id: 64555, word: "unstaffed" },
{ id: 64556, word: "unstamped" },
{ id: 64561, word: "unsteady" },
{ id: 64562, word: "unsterile" },
{ id: 64563, word: "unstirred" },
{ id: 64564, word: "unstitch" },
{ id: 64565, word: "unstopped" },
{ id: 64566, word: "unstuck" },
{ id: 64611, word: "unstuffed" },
{ id: 64612, word: "unstylish" },
{ id: 64613, word: "unsubtle" },
{ id: 64614, word: "unsubtly" },
{ id: 64615, word: "unsuited" },
{ id: 64616, word: "unsure" },
{ id: 64621, word: "unsworn" },
{ id: 64622, word: "untagged" },
{ id: 64623, word: "untainted" },
{ id: 64624, word: "untaken" },
{ id: 64625, word: "untamed" },
{ id: 64626, word: "untangled" },
{ id: 64631, word: "untapped" },
{ id: 64632, word: "untaxed" },
{ id: 64633, word: "unthawed" },
{ id: 64634, word: "unthread" },
{ id: 64635, word: "untidy" },
{ id: 64636, word: "untie" },
{ id: 64641, word: "until" },
{ id: 64642, word: "untimed" },
{ id: 64643, word: "untimely" },
{ id: 64644, word: "untitled" },
{ id: 64645, word: "untoasted" },
{ id: 64646, word: "untold" },
{ id: 64651, word: "untouched" },
{ id: 64652, word: "untracked" },
{ id: 64653, word: "untrained" },
{ id: 64654, word: "untreated" },
{ id: 64655, word: "untried" },
{ id: 64656, word: "untrimmed" },
{ id: 64661, word: "untrue" },
{ id: 64662, word: "untruth" },
{ id: 64663, word: "unturned" },
{ id: 64664, word: "untwist" },
{ id: 64665, word: "untying" },
{ id: 64666, word: "unusable" },
{ id: 65111, word: "unused" },
{ id: 65112, word: "unusual" },
{ id: 65113, word: "unvalued" },
{ id: 65114, word: "unvaried" },
{ id: 65115, word: "unvarying" },
{ id: 65116, word: "unveiled" },
{ id: 65121, word: "unveiling" },
{ id: 65122, word: "unvented" },
{ id: 65123, word: "unviable" },
{ id: 65124, word: "unvisited" },
{ id: 65125, word: "unvocal" },
{ id: 65126, word: "unwanted" },
{ id: 65131, word: "unwarlike" },
{ id: 65132, word: "unwary" },
{ id: 65133, word: "unwashed" },
{ id: 65134, word: "unwatched" },
{ id: 65135, word: "unweave" },
{ id: 65136, word: "unwed" },
{ id: 65141, word: "unwelcome" },
{ id: 65142, word: "unwell" },
{ id: 65143, word: "unwieldy" },
{ id: 65144, word: "unwilling" },
{ id: 65145, word: "unwind" },
{ id: 65146, word: "unwired" },
{ id: 65151, word: "unwitting" },
{ id: 65152, word: "unwomanly" },
{ id: 65153, word: "unworldly" },
{ id: 65154, word: "unworn" },
{ id: 65155, word: "unworried" },
{ id: 65156, word: "unworthy" },
{ id: 65161, word: "unwound" },
{ id: 65162, word: "unwoven" },
{ id: 65163, word: "unwrapped" },
{ id: 65164, word: "unwritten" },
{ id: 65165, word: "unzip" },
{ id: 65166, word: "upbeat" },
{ id: 65211, word: "upchuck" },
{ id: 65212, word: "upcoming" },
{ id: 65213, word: "upcountry" },
{ id: 65214, word: "update" },
{ id: 65215, word: "upfront" },
{ id: 65216, word: "upgrade" },
{ id: 65221, word: "upheaval" },
{ id: 65222, word: "upheld" },
{ id: 65223, word: "uphill" },
{ id: 65224, word: "uphold" },
{ id: 65225, word: "uplifted" },
{ id: 65226, word: "uplifting" },
{ id: 65231, word: "upload" },
{ id: 65232, word: "upon" },
{ id: 65233, word: "upper" },
{ id: 65234, word: "upright" },
{ id: 65235, word: "uprising" },
{ id: 65236, word: "upriver" },
{ id: 65241, word: "uproar" },
{ id: 65242, word: "uproot" },
{ id: 65243, word: "upscale" },
{ id: 65244, word: "upside" },
{ id: 65245, word: "upstage" },
{ id: 65246, word: "upstairs" },
{ id: 65251, word: "upstart" },
{ id: 65252, word: "upstate" },
{ id: 65253, word: "upstream" },
{ id: 65254, word: "upstroke" },
{ id: 65255, word: "upswing" },
{ id: 65256, word: "uptake" },
{ id: 65261, word: "uptight" },
{ id: 65262, word: "uptown" },
{ id: 65263, word: "upturned" },
{ id: 65264, word: "upward" },
{ id: 65265, word: "upwind" },
{ id: 65266, word: "uranium" },
{ id: 65311, word: "urban" },
{ id: 65312, word: "urchin" },
{ id: 65313, word: "urethane" },
{ id: 65314, word: "urgency" },
{ id: 65315, word: "urgent" },
{ id: 65316, word: "urging" },
{ id: 65321, word: "urologist" },
{ id: 65322, word: "urology" },
{ id: 65323, word: "usable" },
{ id: 65324, word: "usage" },
{ id: 65325, word: "useable" },
{ id: 65326, word: "used" },
{ id: 65331, word: "uselessly" },
{ id: 65332, word: "user" },
{ id: 65333, word: "usher" },
{ id: 65334, word: "usual" },
{ id: 65335, word: "utensil" },
{ id: 65336, word: "utility" },
{ id: 65341, word: "utilize" },
{ id: 65342, word: "utmost" },
{ id: 65343, word: "utopia" },
{ id: 65344, word: "utter" },
{ id: 65345, word: "vacancy" },
{ id: 65346, word: "vacant" },
{ id: 65351, word: "vacate" },
{ id: 65352, word: "vacation" },
{ id: 65353, word: "vagabond" },
{ id: 65354, word: "vagrancy" },
{ id: 65355, word: "vagrantly" },
{ id: 65356, word: "vaguely" },
{ id: 65361, word: "vagueness" },
{ id: 65362, word: "valiant" },
{ id: 65363, word: "valid" },
{ id: 65364, word: "valium" },
{ id: 65365, word: "valley" },
{ id: 65366, word: "valuables" },
{ id: 65411, word: "value" },
{ id: 65412, word: "vanilla" },
{ id: 65413, word: "vanish" },
{ id: 65414, word: "vanity" },
{ id: 65415, word: "vanquish" },
{ id: 65416, word: "vantage" },
{ id: 65421, word: "vaporizer" },
{ id: 65422, word: "variable" },
{ id: 65423, word: "variably" },
{ id: 65424, word: "varied" },
{ id: 65425, word: "variety" },
{ id: 65426, word: "various" },
{ id: 65431, word: "varmint" },
{ id: 65432, word: "varnish" },
{ id: 65433, word: "varsity" },
{ id: 65434, word: "varying" },
{ id: 65435, word: "vascular" },
{ id: 65436, word: "vaseline" },
{ id: 65441, word: "vastly" },
{ id: 65442, word: "vastness" },
{ id: 65443, word: "veal" },
{ id: 65444, word: "vegan" },
{ id: 65445, word: "veggie" },
{ id: 65446, word: "vehicular" },
{ id: 65451, word: "velcro" },
{ id: 65452, word: "velocity" },
{ id: 65453, word: "velvet" },
{ id: 65454, word: "vendetta" },
{ id: 65455, word: "vending" },
{ id: 65456, word: "vendor" },
{ id: 65461, word: "veneering" },
{ id: 65462, word: "vengeful" },
{ id: 65463, word: "venomous" },
{ id: 65464, word: "ventricle" },
{ id: 65465, word: "venture" },
{ id: 65466, word: "venue" },
{ id: 65511, word: "venus" },
{ id: 65512, word: "verbalize" },
{ id: 65513, word: "verbally" },
{ id: 65514, word: "verbose" },
{ id: 65515, word: "verdict" },
{ id: 65516, word: "verify" },
{ id: 65521, word: "verse" },
{ id: 65522, word: "version" },
{ id: 65523, word: "versus" },
{ id: 65524, word: "vertebrae" },
{ id: 65525, word: "vertical" },
{ id: 65526, word: "vertigo" },
{ id: 65531, word: "very" },
{ id: 65532, word: "vessel" },
{ id: 65533, word: "vest" },
{ id: 65534, word: "veteran" },
{ id: 65535, word: "veto" },
{ id: 65536, word: "vexingly" },
{ id: 65541, word: "viability" },
{ id: 65542, word: "viable" },
{ id: 65543, word: "vibes" },
{ id: 65544, word: "vice" },
{ id: 65545, word: "vicinity" },
{ id: 65546, word: "victory" },
{ id: 65551, word: "video" },
{ id: 65552, word: "viewable" },
{ id: 65553, word: "viewer" },
{ id: 65554, word: "viewing" },
{ id: 65555, word: "viewless" },
{ id: 65556, word: "viewpoint" },
{ id: 65561, word: "vigorous" },
{ id: 65562, word: "village" },
{ id: 65563, word: "villain" },
{ id: 65564, word: "vindicate" },
{ id: 65565, word: "vineyard" },
{ id: 65566, word: "vintage" },
{ id: 65611, word: "violate" },
{ id: 65612, word: "violation" },
{ id: 65613, word: "violator" },
{ id: 65614, word: "violet" },
{ id: 65615, word: "violin" },
{ id: 65616, word: "viper" },
{ id: 65621, word: "viral" },
{ id: 65622, word: "virtual" },
{ id: 65623, word: "virtuous" },
{ id: 65624, word: "virus" },
{ id: 65625, word: "visa" },
{ id: 65626, word: "viscosity" },
{ id: 65631, word: "viscous" },
{ id: 65632, word: "viselike" },
{ id: 65633, word: "visible" },
{ id: 65634, word: "visibly" },
{ id: 65635, word: "vision" },
{ id: 65636, word: "visiting" },
{ id: 65641, word: "visitor" },
{ id: 65642, word: "visor" },
{ id: 65643, word: "vista" },
{ id: 65644, word: "vitality" },
{ id: 65645, word: "vitalize" },
{ id: 65646, word: "vitally" },
{ id: 65651, word: "vitamins" },
{ id: 65652, word: "vivacious" },
{ id: 65653, word: "vividly" },
{ id: 65654, word: "vividness" },
{ id: 65655, word: "vixen" },
{ id: 65656, word: "vocalist" },
{ id: 65661, word: "vocalize" },
{ id: 65662, word: "vocally" },
{ id: 65663, word: "vocation" },
{ id: 65664, word: "voice" },
{ id: 65665, word: "voicing" },
{ id: 65666, word: "void" },
{ id: 66111, word: "volatile" },
{ id: 66112, word: "volley" },
{ id: 66113, word: "voltage" },
{ id: 66114, word: "volumes" },
{ id: 66115, word: "voter" },
{ id: 66116, word: "voting" },
{ id: 66121, word: "voucher" },
{ id: 66122, word: "vowed" },
{ id: 66123, word: "vowel" },
{ id: 66124, word: "voyage" },
{ id: 66125, word: "wackiness" },
{ id: 66126, word: "wad" },
{ id: 66131, word: "wafer" },
{ id: 66132, word: "waffle" },
{ id: 66133, word: "waged" },
{ id: 66134, word: "wager" },
{ id: 66135, word: "wages" },
{ id: 66136, word: "waggle" },
{ id: 66141, word: "wagon" },
{ id: 66142, word: "wake" },
{ id: 66143, word: "waking" },
{ id: 66144, word: "walk" },
{ id: 66145, word: "walmart" },
{ id: 66146, word: "walnut" },
{ id: 66151, word: "walrus" },
{ id: 66152, word: "waltz" },
{ id: 66153, word: "wand" },
{ id: 66154, word: "wannabe" },
{ id: 66155, word: "wanted" },
{ id: 66156, word: "wanting" },
{ id: 66161, word: "wasabi" },
{ id: 66162, word: "washable" },
{ id: 66163, word: "washbasin" },
{ id: 66164, word: "washboard" },
{ id: 66165, word: "washbowl" },
{ id: 66166, word: "washcloth" },
{ id: 66211, word: "washday" },
{ id: 66212, word: "washed" },
{ id: 66213, word: "washer" },
{ id: 66214, word: "washhouse" },
{ id: 66215, word: "washing" },
{ id: 66216, word: "washout" },
{ id: 66221, word: "washroom" },
{ id: 66222, word: "washstand" },
{ id: 66223, word: "washtub" },
{ id: 66224, word: "wasp" },
{ id: 66225, word: "wasting" },
{ id: 66226, word: "watch" },
{ id: 66231, word: "water" },
{ id: 66232, word: "waviness" },
{ id: 66233, word: "waving" },
{ id: 66234, word: "wavy" },
{ id: 66235, word: "whacking" },
{ id: 66236, word: "whacky" },
{ id: 66241, word: "wham" },
{ id: 66242, word: "wharf" },
{ id: 66243, word: "wheat" },
{ id: 66244, word: "whenever" },
{ id: 66245, word: "whiff" },
{ id: 66246, word: "whimsical" },
{ id: 66251, word: "whinny" },
{ id: 66252, word: "whiny" },
{ id: 66253, word: "whisking" },
{ id: 66254, word: "whoever" },
{ id: 66255, word: "whole" },
{ id: 66256, word: "whomever" },
{ id: 66261, word: "whoopee" },
{ id: 66262, word: "whooping" },
{ id: 66263, word: "whoops" },
{ id: 66264, word: "why" },
{ id: 66265, word: "wick" },
{ id: 66266, word: "widely" },
{ id: 66311, word: "widen" },
{ id: 66312, word: "widget" },
{ id: 66313, word: "widow" },
{ id: 66314, word: "width" },
{ id: 66315, word: "wieldable" },
{ id: 66316, word: "wielder" },
{ id: 66321, word: "wife" },
{ id: 66322, word: "wifi" },
{ id: 66323, word: "wikipedia" },
{ id: 66324, word: "wildcard" },
{ id: 66325, word: "wildcat" },
{ id: 66326, word: "wilder" },
{ id: 66331, word: "wildfire" },
{ id: 66332, word: "wildfowl" },
{ id: 66333, word: "wildland" },
{ id: 66334, word: "wildlife" },
{ id: 66335, word: "wildly" },
{ id: 66336, word: "wildness" },
{ id: 66341, word: "willed" },
{ id: 66342, word: "willfully" },
{ id: 66343, word: "willing" },
{ id: 66344, word: "willow" },
{ id: 66345, word: "willpower" },
{ id: 66346, word: "wilt" },
{ id: 66351, word: "wimp" },
{ id: 66352, word: "wince" },
{ id: 66353, word: "wincing" },
{ id: 66354, word: "wind" },
{ id: 66355, word: "wing" },
{ id: 66356, word: "winking" },
{ id: 66361, word: "winner" },
{ id: 66362, word: "winnings" },
{ id: 66363, word: "winter" },
{ id: 66364, word: "wipe" },
{ id: 66365, word: "wired" },
{ id: 66366, word: "wireless" },
{ id: 66411, word: "wiring" },
{ id: 66412, word: "wiry" },
{ id: 66413, word: "wisdom" },
{ id: 66414, word: "wise" },
{ id: 66415, word: "wish" },
{ id: 66416, word: "wisplike" },
{ id: 66421, word: "wispy" },
{ id: 66422, word: "wistful" },
{ id: 66423, word: "wizard" },
{ id: 66424, word: "wobble" },
{ id: 66425, word: "wobbling" },
{ id: 66426, word: "wobbly" },
{ id: 66431, word: "wok" },
{ id: 66432, word: "wolf" },
{ id: 66433, word: "wolverine" },
{ id: 66434, word: "womanhood" },
{ id: 66435, word: "womankind" },
{ id: 66436, word: "womanless" },
{ id: 66441, word: "womanlike" },
{ id: 66442, word: "womanly" },
{ id: 66443, word: "womb" },
{ id: 66444, word: "woof" },
{ id: 66445, word: "wooing" },
{ id: 66446, word: "wool" },
{ id: 66451, word: "woozy" },
{ id: 66452, word: "word" },
{ id: 66453, word: "work" },
{ id: 66454, word: "worried" },
{ id: 66455, word: "worrier" },
{ id: 66456, word: "worrisome" },
{ id: 66461, word: "worry" },
{ id: 66462, word: "worsening" },
{ id: 66463, word: "worshiper" },
{ id: 66464, word: "worst" },
{ id: 66465, word: "wound" },
{ id: 66466, word: "woven" },
{ id: 66511, word: "wow" },
{ id: 66512, word: "wrangle" },
{ id: 66513, word: "wrath" },
{ id: 66514, word: "wreath" },
{ id: 66515, word: "wreckage" },
{ id: 66516, word: "wrecker" },
{ id: 66521, word: "wrecking" },
{ id: 66522, word: "wrench" },
{ id: 66523, word: "wriggle" },
{ id: 66524, word: "wriggly" },
{ id: 66525, word: "wrinkle" },
{ id: 66526, word: "wrinkly" },
{ id: 66531, word: "wrist" },
{ id: 66532, word: "writing" },
{ id: 66533, word: "written" },
{ id: 66534, word: "wrongdoer" },
{ id: 66535, word: "wronged" },
{ id: 66536, word: "wrongful" },
{ id: 66541, word: "wrongly" },
{ id: 66542, word: "wrongness" },
{ id: 66543, word: "wrought" },
{ id: 66544, word: "xbox" },
{ id: 66545, word: "xerox" },
{ id: 66546, word: "yahoo" },
{ id: 66551, word: "yam" },
{ id: 66552, word: "yanking" },
{ id: 66553, word: "yapping" },
{ id: 66554, word: "yard" },
{ id: 66555, word: "yarn" },
{ id: 66556, word: "yeah" },
{ id: 66561, word: "yearbook" },
{ id: 66562, word: "yearling" },
{ id: 66563, word: "yearly" },
{ id: 66564, word: "yearning" },
{ id: 66565, word: "yeast" },
{ id: 66566, word: "yelling" },
{ id: 66611, word: "yelp" },
{ id: 66612, word: "yen" },
{ id: 66613, word: "yesterday" },
{ id: 66614, word: "yiddish" },
{ id: 66615, word: "yield" },
{ id: 66616, word: "yin" },
{ id: 66621, word: "yippee" },
{ id: 66622, word: "yo-yo" },
{ id: 66623, word: "yodel" },
{ id: 66624, word: "yoga" },
{ id: 66625, word: "yogurt" },
{ id: 66626, word: "yonder" },
{ id: 66631, word: "yoyo" },
{ id: 66632, word: "yummy" },
{ id: 66633, word: "zap" },
{ id: 66634, word: "zealous" },
{ id: 66635, word: "zebra" },
{ id: 66636, word: "zen" },
{ id: 66641, word: "zeppelin" },
{ id: 66642, word: "zero" },
{ id: 66643, word: "zestfully" },
{ id: 66644, word: "zesty" },
{ id: 66645, word: "zigzagged" },
{ id: 66646, word: "zipfile" },
{ id: 66651, word: "zipping" },
{ id: 66652, word: "zippy" },
{ id: 66653, word: "zips" },
{ id: 66654, word: "zit" },
{ id: 66655, word: "zodiac" },
{ id: 66656, word: "zombie" },
{ id: 66661, word: "zone" },
{ id: 66662, word: "zoning" },
{ id: 66663, word: "zookeeper" },
{ id: 66664, word: "zoologist" },
{ id: 66665, word: "zoology" },
{ id: 66666, word: "zoom" }
];
}<|fim▁end|> | { id: 61141, word: "subscript" },
{ id: 61142, word: "subsector" },
{ id: 61143, word: "subside" },
|
<|file_name|>auth_config.go<|end_file_name|><|fim▁begin|>package origin
import (
"crypto/md5"
"crypto/x509"
"fmt"
"net/url"
"code.google.com/p/go-uuid/uuid"
"github.com/GoogleCloudPlatform/kubernetes/pkg/tools"
"github.com/projectatomic/atomic-enterprise/pkg/auth/server/session"
configapi "github.com/projectatomic/atomic-enterprise/pkg/cmd/server/api"
"github.com/projectatomic/atomic-enterprise/pkg/cmd/server/api/latest"
"github.com/projectatomic/atomic-enterprise/pkg/cmd/server/etcd"
identityregistry "github.com/projectatomic/atomic-enterprise/pkg/user/registry/identity"
identityetcd "github.com/projectatomic/atomic-enterprise/pkg/user/registry/identity/etcd"
userregistry "github.com/projectatomic/atomic-enterprise/pkg/user/registry/user"
useretcd "github.com/projectatomic/atomic-enterprise/pkg/user/registry/user/etcd"
)
type AuthConfig struct {<|fim▁hole|> Options configapi.OAuthConfig
// TODO: possibly change to point to MasterConfig's version
OpenshiftEnabled bool
// AssetPublicAddresses contains valid redirectURI prefixes to direct browsers to the web console
AssetPublicAddresses []string
MasterRoots *x509.CertPool
EtcdHelper tools.EtcdHelper
UserRegistry userregistry.Registry
IdentityRegistry identityregistry.Registry
SessionAuth *session.Authenticator
}
func BuildAuthConfig(options configapi.MasterConfig) (*AuthConfig, error) {
client, err := etcd.GetAndTestEtcdClient(options.EtcdClientInfo)
if err != nil {
return nil, err
}
etcdHelper, err := NewEtcdHelper(client, options.EtcdStorageConfig.OpenShiftStorageVersion, options.EtcdStorageConfig.OpenShiftStoragePrefix)
if err != nil {
return nil, fmt.Errorf("Error setting up server storage: %v", err)
}
apiServerCAs, err := configapi.GetAPIServerCertCAPool(options)
if err != nil {
return nil, err
}
var sessionAuth *session.Authenticator
if options.OAuthConfig.SessionConfig != nil {
secure := isHTTPS(options.OAuthConfig.MasterPublicURL)
auth, err := BuildSessionAuth(secure, options.OAuthConfig.SessionConfig)
if err != nil {
return nil, err
}
sessionAuth = auth
}
// Build the list of valid redirect_uri prefixes for a login using the openshift-web-console client to redirect to
// TODO: allow configuring this
// TODO: remove hard-coding of development UI server
assetPublicURLs := []string{options.OAuthConfig.AssetPublicURL, "http://localhost:9000", "https://localhost:9000"}
userStorage := useretcd.NewREST(etcdHelper)
userRegistry := userregistry.NewRegistry(userStorage)
identityStorage := identityetcd.NewREST(etcdHelper)
identityRegistry := identityregistry.NewRegistry(identityStorage)
ret := &AuthConfig{
Options: *options.OAuthConfig,
OpenshiftEnabled: options.OpenshiftEnabled,
AssetPublicAddresses: assetPublicURLs,
MasterRoots: apiServerCAs,
EtcdHelper: etcdHelper,
IdentityRegistry: identityRegistry,
UserRegistry: userRegistry,
SessionAuth: sessionAuth,
}
return ret, nil
}
func BuildSessionAuth(secure bool, config *configapi.SessionConfig) (*session.Authenticator, error) {
secrets, err := getSessionSecrets(config.SessionSecretsFile)
if err != nil {
return nil, err
}
sessionStore := session.NewStore(secure, int(config.SessionMaxAgeSeconds), secrets...)
return session.NewAuthenticator(sessionStore, config.SessionName), nil
}
func getSessionSecrets(filename string) ([]string, error) {
// Build secrets list
secrets := []string{}
if len(filename) != 0 {
sessionSecrets, err := latest.ReadSessionSecrets(filename)
if err != nil {
return nil, fmt.Errorf("error reading sessionSecretsFile %s: %v", filename, err)
}
if len(sessionSecrets.Secrets) == 0 {
return nil, fmt.Errorf("sessionSecretsFile %s contained no secrets", filename)
}
for _, s := range sessionSecrets.Secrets {
secrets = append(secrets, s.Authentication)
secrets = append(secrets, s.Encryption)
}
} else {
// Generate random signing and encryption secrets if none are specified in config
secrets = append(secrets, fmt.Sprintf("%x", md5.Sum([]byte(uuid.NewRandom().String()))))
secrets = append(secrets, fmt.Sprintf("%x", md5.Sum([]byte(uuid.NewRandom().String()))))
}
return secrets, nil
}
// isHTTPS returns true if the given URL is a valid https URL
func isHTTPS(u string) bool {
parsedURL, err := url.Parse(u)
return err == nil && parsedURL.Scheme == "https"
}<|fim▁end|> | |
<|file_name|>AbstractInboundPatientDiscoveryDeferredRequest.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2012, United States Government, as represented by the Secretary of Health and Human Services.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above
* copyright notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the United States Government nor the<|fim▁hole|> * names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE UNITED STATES GOVERNMENT BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package gov.hhs.fha.nhinc.patientdiscovery.inbound.deferred.request;
import gov.hhs.fha.nhinc.aspect.InboundProcessingEvent;
import gov.hhs.fha.nhinc.common.nhinccommon.AssertionType;
import gov.hhs.fha.nhinc.nhinclib.NhincConstants;
import gov.hhs.fha.nhinc.patientdiscovery.PatientDiscoveryAuditor;
import gov.hhs.fha.nhinc.patientdiscovery.adapter.deferred.request.proxy.AdapterPatientDiscoveryDeferredReqProxy;
import gov.hhs.fha.nhinc.patientdiscovery.adapter.deferred.request.proxy.AdapterPatientDiscoveryDeferredReqProxyObjectFactory;
import gov.hhs.fha.nhinc.patientdiscovery.aspect.MCCIIN000002UV01EventDescriptionBuilder;
import gov.hhs.fha.nhinc.patientdiscovery.aspect.PRPAIN201305UV02EventDescriptionBuilder;
import org.hl7.v3.MCCIIN000002UV01;
import org.hl7.v3.PRPAIN201305UV02;
public abstract class AbstractInboundPatientDiscoveryDeferredRequest implements InboundPatientDiscoveryDeferredRequest {
private final AdapterPatientDiscoveryDeferredReqProxyObjectFactory adapterFactory;
public AbstractInboundPatientDiscoveryDeferredRequest(AdapterPatientDiscoveryDeferredReqProxyObjectFactory factory) {
adapterFactory = factory;
}
abstract MCCIIN000002UV01 process(PRPAIN201305UV02 request, AssertionType assertion);
abstract PatientDiscoveryAuditor getAuditLogger();
/**
* Processes the PD Deferred request message. This call will audit the message and send it to the Nhin.
*
* @param request
* @param assertion
* @return MCCIIN000002UV01
*/
@InboundProcessingEvent(beforeBuilder = PRPAIN201305UV02EventDescriptionBuilder.class,
afterReturningBuilder = MCCIIN000002UV01EventDescriptionBuilder.class,
serviceType = "Patient Discovery Deferred Request",
version = "1.0")
public MCCIIN000002UV01 respondingGatewayPRPAIN201305UV02(PRPAIN201305UV02 request, AssertionType assertion) {
auditRequestFromNhin(request, assertion);
MCCIIN000002UV01 response = process(request, assertion);
auditResponseToNhin(response, assertion);
return response;
}
protected MCCIIN000002UV01 sendToAdapter(PRPAIN201305UV02 request, AssertionType assertion) {
AdapterPatientDiscoveryDeferredReqProxy proxy = adapterFactory.getAdapterPatientDiscoveryDeferredReqProxy();
return proxy.processPatientDiscoveryAsyncReq(request, assertion);
}
private void auditRequestFromNhin(PRPAIN201305UV02 request, AssertionType assertion) {
getAuditLogger().auditNhinDeferred201305(request, assertion, NhincConstants.AUDIT_LOG_INBOUND_DIRECTION);
}
private void auditResponseToNhin(MCCIIN000002UV01 response, AssertionType assertion) {
getAuditLogger().auditAck(response, assertion, NhincConstants.AUDIT_LOG_OUTBOUND_DIRECTION,
NhincConstants.AUDIT_LOG_NHIN_INTERFACE);
}
protected void auditRequestToAdapter(PRPAIN201305UV02 request, AssertionType assertion) {
getAuditLogger().auditAdapterDeferred201305(request, assertion, NhincConstants.AUDIT_LOG_OUTBOUND_DIRECTION);
}
protected void auditResponseFromAdapter(MCCIIN000002UV01 response, AssertionType assertion) {
getAuditLogger().auditAck(response, assertion, NhincConstants.AUDIT_LOG_INBOUND_DIRECTION,
NhincConstants.AUDIT_LOG_ADAPTER_INTERFACE);
}
}<|fim▁end|> | |
<|file_name|>auth.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8; tab-width: 4; indent-tabs-mode: t -*-
#
# NetProfile: Authentication routines
# © Copyright 2013-2014 Alex 'Unik' Unigovsky
#
# This file is part of NetProfile.
# NetProfile is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# NetProfile is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General
# Public License along with NetProfile. If not, see
# <http://www.gnu.org/licenses/>.
from __future__ import (
unicode_literals,
print_function,
absolute_import,
division
)
import hashlib
import random
import string
import time
from zope.interface import implementer
from pyramid.interfaces import IAuthenticationPolicy
from pyramid.security import (
Authenticated,
Everyone
)
class PluginPolicySelected(object):
def __init__(self, request, policy):
self.request = request
self.policy = policy
@implementer(IAuthenticationPolicy)
class PluginAuthenticationPolicy(object):
def __init__(self, default, routes=None):
self._default = default
if routes is None:
routes = {}
self._routes = routes
def add_plugin(self, route, policy):
self._routes[route] = policy
def match(self, request):
if hasattr(request, 'auth_policy'):
return request.auth_policy
cur = None
cur_len = 0
for route, plug in self._routes.items():
r_len = len(route)
if r_len <= cur_len:
continue
path = request.path
if route == path[:r_len]:
if len(path) > r_len:
if path[r_len:r_len + 1] != '/':
continue
cur = plug
cur_len = r_len
if cur:
request.auth_policy = cur
else:
request.auth_policy = self._default
request.registry.notify(PluginPolicySelected(request, request.auth_policy))
return request.auth_policy
def authenticated_userid(self, request):
return self.match(request).authenticated_userid(request)
def unauthenticated_userid(self, request):
return self.match(request).unauthenticated_userid(request)
def effective_principals(self, request):
return self.match(request).effective_principals(request)
def remember(self, request, principal, **kw):
return self.match(request).remember(request, principal, **kw)
def forget(self, request):
return self.match(request).forget(request)
_TOKEN_FILTER_MAP = (
[chr(n) for n in range(32)] +
[chr(127), '\\', '"']
)
_TOKEN_FILTER_MAP = dict.fromkeys(_TOKEN_FILTER_MAP, None)
def _filter_token(tok):
return str(tok).translate(_TOKEN_FILTER_MAP)
def _format_kvpairs(**kwargs):
return ', '.join('{0!s}="{1}"'.format(k, _filter_token(v)) for (k, v) in kwargs.items())
def _generate_nonce(ts, secret, salt=None, chars=string.hexdigits.upper()):
# TODO: Add IP-address to nonce
if not salt:
try:
rng = random.SystemRandom()
except NotImplementedError:
rng = random
salt = ''.join(rng.choice(chars) for i in range(16))
ctx = hashlib.md5(('%s:%s:%s' % (ts, salt, secret)).encode())
return ('%s:%s:%s' % (ts, salt, ctx.hexdigest()))
def _is_valid_nonce(nonce, secret):
comp = nonce.split(':')
if len(comp) != 3:
return False
calc_nonce = _generate_nonce(comp[0], secret, comp[1])
if nonce == calc_nonce:
return True
return False
def _generate_digest_challenge(ts, secret, realm, opaque, stale=False):
nonce = _generate_nonce(ts, secret)
return 'Digest %s' % (_format_kvpairs(
realm=realm,
qop='auth',
nonce=nonce,
opaque=opaque,
algorithm='MD5',
stale='true' if stale else 'false'
),)
def _add_www_authenticate(request, secret, realm):
resp = request.response
if not resp.www_authenticate:
resp.www_authenticate = _generate_digest_challenge(
round(time.time()),
secret, realm, 'NPDIGEST'
)
def _parse_authorization(request, secret, realm):
authz = request.authorization
if (not authz) or (len(authz) != 2) or (authz[0] != 'Digest'):
_add_www_authenticate(request, secret, realm)
return None
params = authz[1]
if 'algorithm' not in params:
params['algorithm'] = 'MD5'
for required in ('username', 'realm', 'nonce', 'uri', 'response', 'cnonce', 'nc', 'opaque'):
if (required not in params) or ((required == 'opaque') and (params['opaque'] != 'NPDIGEST')):
_add_www_authenticate(request, secret, realm)
return None
return params
@implementer(IAuthenticationPolicy)
class DigestAuthenticationPolicy(object):
def __init__(self, secret, callback, realm='Realm'):<|fim▁hole|>
def authenticated_userid(self, request):
params = _parse_authorization(request, self.secret, self.realm)
if params is None:
return None
if not _is_valid_nonce(params['nonce'], self.secret):
_add_www_authenticate(request, self.secret, self.realm)
return None
userid = params['username']
if self.callback(params, request) is not None:
return 'u:%s' % userid
_add_www_authenticate(request, self.secret, self.realm)
def unauthenticated_userid(self, request):
params = _parse_authorization(request, self.secret, self.realm)
if params is None:
return None
if not _is_valid_nonce(params['nonce'], self.secret):
_add_www_authenticate(request, self.secret, self.realm)
return None
return 'u:%s' % params['username']
def effective_principals(self, request):
creds = [Everyone]
params = _parse_authorization(request, self.secret, self.realm)
if params is None:
return creds
if not _is_valid_nonce(params['nonce'], self.secret):
_add_www_authenticate(request, self.secret, self.realm)
return creds
groups = self.callback(params, request)
if groups is None:
return creds
creds.append(Authenticated)
creds.append('u:%s' % params['username'])
creds.extend(groups)
return creds
def remember(self, request, principal, *kw):
return []
def forget(self, request):
return [('WWW-Authenticate', _generate_digest_challenge(
round(time.time()),
self.secret,
self.realm,
'NPDIGEST'
))]<|fim▁end|> | self.secret = secret
self.callback = callback
self.realm = realm |
<|file_name|>face-detection-rectangle.js<|end_file_name|><|fim▁begin|>var cv = require('../lib/opencv');
var COLOR = [0, 255, 0]; // default red<|fim▁hole|> if (err) throw err;
if (im.width() < 1 || im.height() < 1) throw new Error('Image has no size');
im.detectObject('../data/haarcascade_frontalface_alt2.xml', {}, function(err, faces) {
if (err) throw err;
for (var i = 0; i < faces.length; i++) {
var face = faces[i];
im.rectangle([face.x, face.y], [face.width, face.height], COLOR, 2);
}
im.save('./tmp/face-detection-rectangle.png');
console.log('Image saved to ./tmp/face-detection-rectangle.png');
});
});<|fim▁end|> | var thickness = 2; // default 1
cv.readImage('./files/mona.png', function(err, im) { |
<|file_name|>plugin_class.rs<|end_file_name|><|fim▁begin|>use node::inner_node::node_from_ptr;
use node::{String, Uri};
use world::World;
#[derive(Clone)]
pub struct PluginClass<'w> {
pub(crate) ptr: *const ::lilv_sys::LilvPluginClass,
pub(crate) world: &'w World,
}
impl<'w> PluginClass<'w> {
pub(crate) fn new(ptr: *const ::lilv_sys::LilvPluginClass, world: &World) -> PluginClass {<|fim▁hole|>
pub fn parent_uri(&self) -> &Uri<'w> {
unsafe { node_from_ptr(::lilv_sys::lilv_plugin_class_get_parent_uri(self.ptr)) }
}
pub fn uri(&self) -> &Uri<'w> {
unsafe { node_from_ptr(::lilv_sys::lilv_plugin_class_get_uri(self.ptr)) }
}
pub fn label(&self) -> &String<'w> {
unsafe { node_from_ptr(::lilv_sys::lilv_plugin_class_get_label(self.ptr)) }
}
}<|fim▁end|> | PluginClass { ptr, world }
} |
<|file_name|>test_realizer_arbitrary_reordering.py<|end_file_name|><|fim▁begin|>import unittest
import tagging
class TestRealizerArbitraryReordering(unittest.TestCase):
"""<|fim▁hole|> """
def test_realize_output_in_order(self):
"""
Test for when source tokens occur
in the same relative order in the
target string
"""
editing_task = tagging.EditingTask(["word1 word2 <::::> word3 "])
tags_str = ['KEEP|0', 'KEEP|1', 'KEEP|and', 'DELETE', 'KEEP|3']
tags = [tagging.Tag(tag) for tag in tags_str]
result = editing_task.realize_output([tags])
expected = "word1 word2 and word3 "
self.assertEqual(expected, result)
def test_realize_output_out_of_order(self):
"""
Test for when the source tokens
do not occur in the same relative order
in the target string
"""
editing_task = tagging.EditingTask(["word1 word2 <::::> word3 "])
tags_str = ['KEEP|1', 'KEEP|0', 'KEEP|and', 'DELETE', 'KEEP|3']
tags = [tagging.Tag(tag) for tag in tags_str]
result = editing_task.realize_output([tags])
expected = "word2 word1 and word3 "
self.assertEqual(expected, result)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | Tests for the realizer with arbitrary reordering
enabled. |
<|file_name|>pipe_unix.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::c_str::CString;
use std::cast;
use std::io;
use std::libc;
use std::mem;
use std::rt::rtio;<|fim▁hole|>
use super::{IoResult, retry, keep_going};
use super::file::fd_t;
fn unix_socket(ty: libc::c_int) -> IoResult<fd_t> {
match unsafe { libc::socket(libc::AF_UNIX, ty, 0) } {
-1 => Err(super::last_error()),
fd => Ok(fd)
}
}
fn addr_to_sockaddr_un(addr: &CString) -> IoResult<(libc::sockaddr_storage, uint)> {
// the sun_path length is limited to SUN_LEN (with null)
assert!(mem::size_of::<libc::sockaddr_storage>() >=
mem::size_of::<libc::sockaddr_un>());
let mut storage: libc::sockaddr_storage = unsafe { intrinsics::init() };
let s: &mut libc::sockaddr_un = unsafe { cast::transmute(&mut storage) };
let len = addr.len();
if len > s.sun_path.len() - 1 {
return Err(io::IoError {
kind: io::InvalidInput,
desc: "path must be smaller than SUN_LEN",
detail: None,
})
}
s.sun_family = libc::AF_UNIX as libc::sa_family_t;
for (slot, value) in s.sun_path.mut_iter().zip(addr.iter()) {
*slot = value;
}
// count the null terminator
let len = mem::size_of::<libc::sa_family_t>() + len + 1;
return Ok((storage, len));
}
fn sockaddr_to_unix(storage: &libc::sockaddr_storage,
len: uint) -> IoResult<CString> {
match storage.ss_family as libc::c_int {
libc::AF_UNIX => {
assert!(len as uint <= mem::size_of::<libc::sockaddr_un>());
let storage: &libc::sockaddr_un = unsafe {
cast::transmute(storage)
};
unsafe {
Ok(CString::new(storage.sun_path.as_ptr(), false).clone())
}
}
_ => Err(io::standard_error(io::InvalidInput))
}
}
struct Inner {
fd: fd_t,
}
impl Drop for Inner {
fn drop(&mut self) { unsafe { let _ = libc::close(self.fd); } }
}
fn connect(addr: &CString, ty: libc::c_int) -> IoResult<Inner> {
let (addr, len) = try!(addr_to_sockaddr_un(addr));
let inner = Inner { fd: try!(unix_socket(ty)) };
let addrp = &addr as *libc::sockaddr_storage;
match retry(|| unsafe {
libc::connect(inner.fd, addrp as *libc::sockaddr,
len as libc::socklen_t)
}) {
-1 => Err(super::last_error()),
_ => Ok(inner)
}
}
fn bind(addr: &CString, ty: libc::c_int) -> IoResult<Inner> {
let (addr, len) = try!(addr_to_sockaddr_un(addr));
let inner = Inner { fd: try!(unix_socket(ty)) };
let addrp = &addr as *libc::sockaddr_storage;
match unsafe {
libc::bind(inner.fd, addrp as *libc::sockaddr, len as libc::socklen_t)
} {
-1 => Err(super::last_error()),
_ => Ok(inner)
}
}
////////////////////////////////////////////////////////////////////////////////
// Unix Streams
////////////////////////////////////////////////////////////////////////////////
pub struct UnixStream {
priv inner: UnsafeArc<Inner>,
}
impl UnixStream {
pub fn connect(addr: &CString) -> IoResult<UnixStream> {
connect(addr, libc::SOCK_STREAM).map(|inner| {
UnixStream { inner: UnsafeArc::new(inner) }
})
}
fn fd(&self) -> fd_t { unsafe { (*self.inner.get()).fd } }
}
impl rtio::RtioPipe for UnixStream {
fn read(&mut self, buf: &mut [u8]) -> IoResult<uint> {
let ret = retry(|| unsafe {
libc::recv(self.fd(),
buf.as_ptr() as *mut libc::c_void,
buf.len() as libc::size_t,
0) as libc::c_int
});
if ret == 0 {
Err(io::standard_error(io::EndOfFile))
} else if ret < 0 {
Err(super::last_error())
} else {
Ok(ret as uint)
}
}
fn write(&mut self, buf: &[u8]) -> IoResult<()> {
let ret = keep_going(buf, |buf, len| unsafe {
libc::send(self.fd(),
buf as *mut libc::c_void,
len as libc::size_t,
0) as i64
});
if ret < 0 {
Err(super::last_error())
} else {
Ok(())
}
}
fn clone(&self) -> ~rtio::RtioPipe:Send {
~UnixStream { inner: self.inner.clone() } as ~rtio::RtioPipe:Send
}
}
////////////////////////////////////////////////////////////////////////////////
// Unix Datagram
////////////////////////////////////////////////////////////////////////////////
pub struct UnixDatagram {
priv inner: UnsafeArc<Inner>,
}
impl UnixDatagram {
pub fn connect(addr: &CString) -> IoResult<UnixDatagram> {
connect(addr, libc::SOCK_DGRAM).map(|inner| {
UnixDatagram { inner: UnsafeArc::new(inner) }
})
}
pub fn bind(addr: &CString) -> IoResult<UnixDatagram> {
bind(addr, libc::SOCK_DGRAM).map(|inner| {
UnixDatagram { inner: UnsafeArc::new(inner) }
})
}
fn fd(&self) -> fd_t { unsafe { (*self.inner.get()).fd } }
pub fn recvfrom(&mut self, buf: &mut [u8]) -> IoResult<(uint, CString)> {
let mut storage: libc::sockaddr_storage = unsafe { intrinsics::init() };
let storagep = &mut storage as *mut libc::sockaddr_storage;
let mut addrlen: libc::socklen_t =
mem::size_of::<libc::sockaddr_storage>() as libc::socklen_t;
let ret = retry(|| unsafe {
libc::recvfrom(self.fd(),
buf.as_ptr() as *mut libc::c_void,
buf.len() as libc::size_t,
0,
storagep as *mut libc::sockaddr,
&mut addrlen) as libc::c_int
});
if ret < 0 { return Err(super::last_error()) }
sockaddr_to_unix(&storage, addrlen as uint).and_then(|addr| {
Ok((ret as uint, addr))
})
}
pub fn sendto(&mut self, buf: &[u8], dst: &CString) -> IoResult<()> {
let (dst, len) = try!(addr_to_sockaddr_un(dst));
let dstp = &dst as *libc::sockaddr_storage;
let ret = retry(|| unsafe {
libc::sendto(self.fd(),
buf.as_ptr() as *libc::c_void,
buf.len() as libc::size_t,
0,
dstp as *libc::sockaddr,
len as libc::socklen_t) as libc::c_int
});
match ret {
-1 => Err(super::last_error()),
n if n as uint != buf.len() => {
Err(io::IoError {
kind: io::OtherIoError,
desc: "couldn't send entire packet at once",
detail: None,
})
}
_ => Ok(())
}
}
pub fn clone(&mut self) -> UnixDatagram {
UnixDatagram { inner: self.inner.clone() }
}
}
////////////////////////////////////////////////////////////////////////////////
// Unix Listener
////////////////////////////////////////////////////////////////////////////////
pub struct UnixListener {
priv inner: Inner,
}
impl UnixListener {
pub fn bind(addr: &CString) -> IoResult<UnixListener> {
bind(addr, libc::SOCK_STREAM).map(|fd| UnixListener { inner: fd })
}
fn fd(&self) -> fd_t { self.inner.fd }
pub fn native_listen(self, backlog: int) -> IoResult<UnixAcceptor> {
match unsafe { libc::listen(self.fd(), backlog as libc::c_int) } {
-1 => Err(super::last_error()),
_ => Ok(UnixAcceptor { listener: self })
}
}
}
impl rtio::RtioUnixListener for UnixListener {
fn listen(~self) -> IoResult<~rtio::RtioUnixAcceptor:Send> {
self.native_listen(128).map(|a| ~a as ~rtio::RtioUnixAcceptor:Send)
}
}
pub struct UnixAcceptor {
priv listener: UnixListener,
}
impl UnixAcceptor {
fn fd(&self) -> fd_t { self.listener.fd() }
pub fn native_accept(&mut self) -> IoResult<UnixStream> {
let mut storage: libc::sockaddr_storage = unsafe { intrinsics::init() };
let storagep = &mut storage as *mut libc::sockaddr_storage;
let size = mem::size_of::<libc::sockaddr_storage>();
let mut size = size as libc::socklen_t;
match retry(|| unsafe {
libc::accept(self.fd(),
storagep as *mut libc::sockaddr,
&mut size as *mut libc::socklen_t) as libc::c_int
}) {
-1 => Err(super::last_error()),
fd => Ok(UnixStream { inner: UnsafeArc::new(Inner { fd: fd }) })
}
}
}
impl rtio::RtioUnixAcceptor for UnixAcceptor {
fn accept(&mut self) -> IoResult<~rtio::RtioPipe:Send> {
self.native_accept().map(|s| ~s as ~rtio::RtioPipe:Send)
}
}<|fim▁end|> | use std::sync::arc::UnsafeArc;
use std::intrinsics; |
<|file_name|>config_client.hpp<|end_file_name|><|fim▁begin|>// Copyright 2010-2014 RethinkDB, all rights reserved.
#ifndef CLUSTERING_ADMINISTRATION_SERVERS_CONFIG_CLIENT_HPP_
#define CLUSTERING_ADMINISTRATION_SERVERS_CONFIG_CLIENT_HPP_
#include <map>
#include <set>
#include <string>
#include "containers/incremental_lenses.hpp"
#include "clustering/administration/metadata.hpp"
#include "clustering/administration/servers/server_metadata.hpp"
#include "rpc/mailbox/mailbox.hpp"
#include "rpc/semilattice/view.hpp"
class server_config_client_t : public home_thread_mixin_t {
public:
server_config_client_t(
mailbox_manager_t *_mailbox_manager,
watchable_map_t<peer_id_t, cluster_directory_metadata_t>
*_directory_view,
watchable_map_t<std::pair<peer_id_t, server_id_t>, empty_value_t>
*_peer_connections_map);
/* `get_server_config_map()` returns the server IDs and current configurations of
every connected server. */
watchable_map_t<server_id_t, server_config_versioned_t> *get_server_config_map() {
return &server_config_map;
}
/* `get_peer_to_server_map()` and `get_server_to_peer_map()` allow conversion back
and forth between server IDs and peer IDs. */
watchable_map_t<peer_id_t, server_id_t> *get_peer_to_server_map() {
return &peer_to_server_map;
}
watchable_map_t<server_id_t, peer_id_t> *get_server_to_peer_map() {
return &server_to_peer_map;
}
/* This map contains the pair (X, Y) if we can see server X and server X can see
server Y. */
watchable_map_t<std::pair<server_id_t, server_id_t>, empty_value_t>
*get_connections_map() {
return &connections_map;
}
/* `set_config()` changes the config of the server with the given server ID. */
bool set_config(
const server_id_t &server_id,
const name_string_t &old_server_name, /* for error messages */
const server_config_t &new_server_config,
signal_t *interruptor,
std::string *error_out);
private:
void install_server_metadata(
const peer_id_t &peer_id,
const cluster_directory_metadata_t &metadata);
void on_directory_change(
const peer_id_t &peer_id,
const cluster_directory_metadata_t *metadata);
void on_peer_connections_map_change(
const std::pair<peer_id_t, server_id_t> &key,
const empty_value_t *value);
mailbox_manager_t *const mailbox_manager;
watchable_map_t<peer_id_t, cluster_directory_metadata_t> * const directory_view;
watchable_map_t< std::pair<peer_id_t, server_id_t>, empty_value_t>
* const peer_connections_map;
watchable_map_var_t<server_id_t, server_config_versioned_t> server_config_map;
watchable_map_var_t<peer_id_t, server_id_t> peer_to_server_map;
watchable_map_var_t<server_id_t, peer_id_t> server_to_peer_map;
watchable_map_var_t<std::pair<server_id_t, server_id_t>, empty_value_t>
connections_map;
/* We use this to produce reasonable results when multiple peers have the same server
ID. In general multiple peers cannot have the same server ID, but a server might
conceivably shut down and then reconnect with a new peer ID before we had dropped the
original connection. */
std::multimap<server_id_t, peer_id_t> all_server_to_peer_map;
watchable_map_t<peer_id_t, cluster_directory_metadata_t>::all_subs_t directory_subs;
watchable_map_t<std::pair<peer_id_t, server_id_t>, empty_value_t>::all_subs_t
peer_connections_map_subs;
};<|fim▁hole|><|fim▁end|> |
#endif /* CLUSTERING_ADMINISTRATION_SERVERS_CONFIG_CLIENT_HPP_ */ |
<|file_name|>ExecutableStatement.java<|end_file_name|><|fim▁begin|>/**
* Copyright 2014 Nortal AS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nortal.petit.orm.statement;
import java.util.List;<|fim▁hole|> * @author Lauri Lättemäe ([email protected])
* @created 29.04.2013
*/
public abstract class ExecutableStatement<B> extends SimpleStatement<B> {
/**
* Returns statements sql with parameter values
*
* @return
*/
@Override
public String getSqlWithParams() {
prepare();
StringBuffer sb = new StringBuffer();
if (!CollectionUtils.isEmpty(getBeans())) {
for (B bean : getBeans()) {
prepare(bean);
sb.append(super.getSqlWithParams()).append("\n");
}
} else {
sb.append(super.getSqlWithParams()).append("\n");
}
return sb.toString();
}
protected abstract List<B> getBeans();
protected abstract void prepare(B bean);
public abstract void exec();
}<|fim▁end|> |
import org.springframework.util.CollectionUtils;
/** |
<|file_name|>ImAdd.py<|end_file_name|><|fim▁begin|>__author__ = 'Mayur M'
import ImgIO
def add(image1, image2): # add two images together
if image1.width == image2.width and image1.height == image2.height:
return_red = []
return_green = []
return_blue = []
for i in range(0, len(image1.red)):
tmp_r = image1.red[i] + image2.red[i] # adding the RGB values
tmp_g = image1.green[i] + image2.green[i]
tmp_b = image1.blue[i] + image2.blue[i]
if 0 <= tmp_r <= 255:
return_red.append(tmp_r)
else:
return_red.append(tmp_r % 255) # loop values around if saturation
if 0 <= tmp_g <= 255:
return_green.append(tmp_g)
else:
return_green.append(tmp_g % 255) # loop values around if saturation
if 0 <= tmp_b <= 255:
return_blue.append(tmp_b)
else:
return_blue.append(tmp_b % 255) # loop values around if saturation
return return_red, return_green, return_blue
else:
print "Error: image dimensions do not match!"
def main(): # test case
print('start!!!!!')
ima = ImgIO.ImgIO()
imb = ImgIO.ImgIO()
ima.read_image("y.jpg")
imb.read_image("test1.png")
add_r, add_g, add_b = add(ima, imb)
imc = ImgIO.ImgIO()
imc.read_list(add_r, add_g, add_b, "final1.png", ima.width, ima.height)
imc.write_image("final1.png")
<|fim▁hole|><|fim▁end|> | if __name__ == '__main__':
main() |
<|file_name|>server.js<|end_file_name|><|fim▁begin|>import express from 'express';
import path from 'path';
let app = express();
/*** Webpack imports ***/
import webpack from 'webpack';
import WebpackDevServer from 'webpack-dev-server';
import config from './webpack.config.js';
const webpackOptions = {
publicPath: config.output.publicPath,
// needed so that when going to the localhost:3000 it will load the contents
// from this directory
contentBase: config.devServer.contentBase,
quiet: false,
// hides all the bundling file names
noInfo: true,
// adds color to the terminal
stats: {
colors: true
}
};
const isDevelopment = process.env.NODE_ENV !== 'production';
const port = isDevelopment ? 3003 : process.env.PORT;
const public_path = path.join(__dirname, 'public');
app.use(express.static(public_path))
// .get('/', function(req, res) {
// res.sendFile('index.html', {root: public_path})
// });
/*** during development I am using a webpack-dev-server ***/
if(isDevelopment) {
new WebpackDevServer(webpack(config), webpackOptions)
.listen(port, 'localhost', function(err) {
if (err) { console.log(err); }
console.log(`Listening on port: ${port}`);
});
}
<|fim▁hole|><|fim▁end|> |
module.exports = app; |
<|file_name|>test_cp_mgmt_host.py<|end_file_name|><|fim▁begin|># Ansible module to manage CheckPoint Firewall (c) 2019
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from units.modules.utils import set_module_args, exit_json, fail_json, AnsibleExitJson
from ansible.module_utils import basic
from ansible.modules.network.check_point import cp_mgmt_host
OBJECT = {
"name": "New Host 1",
"ip_address": "192.0.2.1"
}
CREATE_PAYLOAD = {
"name": "New Host 1",
"ip_address": "192.0.2.1"
}
UPDATE_PAYLOAD = {
"name": "New Host 1",
"color": "blue",
"ipv4_address": "192.0.2.2"
}
OBJECT_AFTER_UPDATE = UPDATE_PAYLOAD
DELETE_PAYLOAD = {<|fim▁hole|>function_path = 'ansible.modules.network.check_point.cp_mgmt_host.api_call'
api_call_object = 'host'
class TestCheckpointHost(object):
module = cp_mgmt_host
@pytest.fixture(autouse=True)
def module_mock(self, mocker):
return mocker.patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json)
@pytest.fixture
def connection_mock(self, mocker):
connection_class_mock = mocker.patch('ansible.module_utils.network.checkpoint.checkpoint.Connection')
return connection_class_mock.return_value
def test_create(self, mocker, connection_mock):
mock_function = mocker.patch(function_path)
mock_function.return_value = {'changed': True, api_call_object: OBJECT}
result = self._run_module(CREATE_PAYLOAD)
assert result['changed']
assert OBJECT.items() == result[api_call_object].items()
def test_create_idempotent(self, mocker, connection_mock):
mock_function = mocker.patch(function_path)
mock_function.return_value = {'changed': False, api_call_object: OBJECT}
result = self._run_module(CREATE_PAYLOAD)
assert not result['changed']
def test_update(self, mocker, connection_mock):
mock_function = mocker.patch(function_path)
mock_function.return_value = {'changed': True, api_call_object: OBJECT_AFTER_UPDATE}
result = self._run_module(UPDATE_PAYLOAD)
assert result['changed']
assert OBJECT_AFTER_UPDATE.items() == result[api_call_object].items()
def test_update_idempotent(self, mocker, connection_mock):
mock_function = mocker.patch(function_path)
mock_function.return_value = {'changed': False, api_call_object: OBJECT_AFTER_UPDATE}
result = self._run_module(UPDATE_PAYLOAD)
assert not result['changed']
def test_delete(self, mocker, connection_mock):
mock_function = mocker.patch(function_path)
mock_function.return_value = {'changed': True}
result = self._run_module(DELETE_PAYLOAD)
assert result['changed']
def test_delete_idempotent(self, mocker, connection_mock):
mock_function = mocker.patch(function_path)
mock_function.return_value = {'changed': False}
result = self._run_module(DELETE_PAYLOAD)
assert not result['changed']
def _run_module(self, module_args):
set_module_args(module_args)
with pytest.raises(AnsibleExitJson) as ex:
self.module.main()
return ex.value.args[0]<|fim▁end|> | "name": "New Host 1",
"state": "absent"
}
|
<|file_name|>config.py<|end_file_name|><|fim▁begin|># Details used to log into Reddit.
reddit_client_id = ""
reddit_client_secret = ""
reddit_user = ""
reddit_pass = ""
# Auth key used to log into Discord.
discord_key = ""
# Command/feature modules.
module_names = (
"default",
)<|fim▁hole|><|fim▁end|> |
# Do not change this value!
config_version = 2 |
<|file_name|>vardict.py<|end_file_name|><|fim▁begin|>"""Sensitive variant calling using VarDict.
Defaults to using the faster, equally sensitive Java port:
https://github.com/AstraZeneca-NGS/VarDictJava
if 'vardict' or 'vardict-java' is specified in the configuration. To use the
VarDict perl version:
https://github.com/AstraZeneca-NGS/VarDict
specify 'vardict-perl'.
"""
import os
import itertools
import sys
import toolz as tz
import pybedtools
from bcbio import broad, utils
from bcbio.bam import highdepth
from bcbio.distributed.transaction import file_transaction
from bcbio.pipeline import config_utils, shared
from bcbio.pipeline import datadict as dd
from bcbio.provenance import do
from bcbio.variation import annotation, bamprep, vcfutils
def _is_bed_file(target):
return target and isinstance(target, basestring) and os.path.isfile(target)
def _vardict_options_from_config(items, config, out_file, target=None):
opts = ["-c 1", "-S 2", "-E 3", "-g 4"]
# ["-z", "-F", "-c", "1", "-S", "2", "-E", "3", "-g", "4", "-x", "0",
# "-k", "3", "-r", "4", "-m", "8"]
resources = config_utils.get_resources("vardict", config)
if resources.get("options"):
opts += resources["options"]
assert _is_bed_file(target)
if any(tz.get_in(["config", "algorithm", "coverage_interval"], x, "").lower() == "genome"
for x in items):
target = shared.remove_highdepth_regions(target, items)
target = shared.remove_lcr_regions(target, items)
target = _enforce_max_region_size(target, items[0])
opts += [target] # this must be the last option
return opts
def _enforce_max_region_size(in_file, data):
"""Ensure we don't have any chunks in the region greater than 1Mb.
Larger sections have high memory usage on VarDictJava and failures
on VarDict. This creates minimum windows from the input BED file
to avoid these issues. Downstream VarDict merging sorts out any
variants across windows.
"""
max_size = 1e6
overlap_size = 250
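    # e.g. with these settings a 2.5 Mb region becomes windows of at most 1 Mb
    # ([0, 1000000), [999750, 1999750), [1999500, 2500000)), adjacent windows sharing 250 bp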
def _has_larger_regions(f):
return any(r.stop - r.start > max_size for r in pybedtools.BedTool(f))
out_file = "%s-regionlimit%s" % utils.splitext_plus(in_file)
if not utils.file_exists(out_file):
if _has_larger_regions(in_file):
with file_transaction(data, out_file) as tx_out_file:
pybedtools.BedTool().window_maker(w=max_size,
s=max_size - overlap_size,
b=pybedtools.BedTool(in_file)).saveas(tx_out_file)
else:
utils.symlink_plus(in_file, out_file)
return out_file
def run_vardict(align_bams, items, ref_file, assoc_files, region=None,
out_file=None):
"""Run VarDict variant calling.
"""
if vcfutils.is_paired_analysis(align_bams, items):
call_file = _run_vardict_paired(align_bams, items, ref_file,
assoc_files, region, out_file)
else:
vcfutils.check_paired_problems(items)
call_file = _run_vardict_caller(align_bams, items, ref_file,
assoc_files, region, out_file)
return call_file
def _get_jvm_opts(data, out_file):
"""Retrieve JVM options when running the Java version of VarDict.
"""
if get_vardict_command(data) == "vardict-java":
resources = config_utils.get_resources("vardict", data["config"])
jvm_opts = resources.get("jvm_opts", ["-Xms750m", "-Xmx4g"])
jvm_opts += broad.get_default_jvm_opts(os.path.dirname(out_file))
return "export VAR_DICT_OPTS='%s' && " % " ".join(jvm_opts)
else:
return ""
def _run_vardict_caller(align_bams, items, ref_file, assoc_files,
region=None, out_file=None):
"""Detect SNPs and indels with VarDict.
"""
config = items[0]["config"]
if out_file is None:
out_file = "%s-variants.vcf.gz" % os.path.splitext(align_bams[0])[0]
if not utils.file_exists(out_file):
with file_transaction(items[0], out_file) as tx_out_file:
target = shared.subset_variant_regions(dd.get_variant_regions(items[0]), region,
out_file, do_merge=False)
num_bams = len(align_bams)
sample_vcf_names = [] # for individual sample names, given batch calling may be required
for bamfile, item in itertools.izip(align_bams, items):
# prepare commands
sample = dd.get_sample_name(item)
vardict = get_vardict_command(items[0])
strandbias = "teststrandbias.R"
var2vcf = "var2vcf_valid.pl"
opts = (" ".join(_vardict_options_from_config(items, config, out_file, target))
if _is_bed_file(target) else "")
vcfstreamsort = config_utils.get_program("vcfstreamsort", config)
compress_cmd = "| bgzip -c" if out_file.endswith("gz") else ""
freq = float(utils.get_in(config, ("algorithm", "min_allele_fraction"), 10)) / 100.0
coverage_interval = utils.get_in(config, ("algorithm", "coverage_interval"), "exome")
# for deep targeted panels, require 50 worth of coverage
var2vcf_opts = " -v 50 " if highdepth.get_median_coverage(items[0]) > 5000 else ""
fix_ambig = vcfutils.fix_ambiguous_cl()
remove_dup = vcfutils.remove_dup_cl()
jvm_opts = _get_jvm_opts(items[0], tx_out_file)
r_setup = "unset R_HOME && export PATH=%s:$PATH && " % os.path.dirname(utils.Rscript_cmd())
cmd = ("{r_setup}{jvm_opts}{vardict} -G {ref_file} -f {freq} "
"-N {sample} -b {bamfile} {opts} "
"| {strandbias}"
"| {var2vcf} -N {sample} -E -f {freq} {var2vcf_opts} "
"| {fix_ambig} | {remove_dup} | {vcfstreamsort} {compress_cmd}")
if num_bams > 1:
temp_file_prefix = out_file.replace(".gz", "").replace(".vcf", "") + item["name"][1]
tmp_out = temp_file_prefix + ".temp.vcf"
tmp_out += ".gz" if out_file.endswith("gz") else ""
sample_vcf_names.append(tmp_out)
with file_transaction(item, tmp_out) as tx_tmp_file:
if not _is_bed_file(target):
vcfutils.write_empty_vcf(tx_tmp_file, config, samples=[sample])
else:
cmd += " > {tx_tmp_file}"<|fim▁hole|> else:
if not _is_bed_file(target):
vcfutils.write_empty_vcf(tx_out_file, config, samples=[sample])
else:
cmd += " > {tx_out_file}"
do.run(cmd.format(**locals()), "Genotyping with VarDict: Inference", {})
if num_bams > 1:
# N.B. merge_variant_files wants region in 1-based end-inclusive
# coordinates. Thus use bamprep.region_to_gatk
vcfutils.merge_variant_files(orig_files=sample_vcf_names,
out_file=tx_out_file, ref_file=ref_file,
config=config, region=bamprep.region_to_gatk(region))
out_file = (annotation.add_dbsnp(out_file, assoc_files["dbsnp"], config)
if assoc_files.get("dbsnp") else out_file)
return out_file
def _safe_to_float(x):
if x is None:
return None
else:
try:
return float(x)
except ValueError:
return None
def depth_freq_filter(line, tumor_index, aligner):
"""Command line to filter VarDict calls based on depth, frequency and quality.
Looks at regions with low depth for allele frequency (AF * DP < 6, the equivalent
    of < 13bp for heterozygote calls, but generalized). Within these calls, filter if a
    call has:
- Low mapping quality and multiple mismatches in a read (NM)
For bwa only: MQ < 55.0 and NM > 1.0 or MQ < 60.0 and NM > 2.0
- Low depth (DP < 10)
- Low QUAL (QUAL < 45)
Also filters in low allele frequency regions with poor quality, if all of these are
true:
- Allele frequency < 0.2
- Quality < 55
- P-value (SSF) > 0.06
"""
if line.startswith("#CHROM"):
headers = [('##FILTER=<ID=LowAlleleDepth,Description="Low depth per allele frequency '
'along with poor depth, quality, mapping quality and read mismatches.">'),
('##FILTER=<ID=LowFreqQuality,Description="Low frequency read with '
'poor quality and p-value (SSF).">')]
return "\n".join(headers) + "\n" + line
elif line.startswith("#"):
return line
else:
parts = line.split("\t")
sample_ft = {a: v for (a, v) in zip(parts[8].split(":"), parts[9 + tumor_index].split(":"))}
qual = _safe_to_float(parts[5])
dp = _safe_to_float(sample_ft.get("DP"))
af = _safe_to_float(sample_ft.get("AF"))
nm = _safe_to_float(sample_ft.get("NM"))
mq = _safe_to_float(sample_ft.get("MQ"))
ssfs = [x for x in parts[7].split(";") if x.startswith("SSF=")]
pval = _safe_to_float(ssfs[0].split("=")[-1] if ssfs else None)
fname = None
if dp is not None and af is not None:
if dp * af < 6:
if aligner == "bwa" and nm is not None and mq is not None:
if (mq < 55.0 and nm > 1.0) or (mq < 60.0 and nm > 2.0):
fname = "LowAlleleDepth"
if dp < 10:
fname = "LowAlleleDepth"
if qual is not None and qual < 45:
fname = "LowAlleleDepth"
if af is not None and qual is not None and pval is not None:
if af < 0.2 and qual < 55 and pval > 0.06:
fname = "LowFreqQuality"
if fname:
if parts[6] in set([".", "PASS"]):
parts[6] = fname
else:
parts[6] += ";%s" % fname
line = "\t".join(parts)
return line
def _run_vardict_paired(align_bams, items, ref_file, assoc_files,
region=None, out_file=None):
"""Detect variants with Vardict.
This is used for paired tumor / normal samples.
"""
config = items[0]["config"]
if out_file is None:
out_file = "%s-paired-variants.vcf.gz" % os.path.splitext(align_bams[0])[0]
if not utils.file_exists(out_file):
with file_transaction(items[0], out_file) as tx_out_file:
target = shared.subset_variant_regions(dd.get_variant_regions(items[0]), region,
out_file, do_merge=True)
paired = vcfutils.get_paired_bams(align_bams, items)
if not _is_bed_file(target):
vcfutils.write_empty_vcf(tx_out_file, config,
samples=[x for x in [paired.tumor_name, paired.normal_name] if x])
else:
if not paired.normal_bam:
ann_file = _run_vardict_caller(align_bams, items, ref_file,
assoc_files, region, out_file)
return ann_file
vcffilter = config_utils.get_program("vcffilter", config)
vardict = get_vardict_command(items[0])
vcfstreamsort = config_utils.get_program("vcfstreamsort", config)
strandbias = "testsomatic.R"
var2vcf = "var2vcf_paired.pl"
compress_cmd = "| bgzip -c" if out_file.endswith("gz") else ""
freq = float(utils.get_in(config, ("algorithm", "min_allele_fraction"), 10)) / 100.0
# merge bed file regions as amplicon VarDict is only supported in single sample mode
opts = " ".join(_vardict_options_from_config(items, config, out_file, target))
coverage_interval = utils.get_in(config, ("algorithm", "coverage_interval"), "exome")
# for deep targeted panels, require 50 worth of coverage
var2vcf_opts = " -v 50 " if highdepth.get_median_coverage(items[0]) > 5000 else ""
fix_ambig = vcfutils.fix_ambiguous_cl()
remove_dup = vcfutils.remove_dup_cl()
if any("vardict_somatic_filter" in tz.get_in(("config", "algorithm", "tools_off"), data, [])
for data in items):
somatic_filter = ""
freq_filter = ""
else:
var2vcf_opts += " -M " # this makes VarDict soft filter non-differential variants
somatic_filter = ("| sed 's/\\\\.*Somatic\\\\/Somatic/' "
"| sed 's/REJECT,Description=\".*\">/REJECT,Description=\"Not Somatic via VarDict\">/' "
"| %s -x 'bcbio.variation.freebayes.call_somatic(x)'" %
os.path.join(os.path.dirname(sys.executable), "py"))
freq_filter = ("| bcftools filter -m '+' -s 'REJECT' -e 'STATUS !~ \".*Somatic\"' 2> /dev/null "
"| %s -x 'bcbio.variation.vardict.depth_freq_filter(x, %s, \"%s\")'" %
(os.path.join(os.path.dirname(sys.executable), "py"),
0, dd.get_aligner(paired.tumor_data)))
jvm_opts = _get_jvm_opts(items[0], tx_out_file)
r_setup = "unset R_HOME && export PATH=%s:$PATH && " % os.path.dirname(utils.Rscript_cmd())
cmd = ("{r_setup}{jvm_opts}{vardict} -G {ref_file} -f {freq} "
"-N {paired.tumor_name} -b \"{paired.tumor_bam}|{paired.normal_bam}\" {opts} "
"| {strandbias} "
"| {var2vcf} -P 0.9 -m 4.25 -f {freq} {var2vcf_opts} "
"-N \"{paired.tumor_name}|{paired.normal_name}\" "
"{freq_filter} "
"{somatic_filter} | {fix_ambig} | {remove_dup} | {vcfstreamsort} "
"{compress_cmd} > {tx_out_file}")
do.run(cmd.format(**locals()), "Genotyping with VarDict: Inference", {})
out_file = (annotation.add_dbsnp(out_file, assoc_files["dbsnp"], config)
if assoc_files.get("dbsnp") else out_file)
return out_file
def get_vardict_command(data):
"""
convert variantcaller specification to proper vardict command, handling
string or list specification
"""
vcaller = dd.get_variantcaller(data)
if isinstance(vcaller, list):
vardict = [x for x in vcaller if "vardict" in x]
if not vardict:
return None
vardict = vardict[0]
elif not vcaller:
return None
else:
vardict = vcaller
vardict = "vardict-java" if not vardict.endswith("-perl") else "vardict"
return vardict<|fim▁end|> | do.run(cmd.format(**locals()), "Genotyping with VarDict: Inference", {}) |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Modulos
import sys
import pygame
from pygame.locals import *
# Constantes
venx = 640
veny = 448
# Clases
class Pieza(pygame.sprite.Sprite): # 64x64 px tamaño
def __init__(self, tipo):
pygame.sprite.Sprite.__init__(self)
if tipo == 0:
self.image = load_image("tablero.png", True)
elif tipo == 1:
self.image = load_image("laser.png", True)
elif tipo == 2:
self.image = load_image("diana.png", True)
elif tipo == 3:
self.image = load_image("diana_espejo.png", True)
elif tipo == 4:
self.image = load_image("espejo.png", True)
elif tipo == 5:
self.image = load_image("espejotraves.png", True)
elif tipo == 6:
self.image = load_image("tunel.png", True)
elif tipo == 7:
self.image = load_image("bloqueo.png", True)<|fim▁hole|> else:
tipo = 0
self.image = load_image("tablero.png", True)
# Funciones
def load_image(filename, transparent=False):
try:
image = pygame.image.load(filename)
except pygame.error:
raise SystemExit
image = image.convert()
if transparent:
color = image.get_at((0, 0))
image.set_colorkey(color, RLEACCEL)
return image
#------------------------------------------
def main():
screen = pygame.display.set_mode((venx, veny))
pygame.display.set_caption("Laser Game")
background_image = load_image('fondo.png')
bola = Bola()
while True:
for eventos in pygame.event.get():
if eventos.type == QUIT:
sys.exit(0)
screen.blit(background_image, (0, 0))
screen.blit(bola.image, bola.rect)
pygame.display.flip()
return 0
if __name__ == '__main__':
pygame.init()
main()<|fim▁end|> | elif tipo == 8:
self.image = load_image("bloqueo_g.png", True)
elif tipo == 9:
self.image = load_image("portal.png", True) |
<|file_name|>PigasusPlayerListener.java<|end_file_name|><|fim▁begin|>package redsgreens.Pigasus;
import org.bukkit.entity.Entity;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.player.PlayerInteractEntityEvent;
/**
* Handle events for all Player related events
* @author redsgreens
*/
<|fim▁hole|> plugin = instance;
}
@EventHandler(priority = EventPriority.MONITOR)
public void onPlayerInteractEntity(PlayerInteractEntityEvent event)
// catch player+entity events, looking for wand usage on an entity
{
Entity entity = event.getRightClicked();
// return if something not allowed was clicked
if(plugin.Config.getHoveringChance(entity) == -1) return;
Player player = event.getPlayer();
// return if the click was with something other than the wand
if(player.getItemInHand().getType() != plugin.Config.WandItem) return;
// check for permission
if(!plugin.isAuthorized(player, "wand") && !plugin.isAuthorized(player, "wand." + PigasusFlyingEntity.getType(entity).name().toLowerCase()))
{
if(plugin.Config.ShowErrorsInClient)
player.sendMessage("§cErr: " + plugin.Name + ": you don't have permission.");
return;
}
// checks passed, make this pig fly!
plugin.Manager.addEntity(entity);
}
}<|fim▁end|> | public class PigasusPlayerListener implements Listener {
private final Pigasus plugin;
public PigasusPlayerListener(Pigasus instance) {
|
<|file_name|>iXmlFactory.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for @ag-grid-community/core v25.0.1
// Project: http://www.ag-grid.com/
// Definitions by: Niall Crosby <https://github.com/ag-grid/>
export interface XmlElement {
name: string;
properties?: XmlAttributes;<|fim▁hole|>export interface HeaderElement {
[key: string]: string | undefined;
version?: string;
standalone?: string;
encoding?: string;
}
export interface XmlAttributes {
prefixedAttributes?: PrefixedXmlAttributes[];
rawMap?: any;
}
export interface PrefixedXmlAttributes {
prefix: string;
map: any;
}<|fim▁end|> | children?: XmlElement[];
textNode?: string | null;
} |
<|file_name|>base64.js<|end_file_name|><|fim▁begin|>/*
* author: Lisa
* Info: Base64 / UTF8
 * Encode & Decode
*/
function base64Encode(input) {
var keyStr = "ABCDEFGHIJKLMNOP" + "QRSTUVWXYZabcdef" + "ghijklmnopqrstuv" + "wxyz0123456789+/" + "=";
var output = "";
var chr1, chr2, chr3 = "";
var enc1, enc2, enc3, enc4 = "";
var i = 0;
do {
chr1 = input[i++];
chr2 = input[i++];
chr3 = input[i++];
enc1 = chr1 >> 2;
enc2 = ((chr1 & 3) << 4) | (chr2 >> 4);
enc3 = ((chr2 & 15) << 2) | (chr3 >> 6);
enc4 = chr3 & 63;
if (isNaN(chr2)) {
enc3 = enc4 = 64;
} else if (isNaN(chr3)) {
enc4 = 64;
}
output = output +
keyStr.charAt(enc1) +
keyStr.charAt(enc2) +
keyStr.charAt(enc3) +
keyStr.charAt(enc4);
chr1 = chr2 = chr3 = "";
enc1 = enc2 = enc3 = enc4 = "";
} while (i < input.length);
return output;
}
function base64Decode(input) {
    var keyStr = "ABCDEFGHIJKLMNOP" + "QRSTUVWXYZabcdef" + "ghijklmnopqrstuv" + "wxyz0123456789+/" + "=";  // needed here too: the keyStr above is local to base64Encode
    var output = "";
var chr1, chr2, chr3 = "";
var enc1, enc2, enc3, enc4 = "";
var i = 0;
var base64test = /[^A-Za-z0-9/+///=]/g;
if (base64test.exec(input)) {
alert("There were invalid base64 characters in the input text./n" +
"Valid base64 characters are A-Z, a-z, 0-9, '+', '/', and '='/n" +
"Expect errors in decoding.");
}
input = input.replace(/[^A-Za-z0-9/+///=]/g, "");
    output = new Array();
do {
enc1 = keyStr.indexOf(input.charAt(i++));
enc2 = keyStr.indexOf(input.charAt(i++));
enc3 = keyStr.indexOf(input.charAt(i++));
enc4 = keyStr.indexOf(input.charAt(i++));
chr1 = (enc1 << 2) | (enc2 >> 4);
chr2 = ((enc2 & 15) << 4) | (enc3 >> 2);
chr3 = ((enc3 & 3) << 6) | enc4;
output.push(chr1);
if (enc3 != 64) {
output.push(chr2);
}
if (enc4 != 64) {
output.push(chr3);
}
chr1 = chr2 = chr3 = "";
enc1 = enc2 = enc3 = enc4 = "";
} while (i < input.length);
return output;
}
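// Example usage (illustrative): these functions work on arrays of byte values,
// e.g. base64Encode([72, 105]) === "SGk=" and base64Decode("SGk=") returns [72, 105].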
function UTF8Encode(str){
var temp = "",rs = "";
for( var i=0 , len = str.length; i < len; i++ ){
temp = str.charCodeAt(i).toString(16);
rs += "\\u"+ new Array(5-temp.length).join("0") + temp;
}
return rs;
}
function UTF8Decode(str){
return str.replace(/(\\u)(\w{4}|\w{2})/gi, function($0,$1,$2){
return String.fromCharCode(parseInt($2,16));
});<|fim▁hole|>exports.UTF8Encode = UTF8Encode;
exports.UTF8Decode = UTF8Decode;<|fim▁end|> | }
exports.base64Encode = base64Encode;
exports.base64Decode = base64Decode; |
<|file_name|>cmp_utils.rs<|end_file_name|><|fim▁begin|>use regex::Regex;
use std::cmp::Ordering;
use crate::types::*;
lazy_static! {
static ref MODIFICATION_RE: Regex = Regex::new(r"^(?P<aa>[A-Z])(?P<pos>\d+)$").unwrap();<|fim▁hole|> if let Some(ref res1) = *residue1 {
if let Some(ref res2) = *residue2 {
if let (Some(res1_captures), Some(res2_captures)) =
(MODIFICATION_RE.captures(res1), MODIFICATION_RE.captures(res2))
{
let res1_aa = res1_captures.name("aa").unwrap().as_str();
let res2_aa = res2_captures.name("aa").unwrap().as_str();
let aa_order = res1_aa.cmp(res2_aa);
if aa_order == Ordering::Equal {
let res1_pos =
res1_captures.name("pos").unwrap().as_str().parse::<i32>().unwrap();
let res2_pos =
res2_captures.name("pos").unwrap().as_str().parse::<i32>().unwrap();
res1_pos.cmp(&res2_pos)
} else {
aa_order
}
} else {
res1.cmp(res2)
}
} else {
Ordering::Less
}
} else {
if residue2.is_some() {
Ordering::Greater
} else {
Ordering::Equal
}
}
}<|fim▁end|> | }
pub fn cmp_residues(residue1: &Option<Residue>, residue2: &Option<Residue>) -> Ordering { |
<|file_name|>docs.ts<|end_file_name|><|fim▁begin|>/**
* Enumeration of documentation topics<|fim▁hole|> * @internal
*/
export enum DocsId {
Transformations,
FieldConfig,
FieldConfigOverrides,
}<|fim▁end|> | |
<|file_name|>0002_segment_additional_miles.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|>
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('attracker_app', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='segment',
name='additional_miles',
field=models.FloatField(default=0, verbose_name='Non-AT miles hiked with the segment'),
),
]<|fim▁end|> | from __future__ import unicode_literals |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>/*jshint unused: vars */
require.config({
paths: {
angular: '../../bower_components/angular/angular',
'angular-animate': '../../bower_components/angular-animate/angular-animate',
'angular-aria': '../../bower_components/angular-aria/angular-aria',
'angular-cookies': '../../bower_components/angular-cookies/angular-cookies',
'angular-messages': '../../bower_components/angular-messages/angular-messages',
'angular-mocks': '../../bower_components/angular-mocks/angular-mocks',
'angular-resource': '../../bower_components/angular-resource/angular-resource',
'angular-route': '../../bower_components/angular-route/angular-route',
'angular-sanitize': '../../bower_components/angular-sanitize/angular-sanitize',
'angular-touch': '../../bower_components/angular-touch/angular-touch',
bootstrap: '../../bower_components/bootstrap/dist/js/bootstrap',
'angular-bootstrap': '../../bower_components/angular-bootstrap/ui-bootstrap-tpls',
'angular-ui-grid': '../../bower_components/angular-ui-grid/ui-grid'
},
shim: {
angular: {
exports: 'angular'
},
'angular-route': [
'angular'
],
'angular-cookies': [
'angular'
],
'angular-sanitize': [
'angular'
],
'angular-resource': [
'angular'
],
'angular-animate': [
'angular'
],
'angular-touch': [
'angular'
],
'angular-bootstrap': [
'angular'
],
'angular-mocks': {
deps: [
'angular'
],
exports: 'angular.mock'
}
},
priority: [
'angular'
],
packages: [
]
});
//http://code.angularjs.org/1.2.1/docs/guide/bootstrap#overview_deferred-bootstrap
window.name = 'NG_DEFER_BOOTSTRAP!';
require([
'angular',
'angular-route',
'angular-cookies',
'angular-sanitize',
'angular-resource',
'angular-animate',
'angular-touch',
'angular-bootstrap',
'app'
], function (angular, ngRoutes, ngCookies, ngSanitize, ngResource, ngAnimate, ngTouch, ngBootstrap, app) {
'use strict';
/* jshint ignore:start */<|fim▁hole|> /* jshint ignore:end */
angular.element().ready(function () {
angular.resumeBootstrap([app.name]);
});
});<|fim▁end|> | var $html = angular.element(document.getElementsByTagName('html')[0]); |
<|file_name|>test_io_csv.py<|end_file_name|><|fim▁begin|>import pytest
import os
from polyglotdb import CorpusContext
acoustic = pytest.mark.skipif(
pytest.config.getoption("--skipacoustics"),
reason="remove --skipacoustics option to run"
)
def test_to_csv(acoustic_utt_config, export_test_dir):
export_path = os.path.join(export_test_dir, 'results_export.csv')
with CorpusContext(acoustic_utt_config) as g:
q = g.query_graph(g.phone).filter(g.phone.label == 'aa')
q = q.columns(g.phone.label.column_name('label'),
g.phone.duration.column_name('duration'),
g.phone.begin.column_name('begin'))
q = q.order_by(g.phone.begin.column_name('begin'))
q.to_csv(export_path)
# ignore ids
expected = [['label', 'duration', 'begin'],
['aa', 0.0783100000000001, 2.70424],
['aa', 0.12199999999999989, 9.32077],
['aa', 0.03981000000000279, 24.56029]]
with open(export_path, 'r') as f:
i = 0
for line in f.readlines():
line = line.strip()
if line == '':
continue
line = line.split(',')
print(line)
if i != 0:
line = [line[0], float(line[1]), float(line[2])]
assert line[0] == expected[i][0]
assert line[1:] == pytest.approx(expected[i][1:], 1e-3)
else:
assert line == expected[i]
i += 1
with CorpusContext(acoustic_utt_config) as g:
q = g.query_graph(g.phone).filter(g.phone.label == 'aa')
q = q.columns(g.phone.label,
g.phone.duration,
g.phone.begin)
q = q.order_by(g.phone.begin)
q.to_csv(export_path)
# ignore ids
expected = [['node_phone_label', 'node_phone_duration', 'node_phone_begin'],
['aa', 0.0783100000000001, 2.70424],
['aa', 0.12199999999999989, 9.32077],
['aa', 0.03981000000000279, 24.56029]]
with open(export_path, 'r') as f:
i = 0
for line in f.readlines():
line = line.strip()
print(line)
if line == '':
continue
line = line.split(',')
print(line)<|fim▁hole|> assert line[0] == expected[i][0]
assert line[1:] == pytest.approx(expected[i][1:], 1e-3)
else:
assert line == expected[i]
i += 1
@acoustic
def test_csv_vot(acoustic_utt_config, vot_classifier_path, export_test_dir):
export_path = os.path.join(export_test_dir, 'results_export_vot.csv')
with CorpusContext(acoustic_utt_config) as g:
g.reset_acoustics()
g.reset_vot()
stops = ['p', 't', 'k'] # , 'b', 'd', 'g']
g.encode_class(stops, 'stops')
g.analyze_vot(stop_label="stops",
classifier=vot_classifier_path,
vot_min=15,
vot_max=250,
window_min=-30,
window_max=30)
q = g.query_graph(g.phone).filter(g.phone.label.in_(stops)).columns(g.phone.vot.begin,
g.phone.vot.end).order_by(g.phone.begin)
q.to_csv(export_path)
p_true = [(1.593, 1.649), (1.832, 1.848), (1.909, 1.98), (2.116, 2.137), (2.687, 2.703),
(2.829, 2.8440000000000003), (2.934, 2.9490000000000003), (3.351, 3.403), (5.574, 5.593999999999999),
(6.207, 6.2219999999999995), (6.736, 6.755999999999999), (7.02, 7.0489999999999995), (9.255, 9.287),
(9.498, 9.514999999999999), (11.424, 11.479999999999999), (13.144, 13.206), (13.498, 13.523),
(25.125, 25.14)]
p_csv = []
with open(export_path, 'r') as f:
f.readline()
for line in f:
line = line.strip()
if line == '':
continue
line = line.split(',')
p_csv.append((float(line[0]), float(line[1])))
for t, r in zip(p_true, p_csv):
assert r == t<|fim▁end|> | if i != 0:
line = [line[0], float(line[1]), float(line[2])] |
<|file_name|>column.rs<|end_file_name|><|fim▁begin|>// Copyright 2018 TiKV Project Authors. Licensed under Apache-2.0.
use super::*;
use tidb_query_datatype::codec::{datum, Datum};
use tidb_query_datatype::expr::EvalContext;
use tipb::{ColumnInfo, FieldType};
pub const TYPE_VAR_CHAR: i32 = 1;
pub const TYPE_LONG: i32 = 2;
#[derive(Clone)]
pub struct Column {
pub id: i64,
pub(crate) col_type: i32,
// negative means not a index key, 0 means primary key, positive means normal index key.
pub index: i64,
pub(crate) default_val: Option<Datum>,
}
impl Column {
pub fn as_column_info(&self) -> ColumnInfo {
let mut c_info = ColumnInfo::default();<|fim▁hole|> if let Some(ref dv) = self.default_val {
c_info.set_default_val(
datum::encode_value(&mut EvalContext::default(), &[dv.clone()]).unwrap(),
)
}
c_info
}
pub fn as_field_type(&self) -> FieldType {
let mut ft = FieldType::default();
ft.set_tp(self.col_field_type());
ft
}
pub fn col_field_type(&self) -> i32 {
match self.col_type {
TYPE_LONG => 8, // FieldTypeTp::LongLong
TYPE_VAR_CHAR => 15, // FieldTypeTp::VarChar
_ => unreachable!("col_type: {}", self.col_type),
}
}
}
pub struct ColumnBuilder {
col_type: i32,
index: i64,
default_val: Option<Datum>,
}
impl ColumnBuilder {
pub fn new() -> ColumnBuilder {
ColumnBuilder {
col_type: TYPE_LONG,
index: -1,
default_val: None,
}
}
pub fn col_type(mut self, t: i32) -> ColumnBuilder {
self.col_type = t;
self
}
pub fn primary_key(mut self, b: bool) -> ColumnBuilder {
if b {
self.index = 0;
} else {
self.index = -1;
}
self
}
pub fn index_key(mut self, idx_id: i64) -> ColumnBuilder {
self.index = idx_id;
self
}
pub fn default(mut self, val: Datum) -> ColumnBuilder {
self.default_val = Some(val);
self
}
pub fn build(self) -> Column {
Column {
id: next_id(),
col_type: self.col_type,
index: self.index,
default_val: self.default_val,
}
}
}
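// Illustrative usage: a primary-key integer column for a test table, e.g.
//     let handle_col = ColumnBuilder::new().col_type(TYPE_LONG).primary_key(true).build();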
impl Default for ColumnBuilder {
fn default() -> Self {
Self::new()
}
}<|fim▁end|> | c_info.set_column_id(self.id);
c_info.set_tp(self.col_field_type());
c_info.set_pk_handle(self.index == 0); |
<|file_name|>types.rs<|end_file_name|><|fim▁begin|>extern crate indy_crypto;
extern crate rmp_serde;
extern crate serde;
extern crate serde_json;
extern crate time;
use std::cmp::Eq;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};
use errors::prelude::*;
use utils::crypto::verkey_builder::build_full_verkey;
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct NodeData {
pub alias: String,
pub client_ip: Option<String>,
#[serde(deserialize_with = "string_or_number")]
#[serde(default)]
pub client_port: Option<u64>,
pub node_ip: Option<String>,
#[serde(deserialize_with = "string_or_number")]
#[serde(default)]
pub node_port: Option<u64>,
pub services: Option<Vec<String>>,
pub blskey: Option<String>,
pub blskey_pop: Option<String>,
}
fn string_or_number<'de, D>(deserializer: D) -> Result<Option<u64>, D::Error>
where D: serde::Deserializer<'de>
{
let deser_res: Result<serde_json::Value, _> = serde::Deserialize::deserialize(deserializer);
match deser_res {
Ok(serde_json::Value::String(s)) => match s.parse::<u64>() {
Ok(num) => Ok(Some(num)),
Err(err) => Err(serde::de::Error::custom(format!("Invalid Node transaction: {:?}", err)))
},
Ok(serde_json::Value::Number(n)) => match n.as_u64() {
Some(num) => Ok(Some(num)),
None => Err(serde::de::Error::custom("Invalid Node transaction".to_string()))
},
Ok(serde_json::Value::Null) => Ok(None),
_ => Err(serde::de::Error::custom("Invalid Node transaction".to_string())),
}
}
#[derive(Serialize, Deserialize)]
#[serde(untagged)]
pub enum NodeTransaction {
NodeTransactionV0(NodeTransactionV0),
NodeTransactionV1(NodeTransactionV1),
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct NodeTransactionV0 {
pub data: NodeData,
pub dest: String,
pub identifier: String,
#[serde(rename = "txnId")]
pub txn_id: Option<String>,
pub verkey: Option<String>,
#[serde(rename = "type")]
pub txn_type: String,
}
impl NodeTransactionV0 {
pub const VERSION: &'static str = "1.3";
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct NodeTransactionV1 {
pub txn: Txn,
pub txn_metadata: Metadata,
pub req_signature: ReqSignature,
pub ver: String,
}
impl NodeTransactionV1 {
pub const VERSION: &'static str = "1.4";
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct Txn {
#[serde(rename = "type")]
pub txn_type: String,
#[serde(rename = "protocolVersion")]
pub protocol_version: Option<i32>,
pub data: TxnData,
pub metadata: TxnMetadata,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Metadata {
pub creation_time: Option<u64>,
pub seq_no: Option<i32>,
pub txn_id: Option<String>,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct ReqSignature {
#[serde(rename = "type")]
pub type_: Option<String>,
pub values: Option<Vec<ReqSignatureValue>>,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct ReqSignatureValue {
pub from: Option<String>,
pub value: Option<String>,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct TxnData {
pub data: NodeData,
pub dest: String,
pub verkey: Option<String>,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct TxnMetadata {
pub req_id: Option<i64>,
pub from: String,
}
impl From<NodeTransactionV0> for NodeTransactionV1 {
fn from(node_txn: NodeTransactionV0) -> Self {
{
let txn = Txn {
txn_type: node_txn.txn_type,
protocol_version: None,
data: TxnData {
data: node_txn.data,
dest: node_txn.dest,
verkey: node_txn.verkey,
},
metadata: TxnMetadata {
req_id: None,
from: node_txn.identifier,
},
};
NodeTransactionV1 {
txn,
txn_metadata: Metadata {
seq_no: None,
txn_id: node_txn.txn_id,
creation_time: None,
},
req_signature: ReqSignature {
type_: None,
values: None,
},
ver: "1".to_string(),
}
}
}
}
impl NodeTransactionV1 {
pub fn update(&mut self, other: &mut NodeTransactionV1) -> IndyResult<()> {
assert_eq!(self.txn.data.dest, other.txn.data.dest);
assert_eq!(self.txn.data.data.alias, other.txn.data.data.alias);
if let Some(ref mut client_ip) = other.txn.data.data.client_ip {
self.txn.data.data.client_ip = Some(client_ip.to_owned());
}
if let Some(ref mut client_port) = other.txn.data.data.client_port {
self.txn.data.data.client_port = Some(client_port.to_owned());
}
if let Some(ref mut node_ip) = other.txn.data.data.node_ip {
self.txn.data.data.node_ip = Some(node_ip.to_owned());
}
if let Some(ref mut node_port) = other.txn.data.data.node_port {
self.txn.data.data.node_port = Some(node_port.to_owned());
}
if let Some(ref mut blskey) = other.txn.data.data.blskey {
self.txn.data.data.blskey = Some(blskey.to_owned());
}
if let Some(ref mut blskey_pop) = other.txn.data.data.blskey_pop {
self.txn.data.data.blskey_pop = Some(blskey_pop.to_owned());
}
if let Some(ref mut services) = other.txn.data.data.services {
self.txn.data.data.services = Some(services.to_owned());
}
if other.txn.data.verkey.is_some() {
self.txn.data.verkey = Some(build_full_verkey(&self.txn.data.dest, other.txn.data.verkey.as_ref().map(String::as_str))?);
}
Ok(())
}
}
#[allow(non_snake_case)]
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct LedgerStatus {
pub txnSeqNo: usize,
pub merkleRoot: String,
pub ledgerId: u8,
pub ppSeqNo: Option<u32>,
pub viewNo: Option<u32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub protocolVersion: Option<usize>,
}
#[allow(non_snake_case)]
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ConsistencyProof {
//TODO almost all fields Option<> or find better approach
pub seqNoEnd: usize,
pub seqNoStart: usize,<|fim▁hole|> pub hashes: Vec<String>,
pub oldMerkleRoot: String,
pub newMerkleRoot: String,
}
#[allow(non_snake_case)]
#[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
pub struct CatchupReq {
pub ledgerId: usize,
pub seqNoStart: usize,
pub seqNoEnd: usize,
pub catchupTill: usize,
}
#[allow(non_snake_case)]
#[derive(Serialize, Deserialize, Debug, PartialEq, Clone)]
pub struct CatchupRep {
pub ledgerId: usize,
pub consProof: Vec<String>,
pub txns: HashMap<String, serde_json::Value>,
}
impl CatchupRep {
pub fn min_tx(&self) -> IndyResult<usize> {
let mut min = None;
for (k, _) in self.txns.iter() {
let val = k.parse::<usize>()
.to_indy(IndyErrorKind::InvalidStructure, "Invalid key in catchup reply")?;
match min {
None => min = Some(val),
Some(m) => if val < m { min = Some(val) }
}
}
min.ok_or(err_msg(IndyErrorKind::InvalidStructure, "Empty map"))
}
}
#[derive(Serialize, Debug, Deserialize, Clone)]
#[serde(untagged)]
pub enum Reply {
ReplyV0(ReplyV0),
ReplyV1(ReplyV1),
}
impl Reply {
pub fn req_id(&self) -> u64 {
match self {
&Reply::ReplyV0(ref reply) => reply.result.req_id,
&Reply::ReplyV1(ref reply) => reply.result.txn.metadata.req_id
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ReplyV0 {
pub result: ResponseMetadata
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ReplyV1 {
pub result: ReplyResultV1
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ReplyResultV1 {
pub txn: ReplyTxnV1
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ReplyTxnV1 {
pub metadata: ResponseMetadata
}
#[derive(Serialize, Debug, Deserialize, Clone)]
#[serde(untagged)]
pub enum Response {
ResponseV0(ResponseV0),
ResponseV1(ResponseV1),
}
impl Response {
pub fn req_id(&self) -> u64 {
match self {
&Response::ResponseV0(ref res) => res.req_id,
&Response::ResponseV1(ref res) => res.metadata.req_id
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct ResponseV0 {
pub req_id: u64
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ResponseV1 {
pub metadata: ResponseMetadata
}
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct ResponseMetadata {
pub req_id: u64
}
#[derive(Serialize, Debug, Deserialize)]
#[serde(untagged)]
pub enum PoolLedgerTxn {
PoolLedgerTxnV0(PoolLedgerTxnV0),
PoolLedgerTxnV1(PoolLedgerTxnV1),
}
#[derive(Serialize, Deserialize, Debug)]
pub struct PoolLedgerTxnV0 {
pub txn: Response,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct PoolLedgerTxnV1 {
pub txn: PoolLedgerTxnDataV1,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct PoolLedgerTxnDataV1 {
pub txn: Response,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct SimpleRequest {
pub req_id: u64,
}
#[serde(tag = "op")]
#[derive(Serialize, Deserialize, Debug)]
pub enum Message {
#[serde(rename = "CONSISTENCY_PROOF")]
ConsistencyProof(ConsistencyProof),
#[serde(rename = "LEDGER_STATUS")]
LedgerStatus(LedgerStatus),
#[serde(rename = "CATCHUP_REQ")]
CatchupReq(CatchupReq),
#[serde(rename = "CATCHUP_REP")]
CatchupRep(CatchupRep),
#[serde(rename = "REQACK")]
ReqACK(Response),
#[serde(rename = "REQNACK")]
ReqNACK(Response),
#[serde(rename = "REPLY")]
Reply(Reply),
#[serde(rename = "REJECT")]
Reject(Response),
#[serde(rename = "POOL_LEDGER_TXNS")]
PoolLedgerTxns(PoolLedgerTxn),
Ping,
Pong,
}
impl Message {
pub fn from_raw_str(str: &str) -> IndyResult<Message> {
match str {
"po" => Ok(Message::Pong),
"pi" => Ok(Message::Ping),
_ => serde_json::from_str::<Message>(str)
.to_indy(IndyErrorKind::InvalidStructure, "Malformed message json"),
}
}
}
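// Usage sketch (illustrative, not from the upstream crate): the pool protocol abbreviates
// keep-alive traffic, so `from_raw_str` maps the two short strings before falling back to
// full JSON deserialization of a `Message` variant:
//
//     assert!(matches!(Message::from_raw_str("pi"), Ok(Message::Ping)));
//     assert!(matches!(Message::from_raw_str("po"), Ok(Message::Pong)));
//     // e.g. r#"{"op":"REQACK","reqId":1}"# would be expected to parse as Message::ReqACK(..)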
/**
 A single item to verify:
 - the SP Trie with its RootHash
 - the BLS multi-signature (MS)
 - the set of key-value pairs to verify
*/
#[derive(Serialize, Deserialize, Debug)]
pub struct ParsedSP {
/// encoded SP Trie transferred from Node to Client
pub proof_nodes: String,
    /// RootHash of the Trie and the starting point for verification. Should match the corresponding field in the BLS MS data
pub root_hash: String,
    /// entities to verify against the current SP Trie
pub kvs_to_verify: KeyValuesInSP,
/// BLS MS data for verification
pub multi_signature: serde_json::Value,
}
/**
 Variants of representation for items to verify against the SP Trie.
 Right now 2 options are specified:
 - a simple array of key-value pairs
 - a whole subtrie
*/
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(tag = "type")]
pub enum KeyValuesInSP {
Simple(KeyValueSimpleData),
SubTrie(KeyValuesSubTrieData),
}
/**
Simple variant of `KeyValuesInSP`.
 All required data is already present in the parent SP Trie (built from `proof_nodes`),
 so `kvs` can be verified directly in the parent trie.
*/
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct KeyValueSimpleData {
pub kvs: Vec<(String /* b64-encoded key */, Option<String /* val */>)>
}
/**
Subtrie variant of `KeyValuesInSP`.
 In this case the client (libindy) should construct the subtrie and append it to the trie built from `proof_nodes`.
 After this preparation each key-value pair can be checked.
*/
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct KeyValuesSubTrieData {
    /// base64-encoded common prefix of each pair in `kvs`. Used to correctly merge the initial trie and the subtrie
pub sub_trie_prefix: Option<String>,
pub kvs: Vec<(String /* b64-encoded key_suffix */, Option<String /* val */>)>,
}
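// Shape sketch (an assumption for illustration; field names follow the structs above):
// a `ParsedSP` carrying the `Simple` variant serializes roughly as
//
//     {
//         "proof_nodes": "<base64-encoded trie nodes>",
//         "root_hash": "<root hash>",
//         "kvs_to_verify": { "type": "Simple", "kvs": [["a2V5", "value"], ["b3RoZXI=", null]] },
//         "multi_signature": { /* BLS MS data as received from the node */ }
//     }
//
// because `KeyValuesInSP` is internally tagged with `type`.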
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct RemoteNode {
pub name: String,
pub public_key: Vec<u8>,
pub zaddr: String,
pub is_blacklisted: bool,
}
pub trait MinValue {
fn get_min_index(&self) -> IndyResult<usize>;
}
impl MinValue for Vec<(CatchupRep, usize)> {
fn get_min_index(&self) -> IndyResult<usize> {
let mut res = None;
for (index, &(ref catchup_rep, _)) in self.iter().enumerate() {
match res {
None => { res = Some((catchup_rep, index)); }
Some((min_rep, _)) => if catchup_rep.min_tx()? < min_rep.min_tx()? {
res = Some((catchup_rep, index));
}
}
}
Ok(res.ok_or(err_msg(IndyErrorKind::InvalidStructure, "Element not Found"))?.1)
}
}
#[derive(Debug)]
pub struct HashableValue {
pub inner: serde_json::Value
}
impl Eq for HashableValue {}
impl Hash for HashableValue {
fn hash<H: Hasher>(&self, state: &mut H) {
serde_json::to_string(&self.inner).unwrap().hash(state); //TODO
}
}
impl PartialEq for HashableValue {
fn eq(&self, other: &HashableValue) -> bool {
self.inner.eq(&other.inner)
}
}
#[derive(Debug, PartialEq, Eq)]
pub struct ResendableRequest {
pub request: String,
pub start_node: usize,
pub next_node: usize,
pub next_try_send_time: Option<time::Tm>,
}
#[derive(Debug, PartialEq, Eq)]
pub struct CommandProcess {
pub nack_cnt: usize,
pub replies: HashMap<HashableValue, usize>,
pub accum_replies: Option<HashableValue>,
pub parent_cmd_ids: Vec<i32>,
pub resendable_request: Option<ResendableRequest>,
pub full_cmd_timeout: Option<time::Tm>,
}
#[derive(Debug, PartialEq, Eq)]
pub struct RequestToSend {
pub request: String,
pub id: i32,
}
#[derive(Debug, PartialEq, Eq)]
pub struct MessageToProcess {
pub message: String,
pub node_idx: usize,
}<|fim▁end|> | pub ledgerId: usize, |
<|file_name|>position.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
<%namespace name="helpers" file="/helpers.mako.rs" />
<%helpers:shorthand name="flex-flow" sub_properties="flex-direction flex-wrap" extra_prefixes="webkit"
spec="https://drafts.csswg.org/css-flexbox/#flex-flow-property">
use properties::longhands::{flex_direction, flex_wrap};
pub fn parse_value(context: &ParserContext, input: &mut Parser) -> Result<Longhands, ()> {
let mut direction = None;
let mut wrap = None;
loop {
if direction.is_none() {
if let Ok(value) = input.try(|input| flex_direction::parse(context, input)) {
direction = Some(value);
continue
}
}
if wrap.is_none() {
if let Ok(value) = input.try(|input| flex_wrap::parse(context, input)) {
wrap = Some(value);
continue
}
}
break
}
if direction.is_none() && wrap.is_none() {
return Err(())
}
Ok(Longhands {
flex_direction: unwrap_or_initial!(flex_direction, direction),
flex_wrap: unwrap_or_initial!(flex_wrap, wrap),
})
}
impl<'a> ToCss for LonghandsToSerialize<'a> {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
self.flex_direction.to_css(dest)?;
dest.write_str(" ")?;
self.flex_wrap.to_css(dest)
}
}
</%helpers:shorthand>
<%helpers:shorthand name="flex" sub_properties="flex-grow flex-shrink flex-basis" extra_prefixes="webkit"
spec="https://drafts.csswg.org/css-flexbox/#flex-property">
use values::specified::Number;
fn parse_flexibility(context: &ParserContext, input: &mut Parser)
-> Result<(Number, Option<Number>),()> {
let grow = try!(Number::parse_non_negative(context, input));
let shrink = input.try(|i| Number::parse_non_negative(context, i)).ok();
Ok((grow, shrink))
}
pub fn parse_value(context: &ParserContext, input: &mut Parser) -> Result<Longhands, ()> {
let mut grow = None;
let mut shrink = None;
let mut basis = None;
if input.try(|input| input.expect_ident_matching("none")).is_ok() {
return Ok(Longhands {
flex_grow: Number::new(0.0),
flex_shrink: Number::new(0.0),
flex_basis: longhands::flex_basis::SpecifiedValue::auto(),
})
}
loop {
if grow.is_none() {
if let Ok((flex_grow, flex_shrink)) = input.try(|i| parse_flexibility(context, i)) {
grow = Some(flex_grow);
shrink = flex_shrink;
continue
}
}
if basis.is_none() {
if let Ok(value) = input.try(|input| longhands::flex_basis::parse(context, input)) {
basis = Some(value);
continue
}
}
break
}
if grow.is_none() && basis.is_none() {
return Err(())
}
Ok(Longhands {
flex_grow: grow.unwrap_or(Number::new(1.0)),
flex_shrink: shrink.unwrap_or(Number::new(1.0)),
flex_basis: basis.unwrap_or(longhands::flex_basis::SpecifiedValue::zero()),
})
}
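    // Worked expansions implied by the parser above (illustrative, not normative):
    //   flex: none       -> flex-grow: 0, flex-shrink: 0, flex-basis: auto
    //   flex: 2          -> flex-grow: 2, flex-shrink: 1 (default), flex-basis: 0 (the zero() default)
    //   flex: 1 0 auto   -> flex-grow: 1, flex-shrink: 0, flex-basis: auto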
impl<'a> ToCss for LonghandsToSerialize<'a> {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
try!(self.flex_grow.to_css(dest));
try!(dest.write_str(" "));
try!(self.flex_shrink.to_css(dest));
try!(dest.write_str(" "));
self.flex_basis.to_css(dest)
}
}
</%helpers:shorthand>
<%helpers:shorthand name="grid-gap" sub_properties="grid-row-gap grid-column-gap"
spec="https://drafts.csswg.org/css-grid/#propdef-grid-gap"
products="gecko">
use properties::longhands::{grid_row_gap, grid_column_gap};
pub fn parse_value(context: &ParserContext, input: &mut Parser) -> Result<Longhands, ()> {
let row_gap = grid_row_gap::parse(context, input)?;
let column_gap = input.try(|input| grid_column_gap::parse(context, input)).unwrap_or(row_gap.clone());
Ok(Longhands {
grid_row_gap: row_gap,
grid_column_gap: column_gap,
})
}
impl<'a> ToCss for LonghandsToSerialize<'a> {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
if self.grid_row_gap == self.grid_column_gap {
self.grid_row_gap.to_css(dest)
} else {
self.grid_row_gap.to_css(dest)?;
dest.write_str(" ")?;
self.grid_column_gap.to_css(dest)
}
}
}
</%helpers:shorthand>
% for kind in ["row", "column"]:
<%helpers:shorthand name="grid-${kind}" sub_properties="grid-${kind}-start grid-${kind}-end"
spec="https://drafts.csswg.org/css-grid/#propdef-grid-${kind}"
products="gecko">
use values::specified::GridLine;
use parser::Parse;
    // NOTE: Since both shorthands have the same code, we should (re-)use code from one to implement
// the other. This might not be a big deal for now, but we should consider looking into this in the future
// to limit the amount of code generated.
pub fn parse_value(context: &ParserContext, input: &mut Parser) -> Result<Longhands, ()> {
let start = input.try(|i| GridLine::parse(context, i))?;
let end = if input.try(|i| i.expect_delim('/')).is_ok() {
GridLine::parse(context, input)?
} else {
let mut line = GridLine::default();
if start.integer.is_none() && !start.is_span {
line.ident = start.ident.clone(); // ident from start value should be taken
}
line
};
Ok(Longhands {
grid_${kind}_start: start,
grid_${kind}_end: end,
})
}
impl<'a> ToCss for LonghandsToSerialize<'a> {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
self.grid_${kind}_start.to_css(dest)?;
dest.write_str(" / ")?;
self.grid_${kind}_end.to_css(dest)
}
}
</%helpers:shorthand>
% endfor
<%helpers:shorthand name="grid-area"
sub_properties="grid-row-start grid-row-end grid-column-start grid-column-end"
spec="https://drafts.csswg.org/css-grid/#propdef-grid-area"
products="gecko">
use values::specified::GridLine;
use parser::Parse;
// The code is the same as `grid-{row,column}` except that this can have four values at most.
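    // Rough mapping of 1-4 slash-separated values onto the four longhands, as implemented
    // below (shown only as an illustration; custom idents are copied over, integers/spans are not):
    //   grid-area: a              -> row-start: a, the other three reuse the ident `a`
    //   grid-area: a / b          -> row-start: a, column-start: b, the ends reuse the idents
    //   grid-area: a / b / c      -> row-start: a, column-start: b, row-end: c, column-end from b
    //   grid-area: a / b / c / d  -> all four given explicitly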
pub fn parse_value(context: &ParserContext, input: &mut Parser) -> Result<Longhands, ()> {
fn line_with_ident_from(other: &GridLine) -> GridLine {
let mut this = GridLine::default();
if other.integer.is_none() && !other.is_span {
this.ident = other.ident.clone();
}
this
}
let row_start = input.try(|i| GridLine::parse(context, i))?;
let (column_start, row_end, column_end) = if input.try(|i| i.expect_delim('/')).is_ok() {
let column_start = GridLine::parse(context, input)?;
let (row_end, column_end) = if input.try(|i| i.expect_delim('/')).is_ok() {
let row_end = GridLine::parse(context, input)?;
let column_end = if input.try(|i| i.expect_delim('/')).is_ok() {
GridLine::parse(context, input)?
} else { // grid-column-end has not been given
line_with_ident_from(&column_start)
};
(row_end, column_end)
            } else { // grid-row-start and grid-column-start have been given
let row_end = line_with_ident_from(&row_start);
let column_end = line_with_ident_from(&column_start);
(row_end, column_end)
};
(column_start, row_end, column_end)<|fim▁hole|>
Ok(Longhands {
grid_row_start: row_start,
grid_row_end: row_end,
grid_column_start: column_start,
grid_column_end: column_end,
})
}
impl<'a> ToCss for LonghandsToSerialize<'a> {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
self.grid_row_start.to_css(dest)?;
let values = [&self.grid_column_start, &self.grid_row_end, &self.grid_column_end];
for value in &values {
dest.write_str(" / ")?;
value.to_css(dest)?;
}
Ok(())
}
}
</%helpers:shorthand>
<%helpers:shorthand name="place-content" sub_properties="align-content justify-content"
spec="https://drafts.csswg.org/css-align/#propdef-place-content"
products="gecko" disable_when_testing="True">
use properties::longhands::align_content;
use properties::longhands::justify_content;
pub fn parse_value(context: &ParserContext, input: &mut Parser) -> Result<Longhands, ()> {
let align = align_content::parse(context, input)?;
if align.has_extra_flags() {
return Err(());
}
let justify = input.try(|input| justify_content::parse(context, input))
.unwrap_or(justify_content::SpecifiedValue::from(align));
if justify.has_extra_flags() {
return Err(());
}
Ok(Longhands {
align_content: align,
justify_content: justify,
})
}
impl<'a> ToCss for LonghandsToSerialize<'a> {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
if self.align_content == self.justify_content {
self.align_content.to_css(dest)
} else {
                self.align_content.to_css(dest)?;
dest.write_str(" ")?;
self.justify_content.to_css(dest)
}
}
}
</%helpers:shorthand>
<%helpers:shorthand name="place-self" sub_properties="align-self justify-self"
spec="https://drafts.csswg.org/css-align/#place-self-property"
products="gecko" disable_when_testing="True">
use values::specified::align::AlignJustifySelf;
use parser::Parse;
pub fn parse_value(context: &ParserContext, input: &mut Parser) -> Result<Longhands, ()> {
let align = AlignJustifySelf::parse(context, input)?;
if align.has_extra_flags() {
return Err(());
}
let justify = input.try(|input| AlignJustifySelf::parse(context, input)).unwrap_or(align.clone());
if justify.has_extra_flags() {
return Err(());
}
Ok(Longhands {
align_self: align,
justify_self: justify,
})
}
impl<'a> ToCss for LonghandsToSerialize<'a> {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
if self.align_self == self.justify_self {
self.align_self.to_css(dest)
} else {
self.align_self.to_css(dest)?;
dest.write_str(" ")?;
self.justify_self.to_css(dest)
}
}
}
</%helpers:shorthand>
<%helpers:shorthand name="place-items" sub_properties="align-items justify-items"
spec="https://drafts.csswg.org/css-align/#place-items-property"
products="gecko" disable_when_testing="True">
use values::specified::align::{AlignItems, JustifyItems};
use parser::Parse;
impl From<AlignItems> for JustifyItems {
fn from(align: AlignItems) -> JustifyItems {
JustifyItems(align.0)
}
}
pub fn parse_value(context: &ParserContext, input: &mut Parser) -> Result<Longhands, ()> {
let align = AlignItems::parse(context, input)?;
if align.has_extra_flags() {
return Err(());
}
let justify = input.try(|input| JustifyItems::parse(context, input))
.unwrap_or(JustifyItems::from(align));
if justify.has_extra_flags() {
return Err(());
}
Ok(Longhands {
align_items: align,
justify_items: justify,
})
}
impl<'a> ToCss for LonghandsToSerialize<'a> {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
if self.align_items.0 == self.justify_items.0 {
self.align_items.to_css(dest)
} else {
self.align_items.to_css(dest)?;
dest.write_str(" ")?;
self.justify_items.to_css(dest)
}
}
}
</%helpers:shorthand><|fim▁end|> | } else { // only grid-row-start is given
let line = line_with_ident_from(&row_start);
(line.clone(), line.clone(), line)
}; |
<|file_name|>full_factorial.py<|end_file_name|><|fim▁begin|>"""
Author: Dr. John T. Hwang <[email protected]>
This package is distributed under New BSD license.
Full-factorial sampling.
"""
import numpy as np
from smt.sampling_methods.sampling_method import SamplingMethod
class FullFactorial(SamplingMethod):
def _initialize(self):
self.options.declare(
"weights",
values=None,
types=(list, np.ndarray),
desc="relative sampling weights for each nx dimensions",
)
self.options.declare(
"clip",
default=False,
types=bool,
desc="round number of samples to the sampling number product of each nx dimensions (> asked nt)",
)
<|fim▁hole|> """
Compute the requested number of sampling points.
Arguments
---------
nt : int
Number of points requested.
Returns
-------
ndarray[nt, nx]
The sampling locations in the input space.
"""
xlimits = self.options["xlimits"]
nx = xlimits.shape[0]
if self.options["weights"] is None:
weights = np.ones(nx) / nx
else:
weights = np.atleast_1d(self.options["weights"])
weights /= np.sum(weights)
num_list = np.ones(nx, int)
while np.prod(num_list) < nt:
ind = np.argmax(weights - num_list / np.sum(num_list))
num_list[ind] += 1
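        # Illustration (not from the upstream docs): with nx=2, equal weights and nt=9 the
        # loop above grows num_list to [3, 3] (a 3 x 3 grid); with nt=10 it reaches [4, 3]
        # (12 candidate points), which is truncated back to nt rows below unless the
        # "clip" option keeps the full 12-point factorial.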
lins_list = [np.linspace(0.0, 1.0, num_list[kx]) for kx in range(nx)]
x_list = np.meshgrid(*lins_list, indexing="ij")
if self.options["clip"]:
nt = np.prod(num_list)
x = np.zeros((nt, nx))
for kx in range(nx):
x[:, kx] = x_list[kx].reshape(np.prod(num_list))[:nt]
return x<|fim▁end|> | def _compute(self, nt): |
<|file_name|>mat4.js<|end_file_name|><|fim▁begin|>/* Copyright (c) 2015, Brandon Jones, Colin MacKenzie IV.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE. */
var glMatrix = require("./common.js");
/**
* @class 4x4 Matrix
* @name mat4
*/
var mat4 = {
scalar: {},
SIMD: {}
};
/**
* Creates a new identity mat4
*
* @returns {mat4} a new 4x4 matrix
*/
mat4.create = function() {
var out = new glMatrix.ARRAY_TYPE(16);
out[0] = 1;
out[1] = 0;
out[2] = 0;
out[3] = 0;
out[4] = 0;
out[5] = 1;
out[6] = 0;
out[7] = 0;
out[8] = 0;
out[9] = 0;
out[10] = 1;
out[11] = 0;
out[12] = 0;
out[13] = 0;
out[14] = 0;
out[15] = 1;
return out;
};
/**
* Creates a new mat4 initialized with values from an existing matrix
*
* @param {mat4} a matrix to clone
* @returns {mat4} a new 4x4 matrix
*/
mat4.clone = function(a) {
var out = new glMatrix.ARRAY_TYPE(16);
out[0] = a[0];
out[1] = a[1];
out[2] = a[2];
out[3] = a[3];
out[4] = a[4];
out[5] = a[5];
out[6] = a[6];
out[7] = a[7];
out[8] = a[8];
out[9] = a[9];
out[10] = a[10];
out[11] = a[11];
out[12] = a[12];
out[13] = a[13];
out[14] = a[14];
out[15] = a[15];
return out;
};
/**
* Copy the values from one mat4 to another
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the source matrix
* @returns {mat4} out
*/
mat4.copy = function(out, a) {
out[0] = a[0];
out[1] = a[1];
out[2] = a[2];
out[3] = a[3];
out[4] = a[4];
out[5] = a[5];
out[6] = a[6];
out[7] = a[7];
out[8] = a[8];
out[9] = a[9];
out[10] = a[10];
out[11] = a[11];
out[12] = a[12];
out[13] = a[13];
out[14] = a[14];
out[15] = a[15];
return out;
};
/**
* Create a new mat4 with the given values
*
* @param {Number} m00 Component in column 0, row 0 position (index 0)
* @param {Number} m01 Component in column 0, row 1 position (index 1)
* @param {Number} m02 Component in column 0, row 2 position (index 2)
* @param {Number} m03 Component in column 0, row 3 position (index 3)
* @param {Number} m10 Component in column 1, row 0 position (index 4)
* @param {Number} m11 Component in column 1, row 1 position (index 5)
* @param {Number} m12 Component in column 1, row 2 position (index 6)
* @param {Number} m13 Component in column 1, row 3 position (index 7)
* @param {Number} m20 Component in column 2, row 0 position (index 8)
* @param {Number} m21 Component in column 2, row 1 position (index 9)
* @param {Number} m22 Component in column 2, row 2 position (index 10)
* @param {Number} m23 Component in column 2, row 3 position (index 11)
* @param {Number} m30 Component in column 3, row 0 position (index 12)
* @param {Number} m31 Component in column 3, row 1 position (index 13)
* @param {Number} m32 Component in column 3, row 2 position (index 14)
* @param {Number} m33 Component in column 3, row 3 position (index 15)
* @returns {mat4} A new mat4
*/
mat4.fromValues = function(m00, m01, m02, m03, m10, m11, m12, m13, m20, m21, m22, m23, m30, m31, m32, m33) {
var out = new glMatrix.ARRAY_TYPE(16);
out[0] = m00;
out[1] = m01;
out[2] = m02;
out[3] = m03;
out[4] = m10;
out[5] = m11;
out[6] = m12;
out[7] = m13;
out[8] = m20;
out[9] = m21;
out[10] = m22;
out[11] = m23;
out[12] = m30;
out[13] = m31;
out[14] = m32;
out[15] = m33;
return out;
};
/**
* Set the components of a mat4 to the given values
*
* @param {mat4} out the receiving matrix
* @param {Number} m00 Component in column 0, row 0 position (index 0)
* @param {Number} m01 Component in column 0, row 1 position (index 1)
* @param {Number} m02 Component in column 0, row 2 position (index 2)
* @param {Number} m03 Component in column 0, row 3 position (index 3)
* @param {Number} m10 Component in column 1, row 0 position (index 4)
* @param {Number} m11 Component in column 1, row 1 position (index 5)
* @param {Number} m12 Component in column 1, row 2 position (index 6)
* @param {Number} m13 Component in column 1, row 3 position (index 7)
* @param {Number} m20 Component in column 2, row 0 position (index 8)
* @param {Number} m21 Component in column 2, row 1 position (index 9)
* @param {Number} m22 Component in column 2, row 2 position (index 10)
* @param {Number} m23 Component in column 2, row 3 position (index 11)
* @param {Number} m30 Component in column 3, row 0 position (index 12)
* @param {Number} m31 Component in column 3, row 1 position (index 13)
* @param {Number} m32 Component in column 3, row 2 position (index 14)
* @param {Number} m33 Component in column 3, row 3 position (index 15)
* @returns {mat4} out
*/
mat4.set = function(out, m00, m01, m02, m03, m10, m11, m12, m13, m20, m21, m22, m23, m30, m31, m32, m33) {
out[0] = m00;
out[1] = m01;
out[2] = m02;
out[3] = m03;
out[4] = m10;
out[5] = m11;
out[6] = m12;
out[7] = m13;
out[8] = m20;
out[9] = m21;
out[10] = m22;
out[11] = m23;
out[12] = m30;
out[13] = m31;
out[14] = m32;
out[15] = m33;
return out;
};
/**
* Set a mat4 to the identity matrix
*
* @param {mat4} out the receiving matrix
* @returns {mat4} out
*/
mat4.identity = function(out) {
out[0] = 1;
out[1] = 0;
out[2] = 0;
out[3] = 0;
out[4] = 0;
out[5] = 1;
out[6] = 0;
out[7] = 0;
out[8] = 0;
out[9] = 0;
out[10] = 1;
out[11] = 0;
out[12] = 0;
out[13] = 0;
out[14] = 0;
out[15] = 1;
return out;
};
/**
* Transpose the values of a mat4 not using SIMD
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the source matrix
* @returns {mat4} out
*/
mat4.scalar.transpose = function(out, a) {
// If we are transposing ourselves we can skip a few steps but have to cache some values
if (out === a) {
var a01 = a[1], a02 = a[2], a03 = a[3],
a12 = a[6], a13 = a[7],
a23 = a[11];
out[1] = a[4];
out[2] = a[8];
out[3] = a[12];
out[4] = a01;
out[6] = a[9];
out[7] = a[13];
out[8] = a02;
out[9] = a12;
out[11] = a[14];
out[12] = a03;
out[13] = a13;
out[14] = a23;
} else {
out[0] = a[0];
out[1] = a[4];
out[2] = a[8];
out[3] = a[12];
out[4] = a[1];
out[5] = a[5];
out[6] = a[9];
out[7] = a[13];
out[8] = a[2];
out[9] = a[6];
out[10] = a[10];
out[11] = a[14];
out[12] = a[3];
out[13] = a[7];
out[14] = a[11];
out[15] = a[15];
}
return out;
};
/**
* Transpose the values of a mat4 using SIMD
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the source matrix
* @returns {mat4} out
*/
mat4.SIMD.transpose = function(out, a) {
var a0, a1, a2, a3,
tmp01, tmp23,
out0, out1, out2, out3;
a0 = SIMD.Float32x4.load(a, 0);
a1 = SIMD.Float32x4.load(a, 4);
a2 = SIMD.Float32x4.load(a, 8);
a3 = SIMD.Float32x4.load(a, 12);
tmp01 = SIMD.Float32x4.shuffle(a0, a1, 0, 1, 4, 5);
tmp23 = SIMD.Float32x4.shuffle(a2, a3, 0, 1, 4, 5);
out0 = SIMD.Float32x4.shuffle(tmp01, tmp23, 0, 2, 4, 6);
out1 = SIMD.Float32x4.shuffle(tmp01, tmp23, 1, 3, 5, 7);
SIMD.Float32x4.store(out, 0, out0);
SIMD.Float32x4.store(out, 4, out1);
tmp01 = SIMD.Float32x4.shuffle(a0, a1, 2, 3, 6, 7);
tmp23 = SIMD.Float32x4.shuffle(a2, a3, 2, 3, 6, 7);
out2 = SIMD.Float32x4.shuffle(tmp01, tmp23, 0, 2, 4, 6);
out3 = SIMD.Float32x4.shuffle(tmp01, tmp23, 1, 3, 5, 7);
SIMD.Float32x4.store(out, 8, out2);
SIMD.Float32x4.store(out, 12, out3);
return out;
};
/**
 * Transpose a mat4 using SIMD if available and enabled
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the source matrix
* @returns {mat4} out
*/
mat4.transpose = glMatrix.USE_SIMD ? mat4.SIMD.transpose : mat4.scalar.transpose;
/**
* Inverts a mat4 not using SIMD
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the source matrix
* @returns {mat4} out
*/
mat4.scalar.invert = function(out, a) {
var a00 = a[0], a01 = a[1], a02 = a[2], a03 = a[3],
a10 = a[4], a11 = a[5], a12 = a[6], a13 = a[7],
a20 = a[8], a21 = a[9], a22 = a[10], a23 = a[11],
a30 = a[12], a31 = a[13], a32 = a[14], a33 = a[15],
b00 = a00 * a11 - a01 * a10,
b01 = a00 * a12 - a02 * a10,
b02 = a00 * a13 - a03 * a10,
b03 = a01 * a12 - a02 * a11,
b04 = a01 * a13 - a03 * a11,
b05 = a02 * a13 - a03 * a12,
b06 = a20 * a31 - a21 * a30,
b07 = a20 * a32 - a22 * a30,
b08 = a20 * a33 - a23 * a30,
b09 = a21 * a32 - a22 * a31,
b10 = a21 * a33 - a23 * a31,
b11 = a22 * a33 - a23 * a32,
// Calculate the determinant
det = b00 * b11 - b01 * b10 + b02 * b09 + b03 * b08 - b04 * b07 + b05 * b06;
if (!det) {
return null;
}
det = 1.0 / det;
out[0] = (a11 * b11 - a12 * b10 + a13 * b09) * det;
out[1] = (a02 * b10 - a01 * b11 - a03 * b09) * det;
out[2] = (a31 * b05 - a32 * b04 + a33 * b03) * det;
out[3] = (a22 * b04 - a21 * b05 - a23 * b03) * det;
out[4] = (a12 * b08 - a10 * b11 - a13 * b07) * det;
out[5] = (a00 * b11 - a02 * b08 + a03 * b07) * det;
out[6] = (a32 * b02 - a30 * b05 - a33 * b01) * det;
out[7] = (a20 * b05 - a22 * b02 + a23 * b01) * det;
out[8] = (a10 * b10 - a11 * b08 + a13 * b06) * det;
out[9] = (a01 * b08 - a00 * b10 - a03 * b06) * det;
out[10] = (a30 * b04 - a31 * b02 + a33 * b00) * det;
out[11] = (a21 * b02 - a20 * b04 - a23 * b00) * det;
out[12] = (a11 * b07 - a10 * b09 - a12 * b06) * det;
out[13] = (a00 * b09 - a01 * b07 + a02 * b06) * det;
out[14] = (a31 * b01 - a30 * b03 - a32 * b00) * det;
out[15] = (a20 * b03 - a21 * b01 + a22 * b00) * det;
return out;
};
/**
* Inverts a mat4 using SIMD
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the source matrix
* @returns {mat4} out
*/
mat4.SIMD.invert = function(out, a) {
var row0, row1, row2, row3,
tmp1,
minor0, minor1, minor2, minor3,
det,
a0 = SIMD.Float32x4.load(a, 0),
a1 = SIMD.Float32x4.load(a, 4),
a2 = SIMD.Float32x4.load(a, 8),
a3 = SIMD.Float32x4.load(a, 12);
// Compute matrix adjugate
tmp1 = SIMD.Float32x4.shuffle(a0, a1, 0, 1, 4, 5);
row1 = SIMD.Float32x4.shuffle(a2, a3, 0, 1, 4, 5);
row0 = SIMD.Float32x4.shuffle(tmp1, row1, 0, 2, 4, 6);
row1 = SIMD.Float32x4.shuffle(row1, tmp1, 1, 3, 5, 7);
tmp1 = SIMD.Float32x4.shuffle(a0, a1, 2, 3, 6, 7);
row3 = SIMD.Float32x4.shuffle(a2, a3, 2, 3, 6, 7);
row2 = SIMD.Float32x4.shuffle(tmp1, row3, 0, 2, 4, 6);
row3 = SIMD.Float32x4.shuffle(row3, tmp1, 1, 3, 5, 7);
tmp1 = SIMD.Float32x4.mul(row2, row3);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 1, 0, 3, 2);
minor0 = SIMD.Float32x4.mul(row1, tmp1);
minor1 = SIMD.Float32x4.mul(row0, tmp1);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 2, 3, 0, 1);
minor0 = SIMD.Float32x4.sub(SIMD.Float32x4.mul(row1, tmp1), minor0);
minor1 = SIMD.Float32x4.sub(SIMD.Float32x4.mul(row0, tmp1), minor1);
minor1 = SIMD.Float32x4.swizzle(minor1, 2, 3, 0, 1);
tmp1 = SIMD.Float32x4.mul(row1, row2);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 1, 0, 3, 2);
minor0 = SIMD.Float32x4.add(SIMD.Float32x4.mul(row3, tmp1), minor0);
minor3 = SIMD.Float32x4.mul(row0, tmp1);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 2, 3, 0, 1);
minor0 = SIMD.Float32x4.sub(minor0, SIMD.Float32x4.mul(row3, tmp1));
minor3 = SIMD.Float32x4.sub(SIMD.Float32x4.mul(row0, tmp1), minor3);
minor3 = SIMD.Float32x4.swizzle(minor3, 2, 3, 0, 1);
tmp1 = SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(row1, 2, 3, 0, 1), row3);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 1, 0, 3, 2);
row2 = SIMD.Float32x4.swizzle(row2, 2, 3, 0, 1);
minor0 = SIMD.Float32x4.add(SIMD.Float32x4.mul(row2, tmp1), minor0);
minor2 = SIMD.Float32x4.mul(row0, tmp1);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 2, 3, 0, 1);
minor0 = SIMD.Float32x4.sub(minor0, SIMD.Float32x4.mul(row2, tmp1));
minor2 = SIMD.Float32x4.sub(SIMD.Float32x4.mul(row0, tmp1), minor2);
minor2 = SIMD.Float32x4.swizzle(minor2, 2, 3, 0, 1);
tmp1 = SIMD.Float32x4.mul(row0, row1);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 1, 0, 3, 2);
minor2 = SIMD.Float32x4.add(SIMD.Float32x4.mul(row3, tmp1), minor2);
minor3 = SIMD.Float32x4.sub(SIMD.Float32x4.mul(row2, tmp1), minor3);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 2, 3, 0, 1);
minor2 = SIMD.Float32x4.sub(SIMD.Float32x4.mul(row3, tmp1), minor2);
minor3 = SIMD.Float32x4.sub(minor3, SIMD.Float32x4.mul(row2, tmp1));
tmp1 = SIMD.Float32x4.mul(row0, row3);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 1, 0, 3, 2);
minor1 = SIMD.Float32x4.sub(minor1, SIMD.Float32x4.mul(row2, tmp1));
minor2 = SIMD.Float32x4.add(SIMD.Float32x4.mul(row1, tmp1), minor2);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 2, 3, 0, 1);
minor1 = SIMD.Float32x4.add(SIMD.Float32x4.mul(row2, tmp1), minor1);
minor2 = SIMD.Float32x4.sub(minor2, SIMD.Float32x4.mul(row1, tmp1));
tmp1 = SIMD.Float32x4.mul(row0, row2);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 1, 0, 3, 2);
minor1 = SIMD.Float32x4.add(SIMD.Float32x4.mul(row3, tmp1), minor1);
minor3 = SIMD.Float32x4.sub(minor3, SIMD.Float32x4.mul(row1, tmp1));
tmp1 = SIMD.Float32x4.swizzle(tmp1, 2, 3, 0, 1);
minor1 = SIMD.Float32x4.sub(minor1, SIMD.Float32x4.mul(row3, tmp1));
minor3 = SIMD.Float32x4.add(SIMD.Float32x4.mul(row1, tmp1), minor3);
// Compute matrix determinant
det = SIMD.Float32x4.mul(row0, minor0);
det = SIMD.Float32x4.add(SIMD.Float32x4.swizzle(det, 2, 3, 0, 1), det);
det = SIMD.Float32x4.add(SIMD.Float32x4.swizzle(det, 1, 0, 3, 2), det);
tmp1 = SIMD.Float32x4.reciprocalApproximation(det);
det = SIMD.Float32x4.sub(
SIMD.Float32x4.add(tmp1, tmp1),
SIMD.Float32x4.mul(det, SIMD.Float32x4.mul(tmp1, tmp1)));
det = SIMD.Float32x4.swizzle(det, 0, 0, 0, 0);
if (!det) {
return null;
}
// Compute matrix inverse
SIMD.Float32x4.store(out, 0, SIMD.Float32x4.mul(det, minor0));
SIMD.Float32x4.store(out, 4, SIMD.Float32x4.mul(det, minor1));
SIMD.Float32x4.store(out, 8, SIMD.Float32x4.mul(det, minor2));
SIMD.Float32x4.store(out, 12, SIMD.Float32x4.mul(det, minor3));
return out;
}
/**
* Inverts a mat4 using SIMD if available and enabled
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the source matrix
* @returns {mat4} out
*/
mat4.invert = glMatrix.USE_SIMD ? mat4.SIMD.invert : mat4.scalar.invert;
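// Sanity sketch (illustrative only): inverting a pure translation negates the offset.
// var m = mat4.fromTranslation(mat4.create(), [1, 2, 3]);
// mat4.invert(m, m); // the translation column becomes [-1, -2, -3, 1]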
/**
* Calculates the adjugate of a mat4 not using SIMD
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the source matrix
* @returns {mat4} out
*/
mat4.scalar.adjoint = function(out, a) {
var a00 = a[0], a01 = a[1], a02 = a[2], a03 = a[3],
a10 = a[4], a11 = a[5], a12 = a[6], a13 = a[7],
a20 = a[8], a21 = a[9], a22 = a[10], a23 = a[11],
a30 = a[12], a31 = a[13], a32 = a[14], a33 = a[15];
out[0] = (a11 * (a22 * a33 - a23 * a32) - a21 * (a12 * a33 - a13 * a32) + a31 * (a12 * a23 - a13 * a22));
out[1] = -(a01 * (a22 * a33 - a23 * a32) - a21 * (a02 * a33 - a03 * a32) + a31 * (a02 * a23 - a03 * a22));
out[2] = (a01 * (a12 * a33 - a13 * a32) - a11 * (a02 * a33 - a03 * a32) + a31 * (a02 * a13 - a03 * a12));
out[3] = -(a01 * (a12 * a23 - a13 * a22) - a11 * (a02 * a23 - a03 * a22) + a21 * (a02 * a13 - a03 * a12));
out[4] = -(a10 * (a22 * a33 - a23 * a32) - a20 * (a12 * a33 - a13 * a32) + a30 * (a12 * a23 - a13 * a22));
out[5] = (a00 * (a22 * a33 - a23 * a32) - a20 * (a02 * a33 - a03 * a32) + a30 * (a02 * a23 - a03 * a22));
out[6] = -(a00 * (a12 * a33 - a13 * a32) - a10 * (a02 * a33 - a03 * a32) + a30 * (a02 * a13 - a03 * a12));
out[7] = (a00 * (a12 * a23 - a13 * a22) - a10 * (a02 * a23 - a03 * a22) + a20 * (a02 * a13 - a03 * a12));
out[8] = (a10 * (a21 * a33 - a23 * a31) - a20 * (a11 * a33 - a13 * a31) + a30 * (a11 * a23 - a13 * a21));
out[9] = -(a00 * (a21 * a33 - a23 * a31) - a20 * (a01 * a33 - a03 * a31) + a30 * (a01 * a23 - a03 * a21));
out[10] = (a00 * (a11 * a33 - a13 * a31) - a10 * (a01 * a33 - a03 * a31) + a30 * (a01 * a13 - a03 * a11));
out[11] = -(a00 * (a11 * a23 - a13 * a21) - a10 * (a01 * a23 - a03 * a21) + a20 * (a01 * a13 - a03 * a11));
out[12] = -(a10 * (a21 * a32 - a22 * a31) - a20 * (a11 * a32 - a12 * a31) + a30 * (a11 * a22 - a12 * a21));
out[13] = (a00 * (a21 * a32 - a22 * a31) - a20 * (a01 * a32 - a02 * a31) + a30 * (a01 * a22 - a02 * a21));
out[14] = -(a00 * (a11 * a32 - a12 * a31) - a10 * (a01 * a32 - a02 * a31) + a30 * (a01 * a12 - a02 * a11));
out[15] = (a00 * (a11 * a22 - a12 * a21) - a10 * (a01 * a22 - a02 * a21) + a20 * (a01 * a12 - a02 * a11));
return out;
};
/**
* Calculates the adjugate of a mat4 using SIMD
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the source matrix
* @returns {mat4} out
*/
mat4.SIMD.adjoint = function(out, a) {
var a0, a1, a2, a3;
var row0, row1, row2, row3;
var tmp1;
var minor0, minor1, minor2, minor3;
a0 = SIMD.Float32x4.load(a, 0);
a1 = SIMD.Float32x4.load(a, 4);
a2 = SIMD.Float32x4.load(a, 8);
a3 = SIMD.Float32x4.load(a, 12);
// Transpose the source matrix. Sort of. Not a true transpose operation
tmp1 = SIMD.Float32x4.shuffle(a0, a1, 0, 1, 4, 5);
row1 = SIMD.Float32x4.shuffle(a2, a3, 0, 1, 4, 5);
row0 = SIMD.Float32x4.shuffle(tmp1, row1, 0, 2, 4, 6);
row1 = SIMD.Float32x4.shuffle(row1, tmp1, 1, 3, 5, 7);
tmp1 = SIMD.Float32x4.shuffle(a0, a1, 2, 3, 6, 7);
row3 = SIMD.Float32x4.shuffle(a2, a3, 2, 3, 6, 7);
row2 = SIMD.Float32x4.shuffle(tmp1, row3, 0, 2, 4, 6);
row3 = SIMD.Float32x4.shuffle(row3, tmp1, 1, 3, 5, 7);
tmp1 = SIMD.Float32x4.mul(row2, row3);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 1, 0, 3, 2);
minor0 = SIMD.Float32x4.mul(row1, tmp1);
minor1 = SIMD.Float32x4.mul(row0, tmp1);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 2, 3, 0, 1);
minor0 = SIMD.Float32x4.sub(SIMD.Float32x4.mul(row1, tmp1), minor0);
minor1 = SIMD.Float32x4.sub(SIMD.Float32x4.mul(row0, tmp1), minor1);
minor1 = SIMD.Float32x4.swizzle(minor1, 2, 3, 0, 1);
tmp1 = SIMD.Float32x4.mul(row1, row2);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 1, 0, 3, 2);
minor0 = SIMD.Float32x4.add(SIMD.Float32x4.mul(row3, tmp1), minor0);
minor3 = SIMD.Float32x4.mul(row0, tmp1);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 2, 3, 0, 1);
minor0 = SIMD.Float32x4.sub(minor0, SIMD.Float32x4.mul(row3, tmp1));
minor3 = SIMD.Float32x4.sub(SIMD.Float32x4.mul(row0, tmp1), minor3);
minor3 = SIMD.Float32x4.swizzle(minor3, 2, 3, 0, 1);
tmp1 = SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(row1, 2, 3, 0, 1), row3);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 1, 0, 3, 2);
row2 = SIMD.Float32x4.swizzle(row2, 2, 3, 0, 1);
minor0 = SIMD.Float32x4.add(SIMD.Float32x4.mul(row2, tmp1), minor0);
minor2 = SIMD.Float32x4.mul(row0, tmp1);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 2, 3, 0, 1);
minor0 = SIMD.Float32x4.sub(minor0, SIMD.Float32x4.mul(row2, tmp1));
minor2 = SIMD.Float32x4.sub(SIMD.Float32x4.mul(row0, tmp1), minor2);
minor2 = SIMD.Float32x4.swizzle(minor2, 2, 3, 0, 1);
tmp1 = SIMD.Float32x4.mul(row0, row1);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 1, 0, 3, 2);
minor2 = SIMD.Float32x4.add(SIMD.Float32x4.mul(row3, tmp1), minor2);
minor3 = SIMD.Float32x4.sub(SIMD.Float32x4.mul(row2, tmp1), minor3);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 2, 3, 0, 1);
minor2 = SIMD.Float32x4.sub(SIMD.Float32x4.mul(row3, tmp1), minor2);
minor3 = SIMD.Float32x4.sub(minor3, SIMD.Float32x4.mul(row2, tmp1));
tmp1 = SIMD.Float32x4.mul(row0, row3);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 1, 0, 3, 2);
minor1 = SIMD.Float32x4.sub(minor1, SIMD.Float32x4.mul(row2, tmp1));
minor2 = SIMD.Float32x4.add(SIMD.Float32x4.mul(row1, tmp1), minor2);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 2, 3, 0, 1);
minor1 = SIMD.Float32x4.add(SIMD.Float32x4.mul(row2, tmp1), minor1);
minor2 = SIMD.Float32x4.sub(minor2, SIMD.Float32x4.mul(row1, tmp1));
tmp1 = SIMD.Float32x4.mul(row0, row2);
tmp1 = SIMD.Float32x4.swizzle(tmp1, 1, 0, 3, 2);
minor1 = SIMD.Float32x4.add(SIMD.Float32x4.mul(row3, tmp1), minor1);
minor3 = SIMD.Float32x4.sub(minor3, SIMD.Float32x4.mul(row1, tmp1));
tmp1 = SIMD.Float32x4.swizzle(tmp1, 2, 3, 0, 1);
minor1 = SIMD.Float32x4.sub(minor1, SIMD.Float32x4.mul(row3, tmp1));
minor3 = SIMD.Float32x4.add(SIMD.Float32x4.mul(row1, tmp1), minor3);
SIMD.Float32x4.store(out, 0, minor0);
SIMD.Float32x4.store(out, 4, minor1);
SIMD.Float32x4.store(out, 8, minor2);
SIMD.Float32x4.store(out, 12, minor3);
return out;
};
/**
* Calculates the adjugate of a mat4 using SIMD if available and enabled
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the source matrix
* @returns {mat4} out
*/
mat4.adjoint = glMatrix.USE_SIMD ? mat4.SIMD.adjoint : mat4.scalar.adjoint;
/**
* Calculates the determinant of a mat4
*
* @param {mat4} a the source matrix
* @returns {Number} determinant of a
*/
mat4.determinant = function (a) {
var a00 = a[0], a01 = a[1], a02 = a[2], a03 = a[3],
a10 = a[4], a11 = a[5], a12 = a[6], a13 = a[7],
a20 = a[8], a21 = a[9], a22 = a[10], a23 = a[11],
a30 = a[12], a31 = a[13], a32 = a[14], a33 = a[15],
b00 = a00 * a11 - a01 * a10,
b01 = a00 * a12 - a02 * a10,
b02 = a00 * a13 - a03 * a10,
b03 = a01 * a12 - a02 * a11,
b04 = a01 * a13 - a03 * a11,
b05 = a02 * a13 - a03 * a12,
b06 = a20 * a31 - a21 * a30,
b07 = a20 * a32 - a22 * a30,
b08 = a20 * a33 - a23 * a30,
b09 = a21 * a32 - a22 * a31,
b10 = a21 * a33 - a23 * a31,
b11 = a22 * a33 - a23 * a32;
// Calculate the determinant
return b00 * b11 - b01 * b10 + b02 * b09 + b03 * b08 - b04 * b07 + b05 * b06;
};
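// Quick check (illustrative): for a pure scaling matrix the determinant is the product
// of the scale factors, e.g.
// mat4.determinant(mat4.fromScaling(mat4.create(), [2, 3, 4])); // -> 24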
/**
* Multiplies two mat4's explicitly using SIMD
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the first operand, must be a Float32Array
* @param {mat4} b the second operand, must be a Float32Array
* @returns {mat4} out
*/
mat4.SIMD.multiply = function (out, a, b) {
var a0 = SIMD.Float32x4.load(a, 0);
var a1 = SIMD.Float32x4.load(a, 4);
var a2 = SIMD.Float32x4.load(a, 8);
var a3 = SIMD.Float32x4.load(a, 12);
var b0 = SIMD.Float32x4.load(b, 0);
var out0 = SIMD.Float32x4.add(
SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(b0, 0, 0, 0, 0), a0),
SIMD.Float32x4.add(
SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(b0, 1, 1, 1, 1), a1),
SIMD.Float32x4.add(
SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(b0, 2, 2, 2, 2), a2),
SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(b0, 3, 3, 3, 3), a3))));
SIMD.Float32x4.store(out, 0, out0);
var b1 = SIMD.Float32x4.load(b, 4);
var out1 = SIMD.Float32x4.add(
SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(b1, 0, 0, 0, 0), a0),
SIMD.Float32x4.add(
SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(b1, 1, 1, 1, 1), a1),
SIMD.Float32x4.add(
SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(b1, 2, 2, 2, 2), a2),
SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(b1, 3, 3, 3, 3), a3))));
SIMD.Float32x4.store(out, 4, out1);
var b2 = SIMD.Float32x4.load(b, 8);
var out2 = SIMD.Float32x4.add(
SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(b2, 0, 0, 0, 0), a0),
SIMD.Float32x4.add(
SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(b2, 1, 1, 1, 1), a1),
SIMD.Float32x4.add(
SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(b2, 2, 2, 2, 2), a2),
SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(b2, 3, 3, 3, 3), a3))));
SIMD.Float32x4.store(out, 8, out2);
var b3 = SIMD.Float32x4.load(b, 12);
var out3 = SIMD.Float32x4.add(
SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(b3, 0, 0, 0, 0), a0),
SIMD.Float32x4.add(
SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(b3, 1, 1, 1, 1), a1),
SIMD.Float32x4.add(
SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(b3, 2, 2, 2, 2), a2),
SIMD.Float32x4.mul(SIMD.Float32x4.swizzle(b3, 3, 3, 3, 3), a3))));
SIMD.Float32x4.store(out, 12, out3);
return out;
};
/**
* Multiplies two mat4's explicitly not using SIMD
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the first operand
* @param {mat4} b the second operand
* @returns {mat4} out
*/
mat4.scalar.multiply = function (out, a, b) {
var a00 = a[0], a01 = a[1], a02 = a[2], a03 = a[3],
a10 = a[4], a11 = a[5], a12 = a[6], a13 = a[7],
a20 = a[8], a21 = a[9], a22 = a[10], a23 = a[11],
a30 = a[12], a31 = a[13], a32 = a[14], a33 = a[15];
// Cache only the current line of the second matrix
var b0 = b[0], b1 = b[1], b2 = b[2], b3 = b[3];
out[0] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
out[1] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
out[2] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
out[3] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
b0 = b[4]; b1 = b[5]; b2 = b[6]; b3 = b[7];
out[4] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
out[5] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
out[6] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
out[7] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
b0 = b[8]; b1 = b[9]; b2 = b[10]; b3 = b[11];
out[8] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
out[9] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
out[10] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
out[11] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
b0 = b[12]; b1 = b[13]; b2 = b[14]; b3 = b[15];
out[12] = b0*a00 + b1*a10 + b2*a20 + b3*a30;
out[13] = b0*a01 + b1*a11 + b2*a21 + b3*a31;
out[14] = b0*a02 + b1*a12 + b2*a22 + b3*a32;
out[15] = b0*a03 + b1*a13 + b2*a23 + b3*a33;
return out;
};
/**
* Multiplies two mat4's using SIMD if available and enabled
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the first operand
* @param {mat4} b the second operand
* @returns {mat4} out
*/
mat4.multiply = glMatrix.USE_SIMD ? mat4.SIMD.multiply : mat4.scalar.multiply;
/**
* Alias for {@link mat4.multiply}
* @function
*/
mat4.mul = mat4.multiply;
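// Usage sketch (illustrative only): composing a model matrix with the helpers below.
// var model = mat4.create();
// mat4.translate(model, model, [1, 2, 3]);
// mat4.rotateY(model, model, Math.PI / 4);
// mat4.scale(model, model, [2, 2, 2]);
// // Equivalent to model = T * Ry * S; the transforms apply to a vector right-to-left.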
/**
* Translate a mat4 by the given vector not using SIMD
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the matrix to translate
* @param {vec3} v vector to translate by
* @returns {mat4} out
*/
mat4.scalar.translate = function (out, a, v) {
var x = v[0], y = v[1], z = v[2],
a00, a01, a02, a03,
a10, a11, a12, a13,
a20, a21, a22, a23;
if (a === out) {
out[12] = a[0] * x + a[4] * y + a[8] * z + a[12];
out[13] = a[1] * x + a[5] * y + a[9] * z + a[13];
out[14] = a[2] * x + a[6] * y + a[10] * z + a[14];
out[15] = a[3] * x + a[7] * y + a[11] * z + a[15];
} else {
a00 = a[0]; a01 = a[1]; a02 = a[2]; a03 = a[3];
a10 = a[4]; a11 = a[5]; a12 = a[6]; a13 = a[7];
a20 = a[8]; a21 = a[9]; a22 = a[10]; a23 = a[11];
out[0] = a00; out[1] = a01; out[2] = a02; out[3] = a03;
out[4] = a10; out[5] = a11; out[6] = a12; out[7] = a13;
out[8] = a20; out[9] = a21; out[10] = a22; out[11] = a23;
out[12] = a00 * x + a10 * y + a20 * z + a[12];
out[13] = a01 * x + a11 * y + a21 * z + a[13];
out[14] = a02 * x + a12 * y + a22 * z + a[14];
out[15] = a03 * x + a13 * y + a23 * z + a[15];
}
return out;
};
/**
* Translates a mat4 by the given vector using SIMD
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the matrix to translate
* @param {vec3} v vector to translate by
* @returns {mat4} out
*/
mat4.SIMD.translate = function (out, a, v) {
var a0 = SIMD.Float32x4.load(a, 0),
a1 = SIMD.Float32x4.load(a, 4),
a2 = SIMD.Float32x4.load(a, 8),
a3 = SIMD.Float32x4.load(a, 12),
vec = SIMD.Float32x4(v[0], v[1], v[2] , 0);
if (a !== out) {
out[0] = a[0]; out[1] = a[1]; out[2] = a[2]; out[3] = a[3];
out[4] = a[4]; out[5] = a[5]; out[6] = a[6]; out[7] = a[7];
out[8] = a[8]; out[9] = a[9]; out[10] = a[10]; out[11] = a[11];
}
a0 = SIMD.Float32x4.mul(a0, SIMD.Float32x4.swizzle(vec, 0, 0, 0, 0));
a1 = SIMD.Float32x4.mul(a1, SIMD.Float32x4.swizzle(vec, 1, 1, 1, 1));
a2 = SIMD.Float32x4.mul(a2, SIMD.Float32x4.swizzle(vec, 2, 2, 2, 2));
var t0 = SIMD.Float32x4.add(a0, SIMD.Float32x4.add(a1, SIMD.Float32x4.add(a2, a3)));
SIMD.Float32x4.store(out, 12, t0);
return out;
};
/**
* Translates a mat4 by the given vector using SIMD if available and enabled
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the matrix to translate
* @param {vec3} v vector to translate by
* @returns {mat4} out
*/
mat4.translate = glMatrix.USE_SIMD ? mat4.SIMD.translate : mat4.scalar.translate;
/**
* Scales the mat4 by the dimensions in the given vec3 not using vectorization
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the matrix to scale
* @param {vec3} v the vec3 to scale the matrix by
* @returns {mat4} out
**/
mat4.scalar.scale = function(out, a, v) {
var x = v[0], y = v[1], z = v[2];
out[0] = a[0] * x;
out[1] = a[1] * x;
out[2] = a[2] * x;
out[3] = a[3] * x;
out[4] = a[4] * y;
out[5] = a[5] * y;
out[6] = a[6] * y;
out[7] = a[7] * y;
out[8] = a[8] * z;
out[9] = a[9] * z;
out[10] = a[10] * z;
out[11] = a[11] * z;
out[12] = a[12];
out[13] = a[13];
out[14] = a[14];
out[15] = a[15];
return out;
};
/**
* Scales the mat4 by the dimensions in the given vec3 using vectorization
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the matrix to scale
* @param {vec3} v the vec3 to scale the matrix by
* @returns {mat4} out
**/
mat4.SIMD.scale = function(out, a, v) {
var a0, a1, a2;
var vec = SIMD.Float32x4(v[0], v[1], v[2], 0);
a0 = SIMD.Float32x4.load(a, 0);
SIMD.Float32x4.store(
out, 0, SIMD.Float32x4.mul(a0, SIMD.Float32x4.swizzle(vec, 0, 0, 0, 0)));
a1 = SIMD.Float32x4.load(a, 4);
SIMD.Float32x4.store(
out, 4, SIMD.Float32x4.mul(a1, SIMD.Float32x4.swizzle(vec, 1, 1, 1, 1)));
a2 = SIMD.Float32x4.load(a, 8);
SIMD.Float32x4.store(
out, 8, SIMD.Float32x4.mul(a2, SIMD.Float32x4.swizzle(vec, 2, 2, 2, 2)));
out[12] = a[12];
out[13] = a[13];
out[14] = a[14];
out[15] = a[15];
return out;
};
/**
* Scales the mat4 by the dimensions in the given vec3 using SIMD if available and enabled
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the matrix to scale
* @param {vec3} v the vec3 to scale the matrix by
* @returns {mat4} out
*/
mat4.scale = glMatrix.USE_SIMD ? mat4.SIMD.scale : mat4.scalar.scale;
/**
* Rotates a mat4 by the given angle around the given axis
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the matrix to rotate
* @param {Number} rad the angle to rotate the matrix by
* @param {vec3} axis the axis to rotate around
* @returns {mat4} out
*/
mat4.rotate = function (out, a, rad, axis) {
var x = axis[0], y = axis[1], z = axis[2],
len = Math.sqrt(x * x + y * y + z * z),
s, c, t,
a00, a01, a02, a03,
a10, a11, a12, a13,
a20, a21, a22, a23,
b00, b01, b02,
b10, b11, b12,
b20, b21, b22;
if (Math.abs(len) < glMatrix.EPSILON) { return null; }
len = 1 / len;
x *= len;
y *= len;
z *= len;
s = Math.sin(rad);
c = Math.cos(rad);
t = 1 - c;
a00 = a[0]; a01 = a[1]; a02 = a[2]; a03 = a[3];
a10 = a[4]; a11 = a[5]; a12 = a[6]; a13 = a[7];
a20 = a[8]; a21 = a[9]; a22 = a[10]; a23 = a[11];
// Construct the elements of the rotation matrix
b00 = x * x * t + c; b01 = y * x * t + z * s; b02 = z * x * t - y * s;
b10 = x * y * t - z * s; b11 = y * y * t + c; b12 = z * y * t + x * s;
b20 = x * z * t + y * s; b21 = y * z * t - x * s; b22 = z * z * t + c;
// Perform rotation-specific matrix multiplication
out[0] = a00 * b00 + a10 * b01 + a20 * b02;
out[1] = a01 * b00 + a11 * b01 + a21 * b02;
out[2] = a02 * b00 + a12 * b01 + a22 * b02;
out[3] = a03 * b00 + a13 * b01 + a23 * b02;
out[4] = a00 * b10 + a10 * b11 + a20 * b12;
out[5] = a01 * b10 + a11 * b11 + a21 * b12;
out[6] = a02 * b10 + a12 * b11 + a22 * b12;
out[7] = a03 * b10 + a13 * b11 + a23 * b12;
out[8] = a00 * b20 + a10 * b21 + a20 * b22;
out[9] = a01 * b20 + a11 * b21 + a21 * b22;
out[10] = a02 * b20 + a12 * b21 + a22 * b22;
out[11] = a03 * b20 + a13 * b21 + a23 * b22;
if (a !== out) { // If the source and destination differ, copy the unchanged last row
out[12] = a[12];
out[13] = a[13];
out[14] = a[14];
out[15] = a[15];
}
return out;
};
/**
* Rotates a matrix by the given angle around the X axis not using SIMD
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the matrix to rotate
* @param {Number} rad the angle to rotate the matrix by
* @returns {mat4} out
*/
mat4.scalar.rotateX = function (out, a, rad) {
var s = Math.sin(rad),
c = Math.cos(rad),
a10 = a[4],
a11 = a[5],
a12 = a[6],
a13 = a[7],
a20 = a[8],
a21 = a[9],
a22 = a[10],
a23 = a[11];
if (a !== out) { // If the source and destination differ, copy the unchanged rows
out[0] = a[0];
out[1] = a[1];
out[2] = a[2];
out[3] = a[3];
out[12] = a[12];
out[13] = a[13];
out[14] = a[14];
out[15] = a[15];
}
// Perform axis-specific matrix multiplication
out[4] = a10 * c + a20 * s;
out[5] = a11 * c + a21 * s;
out[6] = a12 * c + a22 * s;
out[7] = a13 * c + a23 * s;
out[8] = a20 * c - a10 * s;
out[9] = a21 * c - a11 * s;
out[10] = a22 * c - a12 * s;
out[11] = a23 * c - a13 * s;
return out;
};
/**
* Rotates a matrix by the given angle around the X axis using SIMD
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the matrix to rotate
* @param {Number} rad the angle to rotate the matrix by
* @returns {mat4} out
*/
mat4.SIMD.rotateX = function (out, a, rad) {
var s = SIMD.Float32x4.splat(Math.sin(rad)),
c = SIMD.Float32x4.splat(Math.cos(rad));
if (a !== out) { // If the source and destination differ, copy the unchanged rows
out[0] = a[0];
out[1] = a[1];
out[2] = a[2];
out[3] = a[3];
out[12] = a[12];
out[13] = a[13];
out[14] = a[14];
out[15] = a[15];
}
// Perform axis-specific matrix multiplication
var a_1 = SIMD.Float32x4.load(a, 4);
var a_2 = SIMD.Float32x4.load(a, 8);
SIMD.Float32x4.store(out, 4,
SIMD.Float32x4.add(SIMD.Float32x4.mul(a_1, c), SIMD.Float32x4.mul(a_2, s)));
SIMD.Float32x4.store(out, 8,
SIMD.Float32x4.sub(SIMD.Float32x4.mul(a_2, c), SIMD.Float32x4.mul(a_1, s)));
return out;
};
/**
 * Rotates a matrix by the given angle around the X axis using SIMD if available and enabled
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the matrix to rotate
* @param {Number} rad the angle to rotate the matrix by
* @returns {mat4} out
*/
mat4.rotateX = glMatrix.USE_SIMD ? mat4.SIMD.rotateX : mat4.scalar.rotateX;
/**
* Rotates a matrix by the given angle around the Y axis not using SIMD
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the matrix to rotate
* @param {Number} rad the angle to rotate the matrix by
* @returns {mat4} out
*/
mat4.scalar.rotateY = function (out, a, rad) {
var s = Math.sin(rad),
c = Math.cos(rad),
a00 = a[0],
a01 = a[1],
a02 = a[2],
a03 = a[3],
a20 = a[8],
a21 = a[9],
a22 = a[10],
a23 = a[11];
if (a !== out) { // If the source and destination differ, copy the unchanged rows
out[4] = a[4];
out[5] = a[5];
out[6] = a[6];
out[7] = a[7];
out[12] = a[12];
out[13] = a[13];
out[14] = a[14];
out[15] = a[15];
}
// Perform axis-specific matrix multiplication
out[0] = a00 * c - a20 * s;
out[1] = a01 * c - a21 * s;
out[2] = a02 * c - a22 * s;
out[3] = a03 * c - a23 * s;
out[8] = a00 * s + a20 * c;
out[9] = a01 * s + a21 * c;
out[10] = a02 * s + a22 * c;
out[11] = a03 * s + a23 * c;
return out;
};
/**
* Rotates a matrix by the given angle around the Y axis using SIMD
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the matrix to rotate
* @param {Number} rad the angle to rotate the matrix by
* @returns {mat4} out
*/
mat4.SIMD.rotateY = function (out, a, rad) {
var s = SIMD.Float32x4.splat(Math.sin(rad)),
c = SIMD.Float32x4.splat(Math.cos(rad));
if (a !== out) { // If the source and destination differ, copy the unchanged rows
out[4] = a[4];
out[5] = a[5];
out[6] = a[6];
out[7] = a[7];
out[12] = a[12];
out[13] = a[13];
out[14] = a[14];
out[15] = a[15];
}
// Perform axis-specific matrix multiplication
var a_0 = SIMD.Float32x4.load(a, 0);
var a_2 = SIMD.Float32x4.load(a, 8);
SIMD.Float32x4.store(out, 0,
SIMD.Float32x4.sub(SIMD.Float32x4.mul(a_0, c), SIMD.Float32x4.mul(a_2, s)));
SIMD.Float32x4.store(out, 8,
SIMD.Float32x4.add(SIMD.Float32x4.mul(a_0, s), SIMD.Float32x4.mul(a_2, c)));
return out;
};
/**
 * Rotates a matrix by the given angle around the Y axis using SIMD if available and enabled
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the matrix to rotate
* @param {Number} rad the angle to rotate the matrix by
* @returns {mat4} out
*/
mat4.rotateY = glMatrix.USE_SIMD ? mat4.SIMD.rotateY : mat4.scalar.rotateY;
/**
* Rotates a matrix by the given angle around the Z axis not using SIMD
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the matrix to rotate
* @param {Number} rad the angle to rotate the matrix by
* @returns {mat4} out
*/
mat4.scalar.rotateZ = function (out, a, rad) {
var s = Math.sin(rad),
c = Math.cos(rad),
a00 = a[0],
a01 = a[1],
a02 = a[2],
a03 = a[3],
a10 = a[4],
a11 = a[5],
a12 = a[6],
a13 = a[7];
if (a !== out) { // If the source and destination differ, copy the unchanged last row
out[8] = a[8];
out[9] = a[9];
out[10] = a[10];
out[11] = a[11];
out[12] = a[12];
out[13] = a[13];
out[14] = a[14];
out[15] = a[15];
}
// Perform axis-specific matrix multiplication
out[0] = a00 * c + a10 * s;
out[1] = a01 * c + a11 * s;
out[2] = a02 * c + a12 * s;
out[3] = a03 * c + a13 * s;
out[4] = a10 * c - a00 * s;
out[5] = a11 * c - a01 * s;
out[6] = a12 * c - a02 * s;
out[7] = a13 * c - a03 * s;
return out;
};
/**
* Rotates a matrix by the given angle around the Z axis using SIMD
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the matrix to rotate
* @param {Number} rad the angle to rotate the matrix by
* @returns {mat4} out
*/
mat4.SIMD.rotateZ = function (out, a, rad) {
var s = SIMD.Float32x4.splat(Math.sin(rad)),
c = SIMD.Float32x4.splat(Math.cos(rad));
if (a !== out) { // If the source and destination differ, copy the unchanged last row
out[8] = a[8];
out[9] = a[9];
out[10] = a[10];
out[11] = a[11];
out[12] = a[12];
out[13] = a[13];
out[14] = a[14];
out[15] = a[15];
}
// Perform axis-specific matrix multiplication
var a_0 = SIMD.Float32x4.load(a, 0);
var a_1 = SIMD.Float32x4.load(a, 4);
SIMD.Float32x4.store(out, 0,
SIMD.Float32x4.add(SIMD.Float32x4.mul(a_0, c), SIMD.Float32x4.mul(a_1, s)));
SIMD.Float32x4.store(out, 4,
SIMD.Float32x4.sub(SIMD.Float32x4.mul(a_1, c), SIMD.Float32x4.mul(a_0, s)));
return out;
};
/**
 * Rotates a matrix by the given angle around the Z axis using SIMD if available and enabled
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the matrix to rotate
* @param {Number} rad the angle to rotate the matrix by
* @returns {mat4} out
*/
mat4.rotateZ = glMatrix.USE_SIMD ? mat4.SIMD.rotateZ : mat4.scalar.rotateZ;
/**
* Creates a matrix from a vector translation
* This is equivalent to (but much faster than):
*
* mat4.identity(dest);
* mat4.translate(dest, dest, vec);
*
* @param {mat4} out mat4 receiving operation result
* @param {vec3} v Translation vector
* @returns {mat4} out
*/
mat4.fromTranslation = function(out, v) {
out[0] = 1;
out[1] = 0;
out[2] = 0;
out[3] = 0;
out[4] = 0;
out[5] = 1;
out[6] = 0;
out[7] = 0;
out[8] = 0;
out[9] = 0;
out[10] = 1;
out[11] = 0;
out[12] = v[0];
out[13] = v[1];
out[14] = v[2];
out[15] = 1;
return out;
}
/**
* Creates a matrix from a vector scaling
* This is equivalent to (but much faster than):
*
* mat4.identity(dest);
* mat4.scale(dest, dest, vec);
*
* @param {mat4} out mat4 receiving operation result
* @param {vec3} v Scaling vector
* @returns {mat4} out
*/
mat4.fromScaling = function(out, v) {
out[0] = v[0];
out[1] = 0;
out[2] = 0;
out[3] = 0;
out[4] = 0;
out[5] = v[1];
out[6] = 0;
out[7] = 0;
out[8] = 0;
out[9] = 0;
out[10] = v[2];
out[11] = 0;
out[12] = 0;
out[13] = 0;
out[14] = 0;
out[15] = 1;
return out;
}
/**
* Creates a matrix from a given angle around a given axis
* This is equivalent to (but much faster than):
*
* mat4.identity(dest);
* mat4.rotate(dest, dest, rad, axis);
*
* @param {mat4} out mat4 receiving operation result
* @param {Number} rad the angle to rotate the matrix by
* @param {vec3} axis the axis to rotate around
* @returns {mat4} out
*/
mat4.fromRotation = function(out, rad, axis) {
var x = axis[0], y = axis[1], z = axis[2],
len = Math.sqrt(x * x + y * y + z * z),
s, c, t;
if (Math.abs(len) < glMatrix.EPSILON) { return null; }
len = 1 / len;
x *= len;
y *= len;
z *= len;
s = Math.sin(rad);
c = Math.cos(rad);
t = 1 - c;
// Perform rotation-specific matrix multiplication
out[0] = x * x * t + c;
out[1] = y * x * t + z * s;
out[2] = z * x * t - y * s;
out[3] = 0;
out[4] = x * y * t - z * s;
out[5] = y * y * t + c;
out[6] = z * y * t + x * s;
out[7] = 0;
out[8] = x * z * t + y * s;
out[9] = y * z * t - x * s;
out[10] = z * z * t + c;
out[11] = 0;
out[12] = 0;
out[13] = 0;
out[14] = 0;
out[15] = 1;
return out;
}
/**
* Creates a matrix from the given angle around the X axis
* This is equivalent to (but much faster than):
*
* mat4.identity(dest);
* mat4.rotateX(dest, dest, rad);
*
* @param {mat4} out mat4 receiving operation result
* @param {Number} rad the angle to rotate the matrix by
* @returns {mat4} out
*/
mat4.fromXRotation = function(out, rad) {
var s = Math.sin(rad),
c = Math.cos(rad);
// Perform axis-specific matrix multiplication
out[0] = 1;
out[1] = 0;
out[2] = 0;
out[3] = 0;
out[4] = 0;
out[5] = c;
out[6] = s;
out[7] = 0;
out[8] = 0;
out[9] = -s;
out[10] = c;
out[11] = 0;
out[12] = 0;
out[13] = 0;
out[14] = 0;
out[15] = 1;
return out;
}
/**
* Creates a matrix from the given angle around the Y axis
* This is equivalent to (but much faster than):
*
* mat4.identity(dest);
* mat4.rotateY(dest, dest, rad);
*
* @param {mat4} out mat4 receiving operation result
* @param {Number} rad the angle to rotate the matrix by
* @returns {mat4} out
*/
mat4.fromYRotation = function(out, rad) {
var s = Math.sin(rad),
c = Math.cos(rad);
// Perform axis-specific matrix multiplication
out[0] = c;
out[1] = 0;
out[2] = -s;
out[3] = 0;
out[4] = 0;
out[5] = 1;
out[6] = 0;
out[7] = 0;
out[8] = s;
out[9] = 0;
out[10] = c;
out[11] = 0;
out[12] = 0;
out[13] = 0;
out[14] = 0;
out[15] = 1;
return out;
}
/**
* Creates a matrix from the given angle around the Z axis
* This is equivalent to (but much faster than):
*
* mat4.identity(dest);
* mat4.rotateZ(dest, dest, rad);
*
* @param {mat4} out mat4 receiving operation result
* @param {Number} rad the angle to rotate the matrix by
* @returns {mat4} out
*/
mat4.fromZRotation = function(out, rad) {
var s = Math.sin(rad),
c = Math.cos(rad);
// Perform axis-specific matrix multiplication
out[0] = c;
out[1] = s;
out[2] = 0;
out[3] = 0;
out[4] = -s;
out[5] = c;
out[6] = 0;
out[7] = 0;
out[8] = 0;
out[9] = 0;
out[10] = 1;
out[11] = 0;
out[12] = 0;
out[13] = 0;
out[14] = 0;
out[15] = 1;
return out;
}
/**
* Creates a matrix from a quaternion rotation and vector translation
* This is equivalent to (but much faster than):
*
* mat4.identity(dest);
* mat4.translate(dest, vec);
* var quatMat = mat4.create();
* quat4.toMat4(quat, quatMat);
* mat4.multiply(dest, quatMat);
*
* @param {mat4} out mat4 receiving operation result
* @param {quat4} q Rotation quaternion
* @param {vec3} v Translation vector
* @returns {mat4} out
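 * @example
 * // Illustrative sketch only: the quaternion below is the identity rotation
 * // (in this library's [x, y, z, w] layout) and the translation is arbitrary.
 * var m = mat4.create();
 * mat4.fromRotationTranslation(m, [0, 0, 0, 1], [10, 20, 30]);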
*/
mat4.fromRotationTranslation = function (out, q, v) {
// Quaternion math
var x = q[0], y = q[1], z = q[2], w = q[3],
x2 = x + x,
y2 = y + y,
z2 = z + z,
xx = x * x2,
xy = x * y2,
xz = x * z2,
yy = y * y2,
yz = y * z2,
zz = z * z2,
wx = w * x2,
wy = w * y2,
wz = w * z2;
out[0] = 1 - (yy + zz);
out[1] = xy + wz;
out[2] = xz - wy;
out[3] = 0;
out[4] = xy - wz;
out[5] = 1 - (xx + zz);
out[6] = yz + wx;
out[7] = 0;
out[8] = xz + wy;
out[9] = yz - wx;
out[10] = 1 - (xx + yy);
out[11] = 0;
out[12] = v[0];
out[13] = v[1];
out[14] = v[2];
out[15] = 1;
return out;
};
/**
* Returns the translation vector component of a transformation
* matrix. If a matrix is built with fromRotationTranslation,
* the returned vector will be the same as the translation vector
* originally supplied.
* @param {vec3} out Vector to receive translation component
* @param {mat4} mat Matrix to be decomposed (input)
* @return {vec3} out
*/
mat4.getTranslation = function (out, mat) {
out[0] = mat[12];
out[1] = mat[13];
out[2] = mat[14];
return out;
};
/**
* Returns the scaling factor component of a transformation
* matrix. If a matrix is built with fromRotationTranslationScale
 *  with a normalized Quaternion parameter, the returned vector will be
* the same as the scaling vector
* originally supplied.
* @param {vec3} out Vector to receive scaling factor component
* @param {mat4} mat Matrix to be decomposed (input)
* @return {vec3} out
*/
mat4.getScaling = function (out, mat) {
var m11 = mat[0],
m12 = mat[1],
m13 = mat[2],
m21 = mat[4],
m22 = mat[5],
m23 = mat[6],
m31 = mat[8],
m32 = mat[9],
m33 = mat[10];
out[0] = Math.sqrt(m11 * m11 + m12 * m12 + m13 * m13);
out[1] = Math.sqrt(m21 * m21 + m22 * m22 + m23 * m23);
out[2] = Math.sqrt(m31 * m31 + m32 * m32 + m33 * m33);
return out;
};
/**
* Returns a quaternion representing the rotational component
* of a transformation matrix. If a matrix is built with
* fromRotationTranslation, the returned quaternion will be the
* same as the quaternion originally supplied.
* @param {quat} out Quaternion to receive the rotation component
* @param {mat4} mat Matrix to be decomposed (input)
* @return {quat} out
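 * @example
 * // Illustrative round-trip sketch: decomposing a matrix built from the
 * // identity quaternion ([x, y, z, w] layout) yields that quaternion again.
 * var m = mat4.fromRotationTranslation(mat4.create(), [0, 0, 0, 1], [4, 5, 6]);
 * var q = [0, 0, 0, 0];
 * mat4.getRotation(q, m); // q is now [0, 0, 0, 1]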
*/
mat4.getRotation = function (out, mat) {
// Algorithm taken from http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToQuaternion/index.htm
var trace = mat[0] + mat[5] + mat[10];
var S = 0;
if (trace > 0) {
S = Math.sqrt(trace + 1.0) * 2;
out[3] = 0.25 * S;
out[0] = (mat[6] - mat[9]) / S;
out[1] = (mat[8] - mat[2]) / S;
out[2] = (mat[1] - mat[4]) / S;
  } else if ((mat[0] > mat[5]) && (mat[0] > mat[10])) {
S = Math.sqrt(1.0 + mat[0] - mat[5] - mat[10]) * 2;
out[3] = (mat[6] - mat[9]) / S;
out[0] = 0.25 * S;
out[1] = (mat[1] + mat[4]) / S;
out[2] = (mat[8] + mat[2]) / S;
} else if (mat[5] > mat[10]) {
S = Math.sqrt(1.0 + mat[5] - mat[0] - mat[10]) * 2;
out[3] = (mat[8] - mat[2]) / S;
out[0] = (mat[1] + mat[4]) / S;
out[1] = 0.25 * S;
out[2] = (mat[6] + mat[9]) / S;
} else {
S = Math.sqrt(1.0 + mat[10] - mat[0] - mat[5]) * 2;
out[3] = (mat[1] - mat[4]) / S;
out[0] = (mat[8] + mat[2]) / S;
out[1] = (mat[6] + mat[9]) / S;
out[2] = 0.25 * S;
}
return out;
};
/**
* Creates a matrix from a quaternion rotation, vector translation and vector scale
* This is equivalent to (but much faster than):
*
* mat4.identity(dest);
* mat4.translate(dest, vec);
* var quatMat = mat4.create();
* quat4.toMat4(quat, quatMat);
* mat4.multiply(dest, quatMat);
* mat4.scale(dest, scale)
*
* @param {mat4} out mat4 receiving operation result
* @param {quat4} q Rotation quaternion
* @param {vec3} v Translation vector
* @param {vec3} s Scaling vector
* @returns {mat4} out
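 * @example
 * // Illustrative sketch only: identity rotation ([x, y, z, w] layout), an
 * // arbitrary translation, and a uniform scale of 2.
 * var m = mat4.create();
 * mat4.fromRotationTranslationScale(m, [0, 0, 0, 1], [5, 0, 0], [2, 2, 2]);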
*/
mat4.fromRotationTranslationScale = function (out, q, v, s) {
// Quaternion math
var x = q[0], y = q[1], z = q[2], w = q[3],
x2 = x + x,
y2 = y + y,
z2 = z + z,
xx = x * x2,
xy = x * y2,
xz = x * z2,<|fim▁hole|> yz = y * z2,
zz = z * z2,
wx = w * x2,
wy = w * y2,
wz = w * z2,
sx = s[0],
sy = s[1],
sz = s[2];
out[0] = (1 - (yy + zz)) * sx;
out[1] = (xy + wz) * sx;
out[2] = (xz - wy) * sx;
out[3] = 0;
out[4] = (xy - wz) * sy;
out[5] = (1 - (xx + zz)) * sy;
out[6] = (yz + wx) * sy;
out[7] = 0;
out[8] = (xz + wy) * sz;
out[9] = (yz - wx) * sz;
out[10] = (1 - (xx + yy)) * sz;
out[11] = 0;
out[12] = v[0];
out[13] = v[1];
out[14] = v[2];
out[15] = 1;
return out;
};
/**
* Creates a matrix from a quaternion rotation, vector translation and vector scale, rotating and scaling around the given origin
* This is equivalent to (but much faster than):
*
* mat4.identity(dest);
* mat4.translate(dest, vec);
* mat4.translate(dest, origin);
* var quatMat = mat4.create();
* quat4.toMat4(quat, quatMat);
* mat4.multiply(dest, quatMat);
* mat4.scale(dest, scale)
* mat4.translate(dest, negativeOrigin);
*
* @param {mat4} out mat4 receiving operation result
* @param {quat4} q Rotation quaternion
* @param {vec3} v Translation vector
* @param {vec3} s Scaling vector
* @param {vec3} o The origin vector around which to scale and rotate
* @returns {mat4} out
*/
mat4.fromRotationTranslationScaleOrigin = function (out, q, v, s, o) {
// Quaternion math
var x = q[0], y = q[1], z = q[2], w = q[3],
x2 = x + x,
y2 = y + y,
z2 = z + z,
xx = x * x2,
xy = x * y2,
xz = x * z2,
yy = y * y2,
yz = y * z2,
zz = z * z2,
wx = w * x2,
wy = w * y2,
wz = w * z2,
sx = s[0],
sy = s[1],
sz = s[2],
ox = o[0],
oy = o[1],
oz = o[2];
out[0] = (1 - (yy + zz)) * sx;
out[1] = (xy + wz) * sx;
out[2] = (xz - wy) * sx;
out[3] = 0;
out[4] = (xy - wz) * sy;
out[5] = (1 - (xx + zz)) * sy;
out[6] = (yz + wx) * sy;
out[7] = 0;
out[8] = (xz + wy) * sz;
out[9] = (yz - wx) * sz;
out[10] = (1 - (xx + yy)) * sz;
out[11] = 0;
out[12] = v[0] + ox - (out[0] * ox + out[4] * oy + out[8] * oz);
out[13] = v[1] + oy - (out[1] * ox + out[5] * oy + out[9] * oz);
out[14] = v[2] + oz - (out[2] * ox + out[6] * oy + out[10] * oz);
out[15] = 1;
return out;
};
/**
* Calculates a 4x4 matrix from the given quaternion
*
* @param {mat4} out mat4 receiving operation result
* @param {quat} q Quaternion to create matrix from
*
* @returns {mat4} out
*/
mat4.fromQuat = function (out, q) {
var x = q[0], y = q[1], z = q[2], w = q[3],
x2 = x + x,
y2 = y + y,
z2 = z + z,
xx = x * x2,
yx = y * x2,
yy = y * y2,
zx = z * x2,
zy = z * y2,
zz = z * z2,
wx = w * x2,
wy = w * y2,
wz = w * z2;
out[0] = 1 - yy - zz;
out[1] = yx + wz;
out[2] = zx - wy;
out[3] = 0;
out[4] = yx - wz;
out[5] = 1 - xx - zz;
out[6] = zy + wx;
out[7] = 0;
out[8] = zx + wy;
out[9] = zy - wx;
out[10] = 1 - xx - yy;
out[11] = 0;
out[12] = 0;
out[13] = 0;
out[14] = 0;
out[15] = 1;
return out;
};
/**
* Generates a frustum matrix with the given bounds
*
* @param {mat4} out mat4 frustum matrix will be written into
* @param {Number} left Left bound of the frustum
* @param {Number} right Right bound of the frustum
* @param {Number} bottom Bottom bound of the frustum
* @param {Number} top Top bound of the frustum
* @param {Number} near Near bound of the frustum
* @param {Number} far Far bound of the frustum
* @returns {mat4} out
*/
mat4.frustum = function (out, left, right, bottom, top, near, far) {
var rl = 1 / (right - left),
tb = 1 / (top - bottom),
nf = 1 / (near - far);
out[0] = (near * 2) * rl;
out[1] = 0;
out[2] = 0;
out[3] = 0;
out[4] = 0;
out[5] = (near * 2) * tb;
out[6] = 0;
out[7] = 0;
out[8] = (right + left) * rl;
out[9] = (top + bottom) * tb;
out[10] = (far + near) * nf;
out[11] = -1;
out[12] = 0;
out[13] = 0;
out[14] = (far * near * 2) * nf;
out[15] = 0;
return out;
};
/**
* Generates a perspective projection matrix with the given bounds
*
* @param {mat4} out mat4 frustum matrix will be written into
* @param {number} fovy Vertical field of view in radians
 * @param {number} aspect Aspect ratio, typically viewport width/height
* @param {number} near Near bound of the frustum
* @param {number} far Far bound of the frustum
* @returns {mat4} out
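 * @example
 * // Illustrative sketch only: a 45-degree vertical field of view for an
 * // assumed 800x600 viewport; the clip planes are arbitrary choices.
 * var proj = mat4.create();
 * mat4.perspective(proj, Math.PI / 4, 800 / 600, 0.1, 100.0);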
*/
mat4.perspective = function (out, fovy, aspect, near, far) {
var f = 1.0 / Math.tan(fovy / 2),
nf = 1 / (near - far);
out[0] = f / aspect;
out[1] = 0;
out[2] = 0;
out[3] = 0;
out[4] = 0;
out[5] = f;
out[6] = 0;
out[7] = 0;
out[8] = 0;
out[9] = 0;
out[10] = (far + near) * nf;
out[11] = -1;
out[12] = 0;
out[13] = 0;
out[14] = (2 * far * near) * nf;
out[15] = 0;
return out;
};
/**
* Generates a perspective projection matrix with the given field of view.
* This is primarily useful for generating projection matrices to be used
 * with the still experimental WebVR API.
*
* @param {mat4} out mat4 frustum matrix will be written into
* @param {Object} fov Object containing the following values: upDegrees, downDegrees, leftDegrees, rightDegrees
* @param {number} near Near bound of the frustum
* @param {number} far Far bound of the frustum
* @returns {mat4} out
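 * @example
 * // Illustrative sketch only: a symmetric 45-degree field of view of the
 * // kind a hypothetical VR eye-parameter query might report.
 * var proj = mat4.create();
 * mat4.perspectiveFromFieldOfView(proj,
 *     {upDegrees: 45, downDegrees: 45, leftDegrees: 45, rightDegrees: 45},
 *     0.1, 100.0);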
*/
mat4.perspectiveFromFieldOfView = function (out, fov, near, far) {
var upTan = Math.tan(fov.upDegrees * Math.PI/180.0),
downTan = Math.tan(fov.downDegrees * Math.PI/180.0),
leftTan = Math.tan(fov.leftDegrees * Math.PI/180.0),
rightTan = Math.tan(fov.rightDegrees * Math.PI/180.0),
xScale = 2.0 / (leftTan + rightTan),
yScale = 2.0 / (upTan + downTan);
out[0] = xScale;
out[1] = 0.0;
out[2] = 0.0;
out[3] = 0.0;
out[4] = 0.0;
out[5] = yScale;
out[6] = 0.0;
out[7] = 0.0;
out[8] = -((leftTan - rightTan) * xScale * 0.5);
out[9] = ((upTan - downTan) * yScale * 0.5);
out[10] = far / (near - far);
out[11] = -1.0;
out[12] = 0.0;
out[13] = 0.0;
out[14] = (far * near) / (near - far);
out[15] = 0.0;
return out;
}
/**
 * Generates an orthogonal projection matrix with the given bounds
*
* @param {mat4} out mat4 frustum matrix will be written into
* @param {number} left Left bound of the frustum
* @param {number} right Right bound of the frustum
* @param {number} bottom Bottom bound of the frustum
* @param {number} top Top bound of the frustum
* @param {number} near Near bound of the frustum
* @param {number} far Far bound of the frustum
* @returns {mat4} out
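 * @example
 * // Illustrative sketch only: a 2D-style projection for an assumed 800x600
 * // pixel viewport with the origin in the top-left corner.
 * var proj = mat4.create();
 * mat4.ortho(proj, 0, 800, 600, 0, -1, 1);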
*/
mat4.ortho = function (out, left, right, bottom, top, near, far) {
var lr = 1 / (left - right),
bt = 1 / (bottom - top),
nf = 1 / (near - far);
out[0] = -2 * lr;
out[1] = 0;
out[2] = 0;
out[3] = 0;
out[4] = 0;
out[5] = -2 * bt;
out[6] = 0;
out[7] = 0;
out[8] = 0;
out[9] = 0;
out[10] = 2 * nf;
out[11] = 0;
out[12] = (left + right) * lr;
out[13] = (top + bottom) * bt;
out[14] = (far + near) * nf;
out[15] = 1;
return out;
};
/**
* Generates a look-at matrix with the given eye position, focal point, and up axis
*
* @param {mat4} out mat4 frustum matrix will be written into
* @param {vec3} eye Position of the viewer
* @param {vec3} center Point the viewer is looking at
* @param {vec3} up vec3 pointing up
* @returns {mat4} out
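 * @example
 * // Illustrative sketch only: a camera at an arbitrary point looking at the
 * // origin with +Y as the up axis.
 * var view = mat4.create();
 * mat4.lookAt(view, [0, 2, 5], [0, 0, 0], [0, 1, 0]);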
*/
mat4.lookAt = function (out, eye, center, up) {
var x0, x1, x2, y0, y1, y2, z0, z1, z2, len,
eyex = eye[0],
eyey = eye[1],
eyez = eye[2],
upx = up[0],
upy = up[1],
upz = up[2],
centerx = center[0],
centery = center[1],
centerz = center[2];
if (Math.abs(eyex - centerx) < glMatrix.EPSILON &&
Math.abs(eyey - centery) < glMatrix.EPSILON &&
Math.abs(eyez - centerz) < glMatrix.EPSILON) {
return mat4.identity(out);
}
z0 = eyex - centerx;
z1 = eyey - centery;
z2 = eyez - centerz;
len = 1 / Math.sqrt(z0 * z0 + z1 * z1 + z2 * z2);
z0 *= len;
z1 *= len;
z2 *= len;
x0 = upy * z2 - upz * z1;
x1 = upz * z0 - upx * z2;
x2 = upx * z1 - upy * z0;
len = Math.sqrt(x0 * x0 + x1 * x1 + x2 * x2);
if (!len) {
x0 = 0;
x1 = 0;
x2 = 0;
} else {
len = 1 / len;
x0 *= len;
x1 *= len;
x2 *= len;
}
y0 = z1 * x2 - z2 * x1;
y1 = z2 * x0 - z0 * x2;
y2 = z0 * x1 - z1 * x0;
len = Math.sqrt(y0 * y0 + y1 * y1 + y2 * y2);
if (!len) {
y0 = 0;
y1 = 0;
y2 = 0;
} else {
len = 1 / len;
y0 *= len;
y1 *= len;
y2 *= len;
}
out[0] = x0;
out[1] = y0;
out[2] = z0;
out[3] = 0;
out[4] = x1;
out[5] = y1;
out[6] = z1;
out[7] = 0;
out[8] = x2;
out[9] = y2;
out[10] = z2;
out[11] = 0;
out[12] = -(x0 * eyex + x1 * eyey + x2 * eyez);
out[13] = -(y0 * eyex + y1 * eyey + y2 * eyez);
out[14] = -(z0 * eyex + z1 * eyey + z2 * eyez);
out[15] = 1;
return out;
};
/**
* Returns a string representation of a mat4
*
* @param {mat4} a matrix to represent as a string
* @returns {String} string representation of the matrix
*/
mat4.str = function (a) {
return 'mat4(' + a[0] + ', ' + a[1] + ', ' + a[2] + ', ' + a[3] + ', ' +
a[4] + ', ' + a[5] + ', ' + a[6] + ', ' + a[7] + ', ' +
a[8] + ', ' + a[9] + ', ' + a[10] + ', ' + a[11] + ', ' +
a[12] + ', ' + a[13] + ', ' + a[14] + ', ' + a[15] + ')';
};
/**
* Returns Frobenius norm of a mat4
*
* @param {mat4} a the matrix to calculate Frobenius norm of
* @returns {Number} Frobenius norm
*/
mat4.frob = function (a) {
    return(Math.sqrt(Math.pow(a[0], 2) + Math.pow(a[1], 2) + Math.pow(a[2], 2) + Math.pow(a[3], 2) +
                     Math.pow(a[4], 2) + Math.pow(a[5], 2) + Math.pow(a[6], 2) + Math.pow(a[7], 2) +
                     Math.pow(a[8], 2) + Math.pow(a[9], 2) + Math.pow(a[10], 2) + Math.pow(a[11], 2) +
                     Math.pow(a[12], 2) + Math.pow(a[13], 2) + Math.pow(a[14], 2) + Math.pow(a[15], 2)));
};
/**
* Adds two mat4's
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the first operand
* @param {mat4} b the second operand
* @returns {mat4} out
*/
mat4.add = function(out, a, b) {
out[0] = a[0] + b[0];
out[1] = a[1] + b[1];
out[2] = a[2] + b[2];
out[3] = a[3] + b[3];
out[4] = a[4] + b[4];
out[5] = a[5] + b[5];
out[6] = a[6] + b[6];
out[7] = a[7] + b[7];
out[8] = a[8] + b[8];
out[9] = a[9] + b[9];
out[10] = a[10] + b[10];
out[11] = a[11] + b[11];
out[12] = a[12] + b[12];
out[13] = a[13] + b[13];
out[14] = a[14] + b[14];
out[15] = a[15] + b[15];
return out;
};
/**
* Subtracts matrix b from matrix a
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the first operand
* @param {mat4} b the second operand
* @returns {mat4} out
*/
mat4.subtract = function(out, a, b) {
out[0] = a[0] - b[0];
out[1] = a[1] - b[1];
out[2] = a[2] - b[2];
out[3] = a[3] - b[3];
out[4] = a[4] - b[4];
out[5] = a[5] - b[5];
out[6] = a[6] - b[6];
out[7] = a[7] - b[7];
out[8] = a[8] - b[8];
out[9] = a[9] - b[9];
out[10] = a[10] - b[10];
out[11] = a[11] - b[11];
out[12] = a[12] - b[12];
out[13] = a[13] - b[13];
out[14] = a[14] - b[14];
out[15] = a[15] - b[15];
return out;
};
/**
* Alias for {@link mat4.subtract}
* @function
*/
mat4.sub = mat4.subtract;
/**
* Multiply each element of the matrix by a scalar.
*
* @param {mat4} out the receiving matrix
* @param {mat4} a the matrix to scale
* @param {Number} b amount to scale the matrix's elements by
* @returns {mat4} out
*/
mat4.multiplyScalar = function(out, a, b) {
out[0] = a[0] * b;
out[1] = a[1] * b;
out[2] = a[2] * b;
out[3] = a[3] * b;
out[4] = a[4] * b;
out[5] = a[5] * b;
out[6] = a[6] * b;
out[7] = a[7] * b;
out[8] = a[8] * b;
out[9] = a[9] * b;
out[10] = a[10] * b;
out[11] = a[11] * b;
out[12] = a[12] * b;
out[13] = a[13] * b;
out[14] = a[14] * b;
out[15] = a[15] * b;
return out;
};
/**
* Adds two mat4's after multiplying each element of the second operand by a scalar value.
*
* @param {mat4} out the receiving vector
* @param {mat4} a the first operand
* @param {mat4} b the second operand
* @param {Number} scale the amount to scale b's elements by before adding
* @returns {mat4} out
*/
mat4.multiplyScalarAndAdd = function(out, a, b, scale) {
out[0] = a[0] + (b[0] * scale);
out[1] = a[1] + (b[1] * scale);
out[2] = a[2] + (b[2] * scale);
out[3] = a[3] + (b[3] * scale);
out[4] = a[4] + (b[4] * scale);
out[5] = a[5] + (b[5] * scale);
out[6] = a[6] + (b[6] * scale);
out[7] = a[7] + (b[7] * scale);
out[8] = a[8] + (b[8] * scale);
out[9] = a[9] + (b[9] * scale);
out[10] = a[10] + (b[10] * scale);
out[11] = a[11] + (b[11] * scale);
out[12] = a[12] + (b[12] * scale);
out[13] = a[13] + (b[13] * scale);
out[14] = a[14] + (b[14] * scale);
out[15] = a[15] + (b[15] * scale);
return out;
};
/**
* Returns whether or not the matrices have exactly the same elements in the same position (when compared with ===)
*
* @param {mat4} a The first matrix.
* @param {mat4} b The second matrix.
* @returns {Boolean} True if the matrices are equal, false otherwise.
*/
mat4.exactEquals = function (a, b) {
return a[0] === b[0] && a[1] === b[1] && a[2] === b[2] && a[3] === b[3] &&
a[4] === b[4] && a[5] === b[5] && a[6] === b[6] && a[7] === b[7] &&
a[8] === b[8] && a[9] === b[9] && a[10] === b[10] && a[11] === b[11] &&
a[12] === b[12] && a[13] === b[13] && a[14] === b[14] && a[15] === b[15];
};
/**
* Returns whether or not the matrices have approximately the same elements in the same position.
*
* @param {mat4} a The first matrix.
* @param {mat4} b The second matrix.
* @returns {Boolean} True if the matrices are equal, false otherwise.
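 * @example
 * // Illustrative sketch only: a matrix that differs from another by less than
 * // glMatrix.EPSILON is "equal" here even though exactEquals reports false.
 * var a = mat4.fromTranslation(mat4.create(), [1, 2, 3]);
 * var b = mat4.fromTranslation(mat4.create(), [1.0000005, 2, 3]);
 * mat4.equals(a, b);      // true
 * mat4.exactEquals(a, b); // false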
*/
mat4.equals = function (a, b) {
var a0 = a[0], a1 = a[1], a2 = a[2], a3 = a[3],
a4 = a[4], a5 = a[5], a6 = a[6], a7 = a[7],
a8 = a[8], a9 = a[9], a10 = a[10], a11 = a[11],
a12 = a[12], a13 = a[13], a14 = a[14], a15 = a[15];
var b0 = b[0], b1 = b[1], b2 = b[2], b3 = b[3],
b4 = b[4], b5 = b[5], b6 = b[6], b7 = b[7],
b8 = b[8], b9 = b[9], b10 = b[10], b11 = b[11],
b12 = b[12], b13 = b[13], b14 = b[14], b15 = b[15];
return (Math.abs(a0 - b0) <= glMatrix.EPSILON*Math.max(1.0, Math.abs(a0), Math.abs(b0)) &&
Math.abs(a1 - b1) <= glMatrix.EPSILON*Math.max(1.0, Math.abs(a1), Math.abs(b1)) &&
Math.abs(a2 - b2) <= glMatrix.EPSILON*Math.max(1.0, Math.abs(a2), Math.abs(b2)) &&
Math.abs(a3 - b3) <= glMatrix.EPSILON*Math.max(1.0, Math.abs(a3), Math.abs(b3)) &&
Math.abs(a4 - b4) <= glMatrix.EPSILON*Math.max(1.0, Math.abs(a4), Math.abs(b4)) &&
Math.abs(a5 - b5) <= glMatrix.EPSILON*Math.max(1.0, Math.abs(a5), Math.abs(b5)) &&
Math.abs(a6 - b6) <= glMatrix.EPSILON*Math.max(1.0, Math.abs(a6), Math.abs(b6)) &&
Math.abs(a7 - b7) <= glMatrix.EPSILON*Math.max(1.0, Math.abs(a7), Math.abs(b7)) &&
Math.abs(a8 - b8) <= glMatrix.EPSILON*Math.max(1.0, Math.abs(a8), Math.abs(b8)) &&
Math.abs(a9 - b9) <= glMatrix.EPSILON*Math.max(1.0, Math.abs(a9), Math.abs(b9)) &&
Math.abs(a10 - b10) <= glMatrix.EPSILON*Math.max(1.0, Math.abs(a10), Math.abs(b10)) &&
Math.abs(a11 - b11) <= glMatrix.EPSILON*Math.max(1.0, Math.abs(a11), Math.abs(b11)) &&
Math.abs(a12 - b12) <= glMatrix.EPSILON*Math.max(1.0, Math.abs(a12), Math.abs(b12)) &&
Math.abs(a13 - b13) <= glMatrix.EPSILON*Math.max(1.0, Math.abs(a13), Math.abs(b13)) &&
Math.abs(a14 - b14) <= glMatrix.EPSILON*Math.max(1.0, Math.abs(a14), Math.abs(b14)) &&
Math.abs(a15 - b15) <= glMatrix.EPSILON*Math.max(1.0, Math.abs(a15), Math.abs(b15)));
};
module.exports = mat4;<|fim▁end|> | yy = y * y2, |
<|file_name|>tutorial_etl_dag.py<|end_file_name|><|fim▁begin|>#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
### ETL DAG Tutorial Documentation
This ETL DAG is compatible with Airflow 1.10.x (specifically tested with 1.10.12) and is referenced
as part of the documentation that goes along with the Airflow Functional DAG tutorial located
[here](https://airflow.apache.org/tutorial_decorated_flows.html)
"""
# [START tutorial]
# [START import_module]
import json
from datetime import datetime
from textwrap import dedent
# The DAG object; we'll need this to instantiate a DAG<|fim▁hole|># Operators; we need this to operate!
from airflow.operators.python import PythonOperator
# [END import_module]
# [START instantiate_dag]
with DAG(
'tutorial_etl_dag',
# [START default_args]
# These args will get passed on to each operator
# You can override them on a per-task basis during operator initialization
default_args={'retries': 2},
# [END default_args]
description='ETL DAG tutorial',
schedule_interval=None,
start_date=datetime(2021, 1, 1),
catchup=False,
tags=['example'],
) as dag:
# [END instantiate_dag]
# [START documentation]
dag.doc_md = __doc__
# [END documentation]
# [START extract_function]
def extract(**kwargs):
ti = kwargs['ti']
data_string = '{"1001": 301.27, "1002": 433.21, "1003": 502.22}'
ti.xcom_push('order_data', data_string)
# [END extract_function]
# [START transform_function]
def transform(**kwargs):
ti = kwargs['ti']
extract_data_string = ti.xcom_pull(task_ids='extract', key='order_data')
order_data = json.loads(extract_data_string)
total_order_value = 0
for value in order_data.values():
total_order_value += value
total_value = {"total_order_value": total_order_value}
total_value_json_string = json.dumps(total_value)
ti.xcom_push('total_order_value', total_value_json_string)
# [END transform_function]
# [START load_function]
def load(**kwargs):
ti = kwargs['ti']
total_value_string = ti.xcom_pull(task_ids='transform', key='total_order_value')
total_order_value = json.loads(total_value_string)
print(total_order_value)
# [END load_function]
# [START main_flow]
extract_task = PythonOperator(
task_id='extract',
python_callable=extract,
)
extract_task.doc_md = dedent(
"""\
#### Extract task
A simple Extract task to get data ready for the rest of the data pipeline.
In this case, getting data is simulated by reading from a hardcoded JSON string.
This data is then put into xcom, so that it can be processed by the next task.
"""
)
transform_task = PythonOperator(
task_id='transform',
python_callable=transform,
)
transform_task.doc_md = dedent(
"""\
#### Transform task
A simple Transform task which takes in the collection of order data from xcom
and computes the total order value.
This computed value is then put into xcom, so that it can be processed by the next task.
"""
)
load_task = PythonOperator(
task_id='load',
python_callable=load,
)
load_task.doc_md = dedent(
"""\
#### Load task
A simple Load task which takes in the result of the Transform task, by reading it
    from xcom and, instead of saving it for end-user review, just prints it out.
"""
)
extract_task >> transform_task >> load_task
# [END main_flow]
# [END tutorial]<|fim▁end|> | from airflow import DAG
|
<|file_name|>dershowitz.rs<|end_file_name|><|fim▁begin|>K : F (G x) <= G (G (F (F x)))
h : forall p x . p (F (F (G x)))
h = K (K h)
e : F (F (G x))<|fim▁hole|><|fim▁end|> | e = h |
<|file_name|>port.go<|end_file_name|><|fim▁begin|>package models
import "github.com/nvellon/hal"<|fim▁hole|> CurrentState int
DefaultState int
Name string
}
func (a Port) GetMap() hal.Entry {
return hal.Entry{
"id": a.Id,
"current_state": a.CurrentState,
"default_state": a.DefaultState,
"name": a.Name,
}
}<|fim▁end|> |
type Port struct {
Id int |
<|file_name|>config.py<|end_file_name|><|fim▁begin|>"""Helper module for parsing AWS ini config files."""
import os
try:
import configparser
except ImportError:
import ConfigParser as configparser
AWS_CLI_CREDENTIALS_PATH = "~/.aws/credentials"
AWS_CLI_CONFIG_PATH = "~/.aws/config"
DEFAULT_PROFILE_NAME = os.getenv("AWS_DEFAULT_PROFILE", "default")
class NoConfigFoundException(Exception):
"""Config file not present."""
pass
def _get_config_parser(path):
"""Open and parse given config.
:type path: basestring
:rtype: ConfigParser.ConfigParser
"""
config_parser = configparser.ConfigParser()
try:
with open(os.path.expanduser(path), "rb") as f:
config_parser.readfp(f)
except IOError:
raise NoConfigFoundException("Can't find the config file: %s" % path)
else:
return config_parser
def _get_credentials_from_environment():
key = os.environ.get("AWS_ACCESS_KEY_ID")
secret = os.environ.get("AWS_SECRET_ACCESS_KEY")
return key, secret
def get_credentials(profile=None):
"""Returns AWS credentials.
Reads ~/.aws/credentials if the profile name is given or tries
to get them from environment otherwise. Returns a (key, secret)
tuple.
:type profile: basestring
:rtype: tuple
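    Example (illustrative only; the profile name is an assumption)::
        key, secret = get_credentials("default")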
"""
if profile is None:
key, secret = _get_credentials_from_environment()
if key is not None and secret is not None:
return key, secret
raise NoConfigFoundException("AWS credentials not found.")
config = _get_config_parser(path=AWS_CLI_CREDENTIALS_PATH)
key = config.get(profile, "aws_access_key_id")
secret = config.get(profile, "aws_secret_access_key")
return key, secret
def get_credentials_dict(profile):
"""Returns credentials as a dict (for use as kwargs).
:type profile: basestring
:rtype: dict
"""
key, secret = get_credentials(profile)
return {"aws_access_key_id": key,
"aws_secret_access_key": secret}
def get_profile_names():
"""Get available profile names.
:rtype: list
:returns: list of profile names (strings)
"""
try:
return _get_config_parser(path=AWS_CLI_CREDENTIALS_PATH).sections()
except NoConfigFoundException:
return []
def has_default_profile():
"""Is default profile present?
:rtype: bool<|fim▁hole|>
return DEFAULT_PROFILE_NAME in get_profile_names()
def get_default_region(profile):
"""Get the default region for given profile from AWS CLI tool's config.
:type profile: basestring
:rtype: basestring
:returns: name of defalt region if defined in config, None otherwise
"""
try:
config = _get_config_parser(path=AWS_CLI_CONFIG_PATH)
except NoConfigFoundException:
return None
try:
return config.get("profile %s" % profile, "region")
except (configparser.NoSectionError, configparser.NoOptionError):
pass
try:
return config.get("default", "region")
except (configparser.NoSectionError, configparser.NoOptionError):
pass
return None<|fim▁end|> | """ |
<|file_name|>lumina-fileinfo_sv.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="sv_SE">
<context>
<name>Dialog</name>
<message>
<location filename="../dialog.ui" line="14"/>
<source>File Information</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.ui" line="22"/>
<source>Working dir:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.ui" line="56"/>
<source>Use startup notification</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.ui" line="70"/>
<source>Icon:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.ui" line="77"/>
<source>Command:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.ui" line="84"/>
<source>Comment:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.ui" line="118"/>
<source>Run in terminal</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.ui" line="125"/>
<source>Name:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.ui" line="132"/>
<source>Options</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.ui" line="162"/>
<source>Cancel</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.ui" line="169"/>
<source>Apply</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.cpp" line="84"/>
<location filename="../dialog.cpp" line="125"/>
<location filename="../dialog.cpp" line="160"/>
<source>Error</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.cpp" line="84"/>
<source>Lumina-fileinfo requires inputs:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.cpp" line="84"/>
<source>Example: "%1"</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.cpp" line="101"/>
<source>URL</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.cpp" line="111"/>
<source>Working dir</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.cpp" line="125"/>
<source>The filename cannot start with a "-".</source>
<translation type="unfinished"></translation><|fim▁hole|> </message>
<message>
<location filename="../dialog.cpp" line="160"/>
<source>Problem to read the desktop file called:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.cpp" line="182"/>
<source>There are some issues with this file !!!!</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.cpp" line="183"/>
<source>Either you correct this file your self with an editor, or you start from scratch using the link or app template.
Please note that this process will update the file called:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.cpp" line="217"/>
<location filename="../dialog.cpp" line="323"/>
<source>Open command</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.cpp" line="217"/>
<source>All Files (*)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.cpp" line="233"/>
<source>Working Directory</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.cpp" line="285"/>
<location filename="../dialog.cpp" line="300"/>
<location filename="../dialog.cpp" line="309"/>
<source>Problem to write to disk</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.cpp" line="285"/>
<source>We have a problem to write the adapted desktop file to the disk. Can you re-try the modification after solving the issue with the disk ?</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.cpp" line="300"/>
<location filename="../dialog.cpp" line="309"/>
<source>We have a problem to execute the following command:</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.cpp" line="323"/>
<source>Image Files (*.png *.jpg *.bmp)</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.cpp" line="334"/>
<location filename="../dialog.cpp" line="351"/>
<source>By modifying this value, you will loose all translated versions</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.cpp" line="335"/>
<source>The field: Name is translated in several other languages. If you want to continue, you will loose all translated versions</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../dialog.cpp" line="352"/>
<source>The field: Comment is translated in several other languages. If you want to continue, you will loose all translated versions</source>
<translation type="unfinished"></translation>
</message>
</context>
</TS><|fim▁end|> | |
<|file_name|>paginate.js<|end_file_name|><|fim▁begin|>var URL = require('url');
var Pagination = function(request, model){
this.request = request;
this.model = model;
<|fim▁hole|> var url = URL.parse(this.request.url).pathname;
var page = this.request.param('page');
page = page === undefined ? 0 : page;
this.model.find(query).sort(sort).skip(page*limit).limit( (limit + 1) ).select( selected ).exec(function(err, members){
//Fetched more than the limit
members.splice(limit, 1);
var paginatedMembers = {
data : members
};
if(members.length >= limit ){
nextPage = parseInt(page) + 1;
paginatedMembers["next"] = url + "?page=" + nextPage;
}
if (page >= 1) {
prevPage = parseInt(page) - 1;
paginatedMembers["prev"] = url + "?page=" + prevPage;
};
onDataReception(paginatedMembers);
});
};
}
module.exports = function(request, model){
return new Pagination(request, model);
}<|fim▁end|> | this.paginate = function(query, limit, sort, selected, onDataReception){
|
<|file_name|>regions-close-associated-type-into-object.rs<|end_file_name|><|fim▁begin|>trait X {}
trait Iter {
type Item: X;
fn into_item(self) -> Self::Item;
fn as_item(&self) -> &Self::Item;
}
fn bad1<T: Iter>(v: T) -> Box<dyn X + 'static>
{
let item = v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn bad2<T: Iter>(v: T) -> Box<dyn X + 'static>
where Box<T::Item> : X
{
let item: Box<_> = Box::new(v.into_item());
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn bad3<'a, T: Iter>(v: T) -> Box<dyn X + 'a>
{
let item = v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn bad4<'a, T: Iter>(v: T) -> Box<dyn X + 'a>
where Box<T::Item> : X
{
let item: Box<_> = Box::new(v.into_item());
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn ok1<'a, T: Iter>(v: T) -> Box<dyn X + 'a>
where T::Item : 'a
{
let item = v.into_item();
Box::new(item) // OK, T::Item : 'a is declared
}
fn ok2<'a, T: Iter>(v: &T, w: &'a T::Item) -> Box<dyn X + 'a>
where T::Item : Clone
{<|fim▁hole|>fn ok3<'a, T: Iter>(v: &'a T) -> Box<dyn X + 'a>
where T::Item : Clone + 'a
{
let item = Clone::clone(v.as_item());
Box::new(item) // OK, T::Item : 'a was declared
}
fn meh1<'a, T: Iter>(v: &'a T) -> Box<dyn X + 'a>
where T::Item : Clone
{
// This case is kind of interesting. It's the same as `ok3` but
// without the explicit declaration. This is valid because `T: 'a
// => T::Item: 'a`, and the former we can deduce from our argument
// of type `&'a T`.
let item = Clone::clone(v.as_item());
Box::new(item)
}
fn main() {}<|fim▁end|> | let item = Clone::clone(w);
Box::new(item) // OK, T::Item : 'a is implied
}
|
<|file_name|>Attribute.java<|end_file_name|><|fim▁begin|>/**
*
*/
package agentRefactoringStrand;
/**
* @author Daavid
*
*/
public class Attribute {
<|fim▁hole|><|fim▁end|> | private String name;
} |
<|file_name|>SourceProcessor.java<|end_file_name|><|fim▁begin|>package io.ray.streaming.runtime.core.processor;<|fim▁hole|>
import io.ray.streaming.message.Record;
import io.ray.streaming.operator.SourceOperator;
/**
* The processor for the stream sources, containing a SourceOperator.
*
* @param <T> The type of source data.
*/
public class SourceProcessor<T> extends StreamProcessor<Record, SourceOperator<T>> {
public SourceProcessor(SourceOperator<T> operator) {
super(operator);
}
@Override
public void process(Record record) {
throw new UnsupportedOperationException("SourceProcessor should not process record");
}
public void fetch() {
operator.fetch();
}
@Override
public void close() {}
}<|fim▁end|> | |
<|file_name|>cli.js<|end_file_name|><|fim▁begin|>module.exports = function (process, manifest, actionCallback) {<|fim▁hole|> .option('-v, --verbose', 'Enable verbose output')
.command('*')
.action(function (args) {
actionCallback(process, args);
});
program.parse(process.argv);
};<|fim▁end|> | var program = require('commander');
program
.version(manifest.version) |
<|file_name|>test.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2010-2011 Romain Bignon
#<|fim▁hole|># weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.tools.test import BackendTest
class GazelleTest(BackendTest):
BACKEND = 'gazelle'
def test_torrent(self):
l = list(self.backend.iter_torrents('sex'))
if len(l) > 0:
self.backend.get_torrent_file(l[0].id)<|fim▁end|> | # This file is part of weboob.
# |
<|file_name|>ShadowNode.js<|end_file_name|><|fim▁begin|>var UTIL = require('./util');
var ShadowNode;
module.exports = ShadowNode = function(patch,options){
this.shadow = options.shadow;
this.native = options.native;
this.elem = new patch.type(this);
this.elem.props = patch.props;
this.elem.props.children = patch.children;
this.elem.state = this.elem.getInitialState ? this.elem.getInitialState() : {};
this.elem.componentWillMount();
this.render();
this.elem.componentDidMount();
};
var proto = ShadowNode.prototype;
Object.defineProperty(proto,'parent',{
get : function(){
return this.native.parent;
}
});
proto.setPatch = function(patch){
var oldProps = this.elem.props;
this.elem._isUpdating = true;
var newProps = patch.props;
newProps.children = patch.children;
this.elem.componentWillRecieveProps(newProps);
this.elem.props = newProps;
this.elem.componentDidRecieveProps(oldProps);
this.update();
};
proto.update = function(){
// This is called by set state and by props updating
this.elem.componentWillUpdate();
this.render();
this.elem.componentWillUpdate();
};
proto.remove = function(){
this.elem.componentWillUnmount();
this.destroyed = true;
if (this.figure){
return this.figure.remove();
}
this.shadow = void 0;
this.figure = void 0;
this.native = void 0;
this.elem.componentDidUnmount();
};
proto.render = function(){
var newPatch = this.elem.render();
var lastPatch = this.lastPatch;
this.lastPatch = newPatch;
if (!lastPatch && !newPatch) return;
if (UTIL.isNative(newPatch)){
if (this.figure){
this.figure.remove();
this.figure = void 0;
}
this.native.shadowTail = this;
return this.native.setPatch(newPatch);
}
if (UTIL.differentTypes(lastPatch,newPatch)){
if (this.figure) this.figure.remove();
this.figure = new ShadowNode(newPatch,{
shadow : this,native : this.native
});
return this.figure;
}
if (UTIL.differentPatch(lastPatch,newPatch)){<|fim▁hole|> // component did update
}
};<|fim▁end|> | // component will update
this.figure.setPatch(newPatch);
|
<|file_name|>LLVMMOPMain.java<|end_file_name|><|fim▁begin|>/**
* @author fengchen, Dongyun Jin, Patrick Meredith, Michael Ilseman
*
* To change the template for this generated type comment go to
* Window>Preferences>Java>Code Generation>Code and Comments
*/
package llvmmop;
import java.io.File;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.util.ArrayList;
import java.util.List;
import com.runtimeverification.rvmonitor.java.rvj.Main;
import llvmmop.parser.ast.MOPSpecFile;
import llvmmop.util.Tool;
import llvmmop.util.AJFileCombiner;
class JavaFileFilter implements FilenameFilter {
public boolean accept(File dir, String name) {
return name.endsWith(".java");
}
}
class MOPFileFilter implements FilenameFilter {
public boolean accept(File dir, String name) {
return name.endsWith(".mop");
}
}
public class LLVMMOPMain {
static File outputDir = null;
public static boolean debug = false;
public static boolean noopt1 = false;
public static boolean toJavaLib = false;
public static boolean statistics = false;
public static boolean statistics2 = false;
public static String aspectname = null;
public static boolean specifiedAJName = false;
public static boolean isJarFile = false;
public static String jarFilePath = null;
public static final int NONE = 0;
public static final int HANDLERS = 1;
public static final int EVENTS = 2;
public static int logLevel = NONE;
public static boolean dacapo = false;
public static boolean dacapo2 = false;
public static boolean silent = false;
public static boolean empty_advicebody = false;
public static boolean translate2RV = true;
public static boolean merge = false;
public static boolean inline = false;
public static boolean scalable = false;
public static boolean keepRVFiles = false;
public static List<String []> listFilePairs = new ArrayList<String []>();
public static List<String> listRVMFiles = new ArrayList<String>();
static private File getTargetDir(ArrayList<File> specFiles) throws MOPException{
if(LLVMMOPMain.outputDir != null){
return outputDir;
}
boolean sameDir = true;
File parentFile = null;
for(File file : specFiles){
if(parentFile == null){
parentFile = file.getAbsoluteFile().getParentFile();
} else {
if(file.getAbsoluteFile().getParentFile().equals(parentFile)){
continue;
} else {
sameDir = false;
break;
}
}
}
if(sameDir){
return parentFile;
} else {
return new File(".");
}
}
/**
* Process a java file including mop annotations to generate an aspect file. The path argument should be an existing java file name. The location
	 * argument should contain the original file name, but it may have a different directory.
*
* @param path
* an absolute path of a specification file
* @param location
* an absolute path for result file
*/
public static void processJavaFile(File file, String location) throws MOPException {
MOPNameSpace.init();
String specStr = SpecExtractor.process(file);
MOPSpecFile spec = SpecExtractor.parse(specStr);
if (LLVMMOPMain.aspectname == null) {
LLVMMOPMain.aspectname = Tool.getFileName(file.getAbsolutePath());
}
MOPProcessor processor = new MOPProcessor(LLVMMOPMain.aspectname);
String aspect = processor.process(spec);
writeFile(aspect, location, "MonitorAspect.aj");
}
/**
* Process a specification file to generate an aspect file. The path argument should be an existing specification file name. The location
	 * argument should contain the original file name, but it may have a different directory.
*
* @param path
* an absolute path of a specification file
* @param location
* an absolute path for result file
*/
public static void processSpecFile(File file, String location) throws MOPException {
MOPNameSpace.init();
String specStr = SpecExtractor.process(file);
MOPSpecFile spec = SpecExtractor.parse(specStr);
if (LLVMMOPMain.aspectname == null) {
LLVMMOPMain.aspectname = Tool.getFileName(file.getAbsolutePath());
}
MOPProcessor processor = new MOPProcessor(LLVMMOPMain.aspectname);
String output = processor.process(spec);
if (translate2RV) {
writeFile(processor.translate2RV(spec), file.getAbsolutePath(), ".rvm");
}
if (toJavaLib) {
writeFile(output, location, "JavaLibMonitor.java");
} else {
writeFile(output, location, "MonitorAspect.aj");
}
}
public static void processMultipleFiles(ArrayList<File> specFiles) throws MOPException {
String aspectName;
if(outputDir == null){
outputDir = getTargetDir(specFiles);
}
if(LLVMMOPMain.aspectname != null) {
aspectName = LLVMMOPMain.aspectname;
} else {
if(specFiles.size() == 1) {
aspectName = Tool.getFileName(specFiles.get(0).getAbsolutePath());
} else {
int suffixNumber = 0;
			// generate auto name like 'MultiMonitorAspect.aj'
File aspectFile;
do{
suffixNumber++;
aspectFile = new File(outputDir.getAbsolutePath() + File.separator + "MultiSpec_" + suffixNumber + "MonitorAspect.aj");
} while(aspectFile.exists());
aspectName = "MultiSpec_" + suffixNumber;
}
LLVMMOPMain.aspectname = aspectName;
}
MOPProcessor processor = new MOPProcessor(aspectName);
MOPNameSpace.init();
ArrayList<MOPSpecFile> specs = new ArrayList<MOPSpecFile>();
for(File file : specFiles){
String specStr = SpecExtractor.process(file);
MOPSpecFile spec = SpecExtractor.parse(specStr);
if (translate2RV) {
writeFile(processor.translate2RV(spec), file.getAbsolutePath(), ".rvm");
}
specs.add(spec);
}
MOPSpecFile combinedSpec = SpecCombiner.process(specs);
String output = processor.process(combinedSpec);
writeCombinedAspectFile(output, aspectName);
}
protected static void writeJavaFile(String javaContent, String location) throws MOPException {
if ((javaContent == null) || (javaContent.length() == 0))
throw new MOPException("Nothing to write as a java file");
if (!Tool.isJavaFile(location))
throw new MOPException(location + "should be a Java file!");
try {
FileWriter f = new FileWriter(location);
f.write(javaContent);
f.close();
} catch (Exception e) {
throw new MOPException(e.getMessage());
}
}
protected static void writeCombinedAspectFile(String aspectContent, String aspectName) throws MOPException {
if (aspectContent == null || aspectContent.length() == 0)
return;
try {
FileWriter f = new FileWriter(outputDir.getAbsolutePath() + File.separator + aspectName + "MonitorAspect.aj");
f.write(aspectContent);
f.close();
} catch (Exception e) {
throw new MOPException(e.getMessage());
}
System.out.println(" " + aspectName + "MonitorAspect.aj is generated");
}
protected static void writeFile(String content, String location, String suffix) throws MOPException {
if (content == null || content.length() == 0)
return;
int i = location.lastIndexOf(File.separator);
String filePath = "";
try {
filePath = location.substring(0, i + 1) + Tool.getFileName(location) + suffix;
FileWriter f = new FileWriter(filePath);
f.write(content);
f.close();
} catch (Exception e) {
throw new MOPException(e.getMessage());
}
if (suffix.equals(".rvm")) {
listRVMFiles.add(filePath);
}
System.out.println(" " + Tool.getFileName(location) + suffix + " is generated");
}
<|fim▁hole|> try {
FileWriter f = new FileWriter(location.substring(0, i + 1) + Tool.getFileName(location) + "PluginOutput.txt");
f.write(pluginOutput);
f.close();
} catch (Exception e) {
throw new MOPException(e.getMessage());
}
System.out.println(" " + Tool.getFileName(location) + "PluginOutput.txt is generated");
}
public static String polishPath(String path) {
if (path.indexOf("%20") > 0)
path = path.replaceAll("%20", " ");
return path;
}
public static ArrayList<File> collectFiles(String[] files, String path) throws MOPException {
ArrayList<File> ret = new ArrayList<File>();
for (String file : files) {
String fPath = path.length() == 0 ? file : path + File.separator + file;
File f = new File(fPath);
if (!f.exists()) {
throw new MOPException("[Error] Target file, " + file + ", doesn't exsit!");
} else if (f.isDirectory()) {
ret.addAll(collectFiles(f.list(new JavaFileFilter()), f.getAbsolutePath()));
ret.addAll(collectFiles(f.list(new MOPFileFilter()), f.getAbsolutePath()));
} else {
if (Tool.isSpecFile(file)) {
ret.add(f);
} else if (Tool.isJavaFile(file)) {
ret.add(f);
} else
throw new MOPException("Unrecognized file type! The JavaMOP specification file should have .mop as the extension.");
}
}
return ret;
}
public static void process(String[] files, String path) throws MOPException {
ArrayList<File> specFiles = collectFiles(files, path);
if(LLVMMOPMain.aspectname != null && files.length > 1){
LLVMMOPMain.merge = true;
}
if (LLVMMOPMain.merge) {
System.out.println("-Processing " + specFiles.size()
+ " specification(s)");
processMultipleFiles(specFiles);
String javaFile = outputDir.getAbsolutePath() + File.separator
+ LLVMMOPMain.aspectname + "RuntimeMonitor.java";
String ajFile = outputDir.getAbsolutePath() + File.separator
+ LLVMMOPMain.aspectname + "MonitorAspect.aj";
String combinerArgs[] = new String[2];
combinerArgs[0] = javaFile;
combinerArgs[1] = ajFile;
listFilePairs.add(combinerArgs);
} else {
for (File file : specFiles) {
boolean needResetAspectName = LLVMMOPMain.aspectname == null;
String location = outputDir == null ? file.getAbsolutePath() : outputDir.getAbsolutePath() + File.separator + file.getName();
System.out.println("-Processing " + file.getPath());
if (Tool.isSpecFile(file.getName())) {
processSpecFile(file, location);
} else if (Tool.isJavaFile(file.getName())) {
processJavaFile(file, location);
}
File combineDir = outputDir == null ? file.getAbsoluteFile()
.getParentFile() : outputDir;
String javaFile = combineDir.getAbsolutePath() + File.separator
+ LLVMMOPMain.aspectname + "RuntimeMonitor.java";
String ajFile = combineDir.getAbsolutePath() + File.separator
+ LLVMMOPMain.aspectname + "MonitorAspect.aj";
String combinerArgs[] = new String[2];
combinerArgs[0] = javaFile;
combinerArgs[1] = ajFile;
listFilePairs.add(combinerArgs);
if (needResetAspectName) {
LLVMMOPMain.aspectname = null;
}
}
}
}
public static void process(String arg) throws MOPException {
if(outputDir != null && !outputDir.exists())
throw new MOPException("The output directory, " + outputDir.getPath() + " does not exist.");
process(arg.split(";"), "");
}
// PM
public static void print_help() {
System.out.println("Usage: java [-cp javmaop_classpath] llvmmop.LLVMMOPMain [-options] files");
System.out.println("");
System.out.println("where options include:");
System.out.println(" Options enabled by default are prefixed with \'+\'");
System.out.println(" -h -help\t\t\t print this help message");
System.out.println(" -v | -verbose\t\t enable verbose output");
System.out.println(" -debug\t\t\t enable verbose error message");
System.out.println();
System.out.println(" -local\t\t\t+ use local logic engine");
System.out.println(" -remote\t\t\t use default remote logic engine");
System.out.println("\t\t\t\t " + Configuration.getServerAddr());
System.out.println("\t\t\t\t (You can change the default address");
System.out.println("\t\t\t\t in llvmmop/config/remote_server_addr.properties)");
System.out.println(" -remote:<server address>\t use remote logic engine");
System.out.println();
System.out.println(" -d <output path>\t\t select directory to store output files");
System.out.println(" -n | -aspectname <aspect name>\t use the given aspect name instead of source code name");
System.out.println();
System.out.println(" -showevents\t\t\t show every event/handler occurrence");
System.out.println(" -showhandlers\t\t\t show every handler occurrence");
System.out.println();
System.out.println(" -s | -statistics\t\t generate monitor with statistics");
System.out.println(" -noopt1\t\t\t don't use the enable set optimization");
System.out.println(" -javalib\t\t\t generate a java library rather than an AspectJ file");
System.out.println();
System.out.println(" -aspect:\"<command line>\"\t compile the result right after it is generated");
System.out.println();
}
public static void main(String[] args) {
ClassLoader loader = LLVMMOPMain.class.getClassLoader();
String mainClassPath = loader.getResource("llvmmop/LLVMMOPMain.class").toString();
if (mainClassPath.endsWith(".jar!/llvmmop/LLVMMOPMain.class") && mainClassPath.startsWith("jar:")) {
isJarFile = true;
jarFilePath = mainClassPath.substring("jar:file:".length(), mainClassPath.length() - "!/llvmmop/LLVMMOPMain.class".length());
jarFilePath = polishPath(jarFilePath);
}
int i = 0;
String files = "";
while (i < args.length) {
if (args[i].compareTo("-h") == 0 || args[i].compareTo("-help") == 0) {
print_help();
return;
}
if (args[i].compareTo("-d") == 0) {
i++;
outputDir = new File(args[i]);
} else if (args[i].compareTo("-local") == 0) {
} else if (args[i].compareTo("-remote") == 0) {
} else if (args[i].startsWith("-remote:")) {
} else if (args[i].compareTo("-v") == 0 || args[i].compareTo("-verbose") == 0) {
MOPProcessor.verbose = true;
} else if (args[i].compareTo("-javalib") == 0) {
toJavaLib = true;
} else if (args[i].compareTo("-debug") == 0) {
LLVMMOPMain.debug = true;
} else if (args[i].compareTo("-noopt1") == 0) {
LLVMMOPMain.noopt1 = true;
} else if (args[i].compareTo("-s") == 0 || args[i].compareTo("-statistics") == 0) {
LLVMMOPMain.statistics = true;
} else if (args[i].compareTo("-s2") == 0 || args[i].compareTo("-statistics2") == 0) {
LLVMMOPMain.statistics2 = true;
} else if (args[i].compareTo("-n") == 0 || args[i].compareTo("-aspectname") == 0) {
i++;
LLVMMOPMain.aspectname = args[i];
LLVMMOPMain.specifiedAJName = true;
} else if (args[i].compareTo("-showhandlers") == 0) {
if (LLVMMOPMain.logLevel < LLVMMOPMain.HANDLERS)
LLVMMOPMain.logLevel = LLVMMOPMain.HANDLERS;
} else if (args[i].compareTo("-showevents") == 0) {
if (LLVMMOPMain.logLevel < LLVMMOPMain.EVENTS)
LLVMMOPMain.logLevel = LLVMMOPMain.EVENTS;
} else if (args[i].compareTo("-dacapo") == 0) {
LLVMMOPMain.dacapo = true;
} else if (args[i].compareTo("-dacapo2") == 0) {
LLVMMOPMain.dacapo2 = true;
} else if (args[i].compareTo("-silent") == 0) {
LLVMMOPMain.silent = true;
} else if (args[i].compareTo("-merge") == 0) {
LLVMMOPMain.merge = true;
} else if (args[i].compareTo("-inline") == 0) {
LLVMMOPMain.inline = true;
} else if (args[i].compareTo("-noadvicebody") == 0) {
LLVMMOPMain.empty_advicebody = true;
} else if (args[i].compareTo("-scalable") == 0) {
LLVMMOPMain.scalable = true;
} else if (args[i].compareTo("-translate2RV") == 0) {
LLVMMOPMain.translate2RV = true;
} else if (args[i].compareTo("-keepRVFiles") == 0) {
LLVMMOPMain.keepRVFiles = true;
} else {
if (files.length() != 0)
files += ";";
files += args[i];
}
++i;
}
if (files.length() == 0) {
print_help();
return;
}
// Generate .rvm files and .aj files
try {
process(files);
} catch (Exception e) {
System.err.println(e.getMessage());
if (LLVMMOPMain.debug)
e.printStackTrace();
}
// replace mop with rvm and call rv-monitor
int length = args.length;
if (LLVMMOPMain.keepRVFiles) {
length--;
}
String rvArgs[] = new String [length];
int p = 0;
for (int j = 0; j < args.length; j++) {
if (args[j].compareTo("-keepRVFiles") == 0) {
// Don't pass keepRVFiles to rvmonitor
continue;
}
rvArgs[p] = args[j].replaceAll("\\.mop", "\\.rvm");
p++;
}
Main.main(rvArgs);
// Call AJFileCombiner here to combine these two
// TODO
for (String[] filePair : listFilePairs) {
AJFileCombiner.main(filePair);
File javaFile = new File(filePair[0]);
try {
if (!LLVMMOPMain.keepRVFiles) {
boolean deleted = javaFile.delete();
if (!deleted) {
System.err.println("Failed to delete java file: "
+ filePair[0]);
}
}
} catch (Exception e) {
}
}
for (String rvmFilePath : listRVMFiles) {
File rvmFile = new File(rvmFilePath);
try {
if (!LLVMMOPMain.keepRVFiles) {
					boolean deleted = rvmFile.delete();
					if (!deleted) {
						System.err.println("Failed to delete rvm file: "
								+ rvmFilePath);
}
}
} catch (Exception e) {
}
}
}
}<|fim▁end|> | // PM
protected static void writePluginOutputFile(String pluginOutput, String location) throws MOPException {
int i = location.lastIndexOf(File.separator);
|
<|file_name|>Arbol_Sintactico_Abstracto.py<|end_file_name|><|fim▁begin|>from Estructura import espaceado<|fim▁hole|> self.alcance = alcance
self.cont = 1
def imprimir(self,tabulacion):
if (len(self.hijos) > 1):
print tabulacion + "SECUENCIA"
for hijo in self.hijos:
hijo.nivel = 1
hijo.imprimir(espaceado(tabulacion))
def ejecutar(self):
for hijo in self.hijos:
hijo.nivel = 1
hijo.ejecutar()<|fim▁end|> |
class Arbol_Sintactico_Abstracto:
def __init__(self,alcance,hijos):
self.hijos = hijos |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | from .sizedist import *
from .WD01 import make_WD01_DustSpectrum |
<|file_name|>cell.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::trace::JSTraceable;
use js::jsapi::{JSTracer};
use servo_util::task_state;
use servo_util::task_state::{SCRIPT, IN_GC};
use std::cell::{RefCell, Ref, RefMut};
/// A mutable field in the DOM.
///
/// This extends the API of `core::cell::RefCell` to allow unsafe access in
/// certain situations, with dynamic checking in debug builds.
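///
/// As a rough usage sketch (only the methods defined further below are
/// assumed, nothing new): a field is created with `DOMRefCell::new(value)`
/// and read or written from script via `borrow()` / `borrow_mut()`, while
/// layout and GC tracing go through the dedicated unsafe accessors.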
pub struct DOMRefCell<T> {
value: RefCell<T>,
}
// Functionality specific to Servo's `DOMRefCell` type
// ===================================================
impl<T> DOMRefCell<T> {
/// Return a reference to the contents.
///
/// For use in the layout task only.
pub unsafe fn borrow_for_layout<'a>(&'a self) -> &'a T {
debug_assert!(task_state::get().is_layout());
&*self.value.as_unsafe_cell().get()
}
/// Borrow the contents for the purpose of GC tracing.
///
/// This succeeds even if the object is mutably borrowed,<|fim▁hole|> &*self.value.as_unsafe_cell().get()
}
/// Is the cell mutably borrowed?
///
/// For safety checks in debug builds only.
pub fn is_mutably_borrowed(&self) -> bool {
self.value.try_borrow().is_some()
}
pub fn try_borrow<'a>(&'a self) -> Option<Ref<'a, T>> {
debug_assert!(task_state::get().is_script());
self.value.try_borrow()
}
pub fn try_borrow_mut<'a>(&'a self) -> Option<RefMut<'a, T>> {
debug_assert!(task_state::get().is_script());
self.value.try_borrow_mut()
}
}
impl<T: JSTraceable> JSTraceable for DOMRefCell<T> {
fn trace(&self, trc: *mut JSTracer) {
(*self).borrow().trace(trc)
}
}
// Functionality duplicated with `core::cell::RefCell`
// ===================================================
impl<T> DOMRefCell<T> {
pub fn new(value: T) -> DOMRefCell<T> {
DOMRefCell {
value: RefCell::new(value),
}
}
pub fn unwrap(self) -> T {
self.value.unwrap()
}
pub fn borrow<'a>(&'a self) -> Ref<'a, T> {
match self.try_borrow() {
Some(ptr) => ptr,
None => panic!("DOMRefCell<T> already mutably borrowed")
}
}
pub fn borrow_mut<'a>(&'a self) -> RefMut<'a, T> {
match self.try_borrow_mut() {
Some(ptr) => ptr,
None => panic!("DOMRefCell<T> already borrowed")
}
}
}<|fim▁end|> | /// so you have to be careful in trace code!
pub unsafe fn borrow_for_gc_trace<'a>(&'a self) -> &'a T {
debug_assert!(task_state::get().contains(SCRIPT | IN_GC)); |
<|file_name|>minSubArrayLen.py<|end_file_name|><|fim▁begin|>import sys

class Solution:
# @param {integer} s
# @param {integer[]} nums
# @return {integer}
def minSubArrayLen(self, s, nums):
i = 0
j = -1
n = len(nums)
t = 0
min_len = sys.maxint
while(i<n and j <n):
if t < s:
j += 1
if j >=n :
break
t += nums[j]
else:
if min_len > (j-i+1):
min_len = j-i+1
t -= nums[i]
i += 1
if min_len == sys.maxint:
return 0<|fim▁hole|><|fim▁end|> | else:
return min_len |
<|file_name|>jinja2tags.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
# import jinja2
from jinja2.ext import Extension
from .templatetags.promotions_tags import promo_ballance
class PromotionsExtension(Extension):
def __init__(self, environment):
super(PromotionsExtension, self).__init__(environment)<|fim▁hole|># Nicer import name
core = PromotionsExtension<|fim▁end|> | environment.filters["promo_ballance"] = promo_ballance
|
<|file_name|>cppcheck.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
# -*- encoding: utf-8 -*-
# Michel Mooij, [email protected]
"""
Tool Description
================
This module provides a waf wrapper (i.e. waftool) around the C/C++ source code
checking tool 'cppcheck'.
See http://cppcheck.sourceforge.net/ for more information on the cppcheck tool
itself.
Note that many linux distributions already provide a ready to install version
of cppcheck. On fedora, for instance, it can be installed using yum:
'sudo yum install cppcheck'
Usage
=====
In order to use this waftool simply add it to the 'options' and 'configure'
functions of your main waf script as shown in the example below:
def options(opt):
opt.load('cppcheck', tooldir='./waftools')
def configure(conf):
conf.load('cppcheck')
Note that the example shown above assumes that the cppcheck waftool is located
in the sub directory named 'waftools'.
When configured as shown in the example above, cppcheck will automatically
perform a source code analysis on all C/C++ build tasks that have been
defined in your waf build system.
The example shown below for a C program will be used as input for cppcheck when
building the task.
def build(bld):
bld.program(name='foo', src='foobar.c')
The result of the source code analysis will be stored both as xml and html
files in the build location for the task. Should any error be detected by
cppcheck the build will be aborted and a link to the html report will be shown.
When needed, source code checking by cppcheck can be disabled per task or per
detected error or warning for a particular task. It can also be disabled for
all tasks.
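For instance, checking can be skipped for an entire build run from the command
line; with the '--cppcheck-skip' option defined further below in this tool's
options() function, an invocation could look like:
	waf build --cppcheck-skip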
In order to exclude a task from source code checking add the skip option to the
task as shown below:
def build(bld):
bld.program(
name='foo',
src='foobar.c'
cppcheck_skip=True
)
When needed, problems detected by cppcheck may be suppressed using a file
containing a list of suppression rules. The relative or absolute path to this
file can be added to the build task as shown in the example below:
bld.program(
name='bar',
src='foobar.c',
cppcheck_suppress='bar.suppress'
)
A cppcheck suppress file should contain one suppress rule per line. Each of
these rules will be passed as an '--suppress=<rule>' argument to cppcheck.
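For illustration only (the rule ids and path below are made-up examples), such
a suppress file could contain lines like:
	missingInclude
	unusedFunction:src/bar.c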
Dependencies
================
This waftool depends on the Python pygments module, which is used for source
code syntax highlighting when creating the html reports. See
http://pygments.org/ for more information on this package.
Remarks
================
The generation of the html report is originally based on the cppcheck-htmlreport.py
script that comes shipped with the cppcheck tool.
"""
import os
import sys
import xml.etree.ElementTree as ElementTree
from waflib import Task, TaskGen, Logs, Context
PYGMENTS_EXC_MSG= '''
The required module 'pygments' could not be found. Please install it using your
platform package manager (e.g. apt-get or yum), using 'pip' or 'easy_install',
see 'http://pygments.org/download/' for installation instructions.
'''
try:
import pygments
from pygments import formatters, lexers
except ImportError, e:
Logs.warn(PYGMENTS_EXC_MSG)
raise e
def options(opt):
opt.add_option('--cppcheck-skip', dest='cppcheck_skip',
default=False, action='store_true',
help='do not check C/C++ sources (default=False)')
opt.add_option('--cppcheck-err-resume', dest='cppcheck_err_resume',
default=False, action='store_true',
help='continue in case of errors (default=False)')
opt.add_option('--cppcheck-bin-enable', dest='cppcheck_bin_enable',
default='warning,performance,portability,style,unusedFunction', action='store',
help="cppcheck option '--enable=' for binaries (default=warning,performance,portability,style,unusedFunction)")
opt.add_option('--cppcheck-lib-enable', dest='cppcheck_lib_enable',
default='warning,performance,portability,style', action='store',
help="cppcheck option '--enable=' for libraries (default=warning,performance,portability,style)")
opt.add_option('--cppcheck-std-c', dest='cppcheck_std_c',
default='c99', action='store',
help='cppcheck standard to use when checking C (default=c99)')
opt.add_option('--cppcheck-std-cxx', dest='cppcheck_std_cxx',
default='c++03', action='store',
help='cppcheck standard to use when checking C++ (default=c++03)')
opt.add_option('--cppcheck-check-config', dest='cppcheck_check_config',
default=False, action='store_true',
		help='forced check for missing built-in include files, e.g. stdio.h (default=False)')
opt.add_option('--cppcheck-max-configs', dest='cppcheck_max_configs',
default='20', action='store',
help='maximum preprocessor (--max-configs) define iterations (default=20)')
def configure(conf):
if conf.options.cppcheck_skip:
conf.env.CPPCHECK_SKIP = [True]
conf.env.CPPCHECK_STD_C = conf.options.cppcheck_std_c
conf.env.CPPCHECK_STD_CXX = conf.options.cppcheck_std_cxx
conf.env.CPPCHECK_MAX_CONFIGS = conf.options.cppcheck_max_configs
conf.env.CPPCHECK_BIN_ENABLE = conf.options.cppcheck_bin_enable
conf.env.CPPCHECK_LIB_ENABLE = conf.options.cppcheck_lib_enable
conf.find_program('cppcheck', var='CPPCHECK')
@TaskGen.feature('c')
@TaskGen.feature('cxx')
def cppcheck_execute(self):
if len(self.env.CPPCHECK_SKIP) or self.bld.options.cppcheck_skip:
return
if getattr(self, 'cppcheck_skip', False):
return
task = self.create_task('cppcheck')
task.cmd = _tgen_create_cmd(self)
task.fatal = []
if not self.bld.options.cppcheck_err_resume:
task.fatal.append('error')
def _tgen_create_cmd(self):
features = getattr(self, 'features', [])
std_c = self.env.CPPCHECK_STD_C
std_cxx = self.env.CPPCHECK_STD_CXX
max_configs = self.env.CPPCHECK_MAX_CONFIGS
bin_enable = self.env.CPPCHECK_BIN_ENABLE
lib_enable = self.env.CPPCHECK_LIB_ENABLE
cmd = '%s' % self.env.CPPCHECK
args = ['--inconclusive','--report-progress','--verbose','--xml','--xml-version=2']
args.append('--max-configs=%s' % max_configs)
if 'cxx' in features:
args.append('--language=c++')
args.append('--std=%s' % std_cxx)
else:
args.append('--language=c')
args.append('--std=%s' % std_c)
if self.bld.options.cppcheck_check_config:
args.append('--check-config')
if set(['cprogram','cxxprogram']) & set(features):
args.append('--enable=%s' % bin_enable)
else:
args.append('--enable=%s' % lib_enable)
for src in self.to_list(getattr(self, 'source', [])):
args.append('%r' % src)
for inc in self.to_incnodes(self.to_list(getattr(self, 'includes', []))):
args.append('-I%r' % inc)
for inc in self.to_incnodes(self.to_list(self.env.INCLUDES)):
args.append('-I%r' % inc)
return '%s %s' % (cmd, ' '.join(args))
class cppcheck(Task.Task):
quiet = True
def run(self):
stderr = self.generator.bld.cmd_and_log(self.cmd, quiet=Context.STDERR, output=Context.STDERR)
self._save_xml_report(stderr)
defects = self._get_defects(stderr)
index = self._create_html_report(defects)
self._errors_evaluate(defects, index)
return 0
def _save_xml_report(self, s):
'''use cppcheck xml result string, add the command string used to invoke cppcheck
and save as xml file.
'''
header = '%s\n' % s.split('\n')[0]
root = ElementTree.fromstring(s)
cmd = ElementTree.SubElement(root.find('cppcheck'), 'cmd')
cmd.text = str(self.cmd)
body = ElementTree.tostring(root)
node = self.generator.path.get_bld().find_or_declare('cppcheck.xml')
node.write(header + body)
def _get_defects(self, xml_string):
		'''evaluate the xml string returned by cppcheck (on stderr) and use it to create
a list of defects.
'''
defects = []
for error in ElementTree.fromstring(xml_string).iter('error'):
defect = {}
defect['id'] = error.get('id')
defect['severity'] = error.get('severity')
			defect['msg'] = str(error.get('msg')).replace('<', '&lt;')
defect['verbose'] = error.get('verbose')
for location in error.findall('location'):
defect['file'] = location.get('file')
defect['line'] = str(int(location.get('line')) - 1)
defects.append(defect)
return defects
def _create_html_report(self, defects):
files, css_style_defs = self._create_html_files(defects)
index = self._create_html_index(files)
self._create_css_file(css_style_defs)
return index
def _create_html_files(self, defects):
sources = {}
defects = [defect for defect in defects if defect.has_key('file')]
for defect in defects:
name = defect['file']
if not sources.has_key(name):
sources[name] = [defect]
else:
sources[name].append(defect)
files = {}
css_style_defs = None
bpath = self.generator.path.get_bld().abspath()
names = sources.keys()
for i in range(0,len(names)):
name = names[i]
htmlfile = 'cppcheck/%i.html' % (i)
errors = sources[name]
files[name] = { 'htmlfile': '%s/%s' % (bpath, htmlfile), 'errors': errors }
css_style_defs = self._create_html_file(name, htmlfile, errors)
return files, css_style_defs
def _create_html_file(self, sourcefile, htmlfile, errors):
name = self.generator.get_name()
root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
title = root.find('head/title')
title.text = 'cppcheck - report - %s' % name
body = root.find('body')
for div in body.findall('div'):
if div.get('id') == 'page':
page = div
break
for div in page.findall('div'):
if div.get('id') == 'header':
h1 = div.find('h1')
h1.text = 'cppcheck report - %s' % name
if div.get('id') == 'content':
content = div
srcnode = self.generator.bld.root.find_node(sourcefile)
hl_lines = [e['line'] for e in errors if e.has_key('line')]
formatter = CppcheckHtmlFormatter(linenos=True, style='colorful', hl_lines=hl_lines, lineanchors='line')
formatter.errors = [e for e in errors if e.has_key('line')]
css_style_defs = formatter.get_style_defs('.highlight')
lexer = pygments.lexers.guess_lexer_for_filename(sourcefile, "")
s = pygments.highlight(srcnode.read(), lexer, formatter)
table = ElementTree.fromstring(s)
content.append(table)
s = ElementTree.tostring(root, method='html')
s = CCPCHECK_HTML_TYPE + s
node = self.generator.path.get_bld().find_or_declare(htmlfile)
node.write(s)
return css_style_defs
def _create_html_index(self, files):
name = self.generator.get_name()
root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
title = root.find('head/title')
title.text = 'cppcheck - report - %s' % name
body = root.find('body')
for div in body.findall('div'):
if div.get('id') == 'page':
page = div
break
for div in page.findall('div'):
if div.get('id') == 'header':
h1 = div.find('h1')
h1.text = 'cppcheck report - %s' % name
if div.get('id') == 'content':
content = div
self._create_html_table(content, files)
s = ElementTree.tostring(root, method='html')
s = CCPCHECK_HTML_TYPE + s
node = self.generator.path.get_bld().find_or_declare('cppcheck/index.html')
node.write(s)
return node
def _create_html_table(self, content, files):
table = ElementTree.fromstring(CPPCHECK_HTML_TABLE)
for name, val in files.items():
f = val['htmlfile']
s = '<tr><td colspan="4"><a href="%s">%s</a></td></tr>\n' % (f,name)
row = ElementTree.fromstring(s)
table.append(row)
errors = sorted(val['errors'], key=lambda e: int(e['line']) if e.has_key('line') else sys.maxint)
for e in errors:
if not e.has_key('line'):
s = '<tr><td></td><td>%s</td><td>%s</td><td>%s</td></tr>\n' % (e['id'], e['severity'], e['msg'])
else:
attr = ''
if e['severity'] == 'error':
attr = 'class="error"'
s = '<tr><td><a href="%s#line-%s">%s</a></td>' % (f, e['line'], e['line'])
s+= '<td>%s</td><td>%s</td><td %s>%s</td></tr>\n' % (e['id'], e['severity'], attr, e['msg'])
row = ElementTree.fromstring(s)
table.append(row)
content.append(table)
def _create_css_file(self, css_style_defs):
css = str(CPPCHECK_CSS_FILE)
if css_style_defs:
css = "%s\n%s\n" % (css, css_style_defs)
node = self.generator.path.get_bld().find_or_declare('cppcheck/style.css')
node.write(css)
def _errors_evaluate(self, errors, http_index):
name = self.generator.get_name()
fatal = self.fatal
severity = [err['severity'] for err in errors]
problems = [err for err in errors if err['severity'] != 'information']
if set(fatal) & set(severity):
exc = "\n"
exc += "\nccpcheck detected fatal error(s) in task '%s', see report for details:" % name
exc += "\n file://%r" % (http_index)
exc += "\n"
self.generator.bld.fatal(exc)
elif len(problems):
msg = "\nccpcheck detected (possible) problem(s) in task '%s', see report for details:" % name
msg += "\n file://%r" % http_index
msg += "\n"
Logs.error(msg)
class CppcheckHtmlFormatter(pygments.formatters.HtmlFormatter):
errors = []
def wrap(self, source, outfile):
line_no = 1
for i, t in super(CppcheckHtmlFormatter, self).wrap(source, outfile):
# If this is a source code line we want to add a span tag at the end.
if i == 1:
for error in self.errors:
if int(error['line']) == line_no:
t = t.replace('\n', CPPCHECK_HTML_ERROR % error['msg'])
line_no = line_no + 1
yield i, t
CCPCHECK_HTML_TYPE = \
'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">\n'
CPPCHECK_HTML_FILE = """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd" [<!ENTITY nbsp " ">]>
<html>
<head>
<title>cppcheck - report - XXX</title>
<link href="style.css" rel="stylesheet" type="text/css" />
<style type="text/css">
</style>
</head>
<body class="body">
<div id="page-header"> </div>
<div id="page">
<div id="header">
<h1>cppcheck report - XXX</h1>
</div>
<div id="menu">
<a href="index.html">Defect list</a>
</div>
<div id="content">
</div>
<div id="footer">
<div>cppcheck - a tool for static C/C++ code analysis</div>
<div>
Internet: <a href="http://cppcheck.sourceforge.net">http://cppcheck.sourceforge.net</a><br/>
Forum: <a href="http://apps.sourceforge.net/phpbb/cppcheck/">http://apps.sourceforge.net/phpbb/cppcheck/</a><br/>
IRC: #cppcheck at irc.freenode.net
</div>
</div>
</div>
<div id="page-footer"> </div>
</body>
</html>
"""
CPPCHECK_HTML_TABLE = """
<table>
<tr>
<th>Line</th>
<th>Id</th>
<th>Severity</th>
<th>Message</th>
</tr>
</table>
"""
CPPCHECK_HTML_ERROR = \
'<span style="background: #ffaaaa;padding: 3px;"><--- %s</span>\n'
CPPCHECK_CSS_FILE = """
body.body {
font-family: Arial;
font-size: 13px;
background-color: black;
padding: 0px;
margin: 0px;
}
.error {
font-family: Arial;
font-size: 13px;
background-color: #ffb7b7;
padding: 0px;
margin: 0px;
}
th, td {
min-width: 100px;
text-align: left;
}
#page-header {
clear: both;
width: 1200px;
margin: 20px auto 0px auto;
height: 10px;
border-bottom-width: 2px;<|fim▁hole|>}
#page {
width: 1160px;
margin: auto;
border-left-width: 2px;
border-left-style: solid;
border-left-color: #aaaaaa;
border-right-width: 2px;
border-right-style: solid;
border-right-color: #aaaaaa;
background-color: White;
padding: 20px;
}
#page-footer {
clear: both;
width: 1200px;
margin: auto;
height: 10px;
border-top-width: 2px;
border-top-style: solid;
border-top-color: #aaaaaa;
}
#header {
width: 100%;
height: 70px;
background-image: url(logo.png);
background-repeat: no-repeat;
background-position: left top;
border-bottom-style: solid;
border-bottom-width: thin;
border-bottom-color: #aaaaaa;
}
#menu {
margin-top: 5px;
text-align: left;
float: left;
width: 100px;
height: 300px;
}
#menu > a {
margin-left: 10px;
display: block;
}
#content {
float: left;
width: 1020px;
margin: 5px;
padding: 0px 10px 10px 10px;
border-left-style: solid;
border-left-width: thin;
border-left-color: #aaaaaa;
}
#footer {
padding-bottom: 5px;
padding-top: 5px;
border-top-style: solid;
border-top-width: thin;
border-top-color: #aaaaaa;
clear: both;
font-size: 10px;
}
#footer > div {
float: left;
width: 33%;
}
"""<|fim▁end|> | border-bottom-style: solid;
border-bottom-color: #aaaaaa; |
<|file_name|>0002_auto_20160301_1154.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals<|fim▁hole|>
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('bookmarks', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='bookmark',
name='added',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='added'),
),
migrations.AlterField(
model_name='bookmark',
name='favicon_checked',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='favicon checked'),
),
migrations.AlterField(
model_name='bookmarkinstance',
name='saved',
field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='saved'),
),
]<|fim▁end|> | |
<|file_name|>UnmountOldrootState.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
import os
import re
import sys
from time import sleep
from OSEncryptionState import *
class UnmountOldrootState(OSEncryptionState):
def __init__(self, context):
super(UnmountOldrootState, self).__init__('UnmountOldrootState', context)
def should_enter(self):
self.context.logger.log("Verifying if machine should enter unmount_oldroot state")
if not super(UnmountOldrootState, self).should_enter():
return False
self.context.logger.log("Performing enter checks for unmount_oldroot state")
self.command_executor.ExecuteInBash('[ -e "/oldroot" ]', True)
if self.command_executor.Execute('mountpoint /oldroot') != 0:
return False
return True
def enter(self):
if not self.should_enter():
return
self.context.logger.log("Entering unmount_oldroot state")
self.command_executor.Execute('systemctl rescue', True)
self.command_executor.Execute('systemctl start sshd.service', True)
self.command_executor.Execute('systemctl start walinuxagent.service', True)
proc_comm = ProcessCommunicator()
self.command_executor.Execute(command_to_execute="systemctl list-units",
raise_exception_on_failure=True,
communicator=proc_comm)
for line in proc_comm.stdout.split('\n'):
if not "running" in line:
continue
if "walinuxagent.service" in line or "sshd.service" in line:
continue
match = re.search(r'\s(\S*?\.service)', line)
if match:
service = match.groups()[0]
self.command_executor.Execute('systemctl restart {0}'.format(service))
self.command_executor.Execute('swapoff -a', True)
self.bek_util.umount_azure_passhprase(self.encryption_config, force=True)
if os.path.exists("/oldroot/mnt"):
self.command_executor.Execute('umount /oldroot/mnt')
if os.path.exists("/oldroot/mnt/azure_bek_disk"):
self.command_executor.Execute('umount /oldroot/mnt/azure_bek_disk')
if os.path.exists("/mnt"):
self.command_executor.Execute('umount /mnt')
if os.path.exists("/mnt/azure_bek_disk"):
self.command_executor.Execute('umount /mnt/azure_bek_disk')
proc_comm = ProcessCommunicator()
self.command_executor.Execute(command_to_execute="fuser -vm /oldroot",
raise_exception_on_failure=True,
communicator=proc_comm)
self.context.logger.log("Processes using oldroot:\n{0}".format(proc_comm.stdout))
procs_to_kill = filter(lambda p: p.isdigit(), proc_comm.stdout.split())
procs_to_kill = reversed(sorted(procs_to_kill))
for victim in procs_to_kill:
proc_name = ""
try:
with open("/proc/{0}/cmdline".format(victim)) as f:
proc_name = f.read()
except IOError as e:
self.context.logger.log("Proc {0} is already dead".format(victim))
self.context.logger.log("Killing process: {0} ({1})".format(proc_name, victim))
if int(victim) == os.getpid():
self.context.logger.log("Restarting WALA in 30 seconds before committing suicide")
# Kill any other daemons that are blocked and would be executed after this process commits
# suicide
self.command_executor.ExecuteInBash('sleep 30 && pkill -f .*ForLinux.*handle.py.*daemon.* && systemctl start walinuxagent &', True)
if int(victim) == 1:
self.context.logger.log("Skipping init")
continue
if "mount.ntfs" in proc_name:
self.context.logger.log("Skipping mount.ntfs")
continue
self.command_executor.Execute('kill -9 {0}'.format(victim))
self.command_executor.Execute('telinit u', True)
sleep(3)
self.command_executor.Execute('umount /oldroot', True)
sleep(3)
attempt = 1
while True:
if attempt > 10:
raise Exception("Block device {0} did not appear in 10 restart attempts".format(self.rootfs_block_device))<|fim▁hole|> self.command_executor.Execute('systemctl restart systemd-timesyncd')
sleep(10)
if self.command_executor.ExecuteInBash('[ -b {0} ]'.format(self.rootfs_block_device), False) == 0:
break
attempt += 1
self.command_executor.Execute('e2fsck -yf {0}'.format(self.rootfs_block_device), True)
def should_exit(self):
self.context.logger.log("Verifying if machine should exit unmount_oldroot state")
if os.path.exists('/oldroot/bin'):
self.context.logger.log("/oldroot was not unmounted")
return False
return super(UnmountOldrootState, self).should_exit()<|fim▁end|> |
self.context.logger.log("Restarting systemd-udevd")
self.command_executor.Execute('systemctl restart systemd-udevd')
self.context.logger.log("Restarting systemd-timesyncd") |
<|file_name|>zkserver.go<|end_file_name|><|fim▁begin|>package main
import (
"flag"
"github.com/koofr/go-zkutils"
"log"
)
func main() {
var port = flag.Int("port", 2181, "ZooKeeper port for listening")
flag.Parse()
server, err := zkutils.NewTestServer(*port)
if err != nil {
log.Fatal(err)
}
defer server.Stop()<|fim▁hole|> log.Printf("ZooKeeper server running on 127.0.0.1:%d", *port)
<-make(chan int)
}<|fim▁end|> | |
<|file_name|>Pin.java<|end_file_name|><|fim▁begin|>/**
*
* Copyright 2014-2017 Florian Schmaus
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.geekplace.javapinning.pin;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateEncodingException;
import java.security.cert.X509Certificate;
import java.util.logging.Level;
import java.util.logging.Logger;
import eu.geekplace.javapinning.util.HexUtilities;
public abstract class Pin {
private static final Logger LOGGER = Logger.getLogger(Sha256Pin.class.getName());
protected static final MessageDigest sha256md;
static {
MessageDigest sha256mdtemp = null;
try {<|fim▁hole|> sha256mdtemp = MessageDigest.getInstance("SHA-256");
} catch (NoSuchAlgorithmException e) {
LOGGER.log(Level.WARNING, "SHA-256 MessageDigest not available", e);
}
sha256md = sha256mdtemp;
}
protected final byte[] pinBytes;
protected Pin(byte[] pinBytes) {
this.pinBytes = pinBytes;
}
protected Pin(String pinHexString) {
pinBytes = HexUtilities.decodeFromHex(pinHexString);
}
public abstract boolean pinsCertificate(X509Certificate x509certificate) throws CertificateEncodingException;
protected abstract boolean pinsCertificate(byte[] pubkey);
/**
* Create a new {@link Pin} from the given String.
* <p>
* The Pin String must be in the format <tt>[type]:[hex-string]</tt>, where
* <tt>type</tt> denotes the type of the Pin and <tt>hex-string</tt> is the
* binary value of the Pin encoded in hex. Currently supported types are
* <ul>
* <li>PLAIN</li>
* <li>SHA256</li>
* <li>CERTPLAIN</li>
* <li>CERTSHA256</li>
* </ul>
	 * The hex-string must consist only of whitespace characters, colons (':'),
* numbers [0-9] and ASCII letters [a-fA-F]. It must be a valid hex-encoded
* binary representation. First the string is lower-cased, then all
* whitespace characters and colons are removed before the string is decoded
* to bytes.
* </p>
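	 * <p>
	 * As an illustration only (the digest shown is a truncated, made-up
	 * placeholder rather than a real certificate hash), a call could look like
	 * {@code Pin.fromString("SHA256:83f9ab12...")}.
	 * </p>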
*
* @param string
* the Pin String.
* @return the Pin for the given Pin String.
* @throws IllegalArgumentException
* if the given String is not a valid Pin String
*/
public static Pin fromString(String string) {
// The Pin's string may have multiple colons (':'), assume that
// everything before the first colon is the Pin type and everything
// after the colon is the Pin's byte encoded in hex.
String[] pin = string.split(":", 2);
if (pin.length != 2) {
throw new IllegalArgumentException("Invalid pin string, expected: 'format-specifier:hex-string'.");
}
String type = pin[0];
String pinHex = pin[1];
switch (type) {
case "SHA256":
return new Sha256Pin(pinHex);
case "PLAIN":
return new PlainPin(pinHex);
case "CERTSHA256":
return new CertSha256Pin(pinHex);
case "CERTPLAIN":
return new CertPlainPin(pinHex);
default:
throw new IllegalArgumentException();
}
}
/**
* Returns a clone of the bytes that represent this Pin.
* <p>
* This method is meant for unit testing only and therefore not public.
* </p>
*
* @return a clone of the bytes that represent this Pin.
*/
byte[] getPinBytes() {
return pinBytes.clone();
}
}<|fim▁end|> | |
<|file_name|>test_mnist.py<|end_file_name|><|fim▁begin|>"""
This file tests some of the YAML files in the maxout paper
"""
import os
import pylearn2
from pylearn2.datasets import control
from pylearn2.datasets.mnist import MNIST
from pylearn2.termination_criteria import EpochCounter
from pylearn2.testing.skip import skip_if_no_gpu
from pylearn2.utils.serial import load_train_file
def test_mnist():
"""
Test the mnist.yaml file from the maxout
paper on random input
"""
skip_if_no_gpu()
train = load_train_file(os.path.join(pylearn2.__path__[0],
"scripts/papers/maxout/mnist.yaml"))
# Load fake MNIST data
init_value = control.load_data
control.load_data = [False]
train.dataset = MNIST(which_set='train',
axes=['c', 0, 1, 'b'], start=0, stop=100)
train.algorithm._set_monitoring_dataset(train.dataset)
control.load_data = init_value
# Train shortly and prevent saving
train.algorithm.termination_criterion = EpochCounter(max_epochs=1)
train.extensions.pop(0)
train.save_freq = 0
train.main_loop()
def test_mnist_pi():
"""
Test the mnist_pi.yaml file from the maxout
paper on random input
"""
train = load_train_file(
os.path.join(pylearn2.__path__[0],
"scripts/papers/maxout/mnist_pi.yaml")
)
# Load fake MNIST data
init_value = control.load_data
control.load_data = [False]<|fim▁hole|>
# Train shortly and prevent saving
train.algorithm.termination_criterion = EpochCounter(max_epochs=1)
train.extensions.pop(0)
train.save_freq = 0
train.main_loop()<|fim▁end|> | train.dataset = MNIST(which_set='train', start=0, stop=100)
train.algorithm._set_monitoring_dataset(train.dataset)
control.load_data = init_value |
<|file_name|>discount.server.model.test.js<|end_file_name|><|fim▁begin|>'use strict';
/**
* Module dependencies.
*/
var should = require('should'),
mongoose = require('mongoose'),
User = mongoose.model('User'),
Discount = mongoose.model('Discount');
/**
* Globals
*/
var user, discount;
/**
* Unit tests
*/
describe('Discount Model Unit Tests:', function() {
beforeEach(function(done) {
user = new User({
firstName: 'Full',
lastName: 'Name',
displayName: 'Full Name',
email: '[email protected]',
username: 'username',
password: 'password'
});
user.save(function() {
discount = new Discount({
name: 'Discount Name',
user: user
});
done();
});
});
describe('Method Save', function() {
it('should be able to save without problems', function(done) {
return discount.save(function(err) {
should.not.exist(err);
done();
});
});
		it('should be able to show an error when trying to save without name', function(done) {
discount.name = '';
<|fim▁hole|> should.exist(err);
done();
});
});
});
afterEach(function(done) {
Discount.remove().exec();
User.remove().exec();
done();
});
});<|fim▁end|> | return discount.save(function(err) { |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># Generated by Django 2.0.13 on 2019-06-22 18:48
import django.db.models.deletion
import django.utils.timezone
import django_fsm
from django.conf import settings
from django.db import migrations, models
import apps.core.models
class Migration(migrations.Migration):
initial = True
dependencies = [migrations.swappable_dependency(settings.AUTH_USER_MODEL)]
operations = [
migrations.CreateModel(
name="Meeting",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"date_created",<|fim▁hole|> blank=True, default=django.utils.timezone.now, editable=False
),
),
(
"date_modified",
apps.core.models.DateTimeModifiedField(
blank=True, default=django.utils.timezone.now, editable=False
),
),
("format", models.CharField(blank=True, max_length=50)),
("message", models.TextField(blank=True)),
("datetime", models.DateTimeField()),
("state", django_fsm.FSMField(default="available", max_length=50)),
(
"cancelled_by",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="+",
to=settings.AUTH_USER_MODEL,
),
),
(
"mentor",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
related_name="mentors",
to=settings.AUTH_USER_MODEL,
),
),
(
"protege",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="proteges",
to=settings.AUTH_USER_MODEL,
),
),
],
options={"ordering": ("-datetime",)},
)
]<|fim▁end|> | apps.core.models.DateTimeCreatedField( |
<|file_name|>paraphrase_ms_coco_test.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2022 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#<|fim▁hole|># distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tensor2tensor.data_generators.paraphrase_ms_coco."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import mock
from tensor2tensor.data_generators import paraphrase_ms_coco
import tensorflow.compat.v1 as tf
class ParaphraseGenerationProblemTest(tf.test.TestCase):
def testCombinationPairs(self):
inputs = ["A", "B", "C"]
expected_combination = [("A", "B"), ("A", "C"), ("B", "C")]
actual_combination = paraphrase_ms_coco.create_combination(inputs)
self.assertEqual(actual_combination, expected_combination)
@mock.patch("tensor2tensor.data_generators"
".paraphrase_ms_coco.ParaphraseGenerationProblem.prepare_data",
return_value=[("sentence1", "sentence2")])
@mock.patch("tensor2tensor.data_generators"
".paraphrase_ms_coco.ParaphraseGenerationProblem.bidirectional")
def testBidirectionalTrue(self, data, bidirectional):
paraphrase_problem = paraphrase_ms_coco.ParaphraseGenerationProblem()
paraphrase_problem.bidirectional = True
expected_generated_data = [{"inputs": "sentence1", "targets": "sentence2"},
{"inputs": "sentence2", "targets": "sentence1"}]
actual_generated_data = list(paraphrase_problem
.generate_samples("data_dir",
"tmp_dir",
"dataset_split"))
self.assertEqual(actual_generated_data, expected_generated_data)
@mock.patch("tensor2tensor.data_generators"
".paraphrase_ms_coco.ParaphraseGenerationProblem.prepare_data",
return_value=[("sentence1", "sentence2")])
@mock.patch("tensor2tensor.data_generators"
".paraphrase_ms_coco.ParaphraseGenerationProblem.bidirectional")
def testBidirectionalFalse(self, data, bidirectional):
paraphrase_problem = paraphrase_ms_coco.ParaphraseGenerationProblem()
paraphrase_problem.bidirectional = False
expected_generated_data = [{"inputs": "sentence1", "targets": "sentence2"}]
actual_generated_data = list(paraphrase_problem
.generate_samples("data_dir",
"tmp_dir",
"dataset_split"))
self.assertEqual(actual_generated_data, expected_generated_data)
if __name__ == "__main__":
tf.test.main()<|fim▁end|> | # http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software |
<|file_name|>dao_test.go<|end_file_name|><|fim▁begin|>package dao
import (
"context"
"flag"
"log"
"math/rand"
"os"
"strconv"
"testing"
"time"
. "github.com/smartystreets/goconvey/convey"
"go-common/app/service/main/dapper/conf"
"go-common/app/service/main/dapper/model"
)
func init() {
rand.Seed(time.Now().UnixNano())
}
var cfg *conf.Config
var flagMap = map[string]string{
"app_id": "main.common-arch.dapper-service",
"conf_token": "528dd7e00bb411e894c14a552f48fef8",
"tree_id": "5172",
"conf_version": "server-1",
"deploy_env": "uat",
"conf_host": "config.bilibili.co",
"conf_path": os.TempDir(),
"region": "sh",
"zone": "sh001",
}
func TestMain(m *testing.M) {
for key, val := range flagMap {
flag.Set(key, val)
}
flag.Parse()
if err := conf.Init(); err != nil {
log.Printf("init config from remote error: %s", err)
}
cfg = conf.Conf
if cfg.InfluxDB != nil {
cfg.InfluxDB.Database = "dapper_ut"
}
if cfg.HBase != nil {
cfg.HBase.Namespace = "dapperut"
}
if hbaseAddrs := os.Getenv("TEST_HBASE_ADDRS"); hbaseAddrs != "" {
cfg.HBase = &conf.HBaseConfig{Addrs: hbaseAddrs, Namespace: "dapperut"}
if influxdbAddr := os.Getenv("TEST_INFLUXDB_ADDR"); influxdbAddr != "" {
cfg.InfluxDB = &conf.InfluxDBConfig{Addr: influxdbAddr, Database: "dapper_ut"}
}
}
os.Exit(m.Run())
}
func TestDao(t *testing.T) {
if cfg == nil {
t.Skipf("no config provide skipped")
}
daoImpl, err := New(cfg)
if err != nil {
t.Fatalf("new dao error: %s", err)
}
ctx := context.Background()
Convey("test fetch serviceName and operationName", t, func() {
serviceNames, err := daoImpl.FetchServiceName(ctx)
So(err, ShouldBeNil)
So(serviceNames, ShouldNotBeEmpty)
for _, serviceName := range serviceNames {
operationNames, err := daoImpl.FetchOperationName(ctx, serviceName)
So(err, ShouldBeNil)
t.Logf("%s operationNames: %v", serviceName, operationNames)
}
})
Convey("test write rawtrace", t, func() {
if err := daoImpl.WriteRawTrace(
context.Background(),
strconv.FormatUint(rand.Uint64(), 16),
map[string][]byte{strconv.FormatUint(rand.Uint64(), 16): []byte("hello world")},
); err != nil {
t.Error(err)
}
})
Convey("test batchwrite span point", t, func() {
points := []*model.SpanPoint{
&model.SpanPoint{
ServiceName: "service_a",
OperationName: "opt1",
PeerService: "peer_service_a",
SpanKind: "client",
Timestamp: time.Now().Unix() - rand.Int63n(3600),
MaxDuration: model.SamplePoint{
SpanID: rand.Uint64(),
TraceID: rand.Uint64(),
Value: rand.Int63n(1024),
},
MinDuration: model.SamplePoint{
SpanID: rand.Uint64(),
TraceID: rand.Uint64(),
Value: rand.Int63n(1024),
},
AvgDuration: model.SamplePoint{
SpanID: rand.Uint64(),
TraceID: rand.Uint64(),
Value: rand.Int63n(1024),
},
Errors: []model.SamplePoint{
model.SamplePoint{
SpanID: rand.Uint64(),
TraceID: rand.Uint64(),
Value: 1,
},
model.SamplePoint{
SpanID: rand.Uint64(),
TraceID: rand.Uint64(),
Value: 1,
},
},
},
&model.SpanPoint{
ServiceName: "service_b",
OperationName: "opt2",
PeerService: "peer_service_b",
SpanKind: "server",
Timestamp: time.Now().Unix() - rand.Int63n(3600),<|fim▁hole|> PeerService: "peer_service_c",
SpanKind: "client",
Timestamp: time.Now().Unix() - rand.Int63n(3600),
},
}
err := daoImpl.BatchWriteSpanPoint(context.Background(), points)
if err != nil {
t.Error(err)
}
})
}<|fim▁end|> | },
&model.SpanPoint{
ServiceName: "service_c",
OperationName: "opt3", |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
#
#
#
# This file is part of librix-thinclient.
#
# librix-thinclient is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.<|fim▁hole|>#
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with librix-thinclient. If not, see <http://www.gnu.org/licenses/>.
__all__ = [
'ui',
'lib',
'daemon',
'modules',
]<|fim▁end|> | #
# librix-thinclient is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of |
<|file_name|>paragraph_viz.js<|end_file_name|><|fim▁begin|>"use strict";
module.exports = {
tagName: "div",
className: "",
defaults: {
content: "default content"
},
render: function() {
},
client: function(options) {
var result = options.client_options.result;
var session = result.data.session;
var total = session.count;<|fim▁hole|> var helpers = this.helpers;
var color_picker = this.helpers['vendor/jquery.colors'].get_color;
for (index = 0; index < total; index++) {
cell_count = session[index] || 0;
var opacity = Math.round(cell_count / parseFloat(session.active) * 100.0) / 100;
var color = color_picker(result.sid);
var div = $("<div />")
.html("<div style='width: 10px; height: 20px' />")
.css("background-color", color)
.css("opacity", opacity || "0.001");
cells.append(div);
}
this.$el.find(".viz").append(cells);
}
};<|fim▁end|> | var cell_count;
var cells = $("<div/>");
var index; |
<|file_name|>XmidComparator.java<|end_file_name|><|fim▁begin|>package com.tamirhassan.pdfxtk.comparators;
/**
* pdfXtk - PDF Extraction Toolkit
* Copyright (c) by the authors/contributors. All rights reserved.
* This project includes code from PDFBox and TouchGraph.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. Neither the names pdfXtk or PDF Extraction Toolkit; nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* http://pdfxtk.sourceforge.net
*
*/
import java.util.Comparator;
import com.tamirhassan.pdfxtk.model.GenericSegment;
/**
* @author Tamir Hassan, [email protected]
* @version PDF Analyser 0.9
*
* Sorts on Xmid coordinate ((x1+x2)/2)
*/
public class XmidComparator implements Comparator<GenericSegment>
{
public int compare(GenericSegment obj1, GenericSegment obj2)
{
// sorts in x order
double x1 = obj1.getXmid();
double x2 = obj2.getXmid();
// causes a contract violation (rounding?)
// return (int) (x1 - x2);
if (x2 > x1) return -1;<|fim▁hole|> }
public boolean equals(Object obj)
{
return obj.equals(this);
}
}<|fim▁end|> | else if (x2 == x1) return 0;
else return 1; |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | # Major, Minor
VERSION = (1, 4) |
<|file_name|>socketio-area.client.routes.js<|end_file_name|><|fim▁begin|><|fim▁hole|>angular.module('socketio-area').config(['$stateProvider',
function($stateProvider) {
// Socketio area state routing
$stateProvider.
state('socketio-area', {
url: '/socketio',
templateUrl: 'modules/socketio-area/views/socketio-area.client.view.html'
});
}
]);<|fim▁end|> | 'use strict';
//Setting up route |
<|file_name|>RubyConstant.java<|end_file_name|><|fim▁begin|>package com.xruby.runtime.lang;
public abstract class RubyConstant extends RubyBasic {
public static RubyConstant QFALSE = new RubyConstant(RubyRuntime.FalseClassClass) {
public boolean isTrue() {
return false;
}<|fim▁hole|> public static RubyConstant QTRUE = new RubyConstant(RubyRuntime.TrueClassClass) {
public boolean isTrue() {
return true;
}
};
public static RubyConstant QNIL = new RubyConstant(RubyRuntime.NilClassClass) {
public boolean isTrue() {
return false;
}
public String toStr() {
throw new RubyException(RubyRuntime.TypeErrorClass, "Cannot convert nil into String");
}
};
private RubyConstant(RubyClass c) {
super(c);
}
}<|fim▁end|> | };
|
<|file_name|>graph_actions_test.py<|end_file_name|><|fim▁begin|># Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Graph actions tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shutil
import tempfile
from tensorflow.contrib import testing
from tensorflow.contrib.framework.python.framework import checkpoint_utils
from tensorflow.contrib.framework.python.ops import variables as variables_lib
from tensorflow.contrib.learn.python import learn
from tensorflow.contrib.learn.python.learn.monitors import BaseMonitor
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import meta_graph
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import resources
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.summary import summary
from tensorflow.python.training import monitored_session
from tensorflow.python.training import saver as saver_lib
class _Feeder(object):
"""Simple generator for `feed_fn`, returning 10 * step."""
def __init__(self, tensor, max_step):
self._step = 0
self._tensor = tensor
self._max_step = max_step
@property
def step(self):
return self._step
def feed_fn(self):
if self._step >= self._max_step:
raise StopIteration
value = self._step * 10.0
self._step += 1
return {self._tensor: value}
class _BaseMonitorWrapper(BaseMonitor):
"""Base monitor wrapper to facilitate testing.
This monitor can act as either chief-exclusive or non-exclusive.
"""
def __init__(self, run_on_all_workers):
super(_BaseMonitorWrapper, self).__init__()
self._run_on_all_workers = run_on_all_workers
self._is_active = False
self._has_step = False
@property
def run_on_all_workers(self):
return self._run_on_all_workers
@property
def is_active(self):
return self._is_active
@property
def has_step(self):
return self._has_step
def begin(self, max_steps=None):
self._is_active = True
return super(_BaseMonitorWrapper, self).begin(max_steps)
def step_begin(self, step):
self._has_step = True
return super(_BaseMonitorWrapper, self).step_begin(step)
class GraphActionsTest(test.TestCase):
"""Graph actions tests."""
def setUp(self):
learn.graph_actions.clear_summary_writers()
self._output_dir = tempfile.mkdtemp()
testing.FakeSummaryWriter.install()
def tearDown(self):
testing.FakeSummaryWriter.uninstall()
if self._output_dir:
shutil.rmtree(self._output_dir)
learn.graph_actions.clear_summary_writers()
def _assert_summaries(self,
output_dir,
writer,
expected_summaries=None,
expected_graphs=None,
expected_meta_graphs=None,
expected_session_logs=None):
self.assertTrue(isinstance(writer, testing.FakeSummaryWriter))
writer.assert_summaries(
self,
expected_logdir=output_dir,
expected_graph=ops.get_default_graph(),
expected_summaries=expected_summaries,
expected_added_graphs=expected_graphs,
expected_added_meta_graphs=expected_meta_graphs,
expected_session_logs=expected_session_logs)
# TODO(ptucker): Test number and contents of checkpoint files.
def _assert_ckpt(self, output_dir, expected=True):
ckpt_state = saver_lib.get_checkpoint_state(output_dir)
if expected:
pattern = '%s/model.ckpt-.*' % output_dir
primary_ckpt_path = ckpt_state.model_checkpoint_path
self.assertRegexpMatches(primary_ckpt_path, pattern)
all_ckpt_paths = ckpt_state.all_model_checkpoint_paths
self.assertTrue(primary_ckpt_path in all_ckpt_paths)
for ckpt_path in all_ckpt_paths:
self.assertRegexpMatches(ckpt_path, pattern)
else:
self.assertTrue(ckpt_state is None)
# TODO(ptucker): Test lock, multi-threaded access?
def test_summary_writer(self):
writer = learn.graph_actions.get_summary_writer('log/dir/0')
self._assert_summaries('log/dir/0', writer)
self.assertTrue(
learn.graph_actions.get_summary_writer('log/dir/0') is
learn.graph_actions.get_summary_writer('log/dir/0'))
self.assertTrue(
learn.graph_actions.get_summary_writer('log/dir/0') is
not learn.graph_actions.get_summary_writer('log/dir/1'))
# TODO(ptucker): Test restore_checkpoint_path for eval; this should obsolete
# test_evaluate_with_saver().
# TODO(ptucker): Test start_queue_runners for both eval & train.
# TODO(ptucker): Test coord.request_stop & coord.join for eval.
def _build_inference_graph(self):
"""Build simple inference graph.
This includes a regular variable, local variable, and fake table.
Returns:
Tuple of 3 `Tensor` objects, 2 input and 1 output.
"""
variables_lib.create_global_step()
in0 = variables.Variable(1.0)
in1 = variables_lib.local_variable(2.0)
fake_table = variables.Variable(
3.0,
trainable=False,
collections=['fake_tables'],
name='fake_table_var')
in0.graph.add_to_collections([ops.GraphKeys.TABLE_INITIALIZERS],
fake_table.initializer)
out = in0 + in1 + fake_table
return in0, in1, out
def test_infer(self):
with ops.Graph().as_default() as g, self.test_session(g):
self._assert_ckpt(self._output_dir, False)
in0, in1, out = self._build_inference_graph()
self.assertEqual({
'a': 1.0,
'b': 2.0,
'c': 6.0
}, learn.graph_actions.infer(None, {'a': in0,
'b': in1,
'c': out}))
self._assert_ckpt(self._output_dir, False)
@test.mock.patch.object(
learn.graph_actions.coordinator.Coordinator,
'request_stop',
side_effect=learn.graph_actions.coordinator.Coordinator.request_stop,
autospec=True)
def test_coordinator_request_stop_called(self, request_stop):
with ops.Graph().as_default() as g, self.test_session(g):
in0, in1, out = self._build_inference_graph()
learn.graph_actions.infer(None, {'a': in0, 'b': in1, 'c': out})
self.assertTrue(request_stop.called)
@test.mock.patch.object(
learn.graph_actions.coordinator.Coordinator,
'request_stop',
side_effect=learn.graph_actions.coordinator.Coordinator.request_stop,
autospec=True)
def test_run_feeds_iter_cleanup_with_exceptions(self, request_stop):
with ops.Graph().as_default() as g, self.test_session(g):
in0, in1, out = self._build_inference_graph()
try:
for _ in learn.graph_actions.run_feeds_iter({
'a': in0,
'b': in1,
'c': out
}, [None] * 3):
self.assertFalse(request_stop.called)
raise ValueError('Fake exception')
except ValueError:
pass
self.assertTrue(request_stop.called)
def test_run_feeds_iter_calls_resources_init(self):
with ops.Graph().as_default() as g:
in0, _, _ = self._build_inference_graph()
handle = test_ops.stub_resource_handle_op(container='a', shared_name='b')
resources.register_resource(
handle=handle,
create_op=test_ops.resource_create_op(handle),
is_initialized_op=test_ops.resource_initialized_op(handle))
for _ in learn.graph_actions.run_feeds_iter(
{
'in0': in0
}, feed_dicts=[{}]):
self.assertTrue(test_ops.resource_initialized_op(handle).eval())
def test_infer_different_default_graph(self):
with self.test_session():
self._assert_ckpt(self._output_dir, False)
with ops.Graph().as_default():
in0, in1, out = self._build_inference_graph()
with ops.Graph().as_default():
self.assertEqual({
'a': 1.0,
'b': 2.0,
'c': 6.0
}, learn.graph_actions.infer(None, {'a': in0,
'b': in1,
'c': out}))
self._assert_ckpt(self._output_dir, False)
def test_infer_invalid_feed(self):
with ops.Graph().as_default() as g, self.test_session(g):
self._assert_ckpt(self._output_dir, False)
in0, _, _ = self._build_inference_graph()
with self.assertRaisesRegexp(TypeError, 'Can not convert a NoneType'):
learn.graph_actions.infer(None, {'a': in0}, feed_dict={None: 4.0})
self._assert_ckpt(self._output_dir, False)
def test_infer_feed(self):
with ops.Graph().as_default() as g, self.test_session(g):
self._assert_ckpt(self._output_dir, False)
in0, _, out = self._build_inference_graph()
self.assertEqual(
{
'c': 9.0
},
learn.graph_actions.infer(
None, {'c': out}, feed_dict={in0: 4.0}))
self._assert_ckpt(self._output_dir, False)
# TODO(ptucker): Test eval for 1 epoch.
def test_evaluate_invalid_args(self):
with ops.Graph().as_default() as g, self.test_session(g):
self._assert_ckpt(self._output_dir, False)
with self.assertRaisesRegexp(ValueError, 'utput directory'):
learn.graph_actions.evaluate(
g,
output_dir=None,
checkpoint_path=None,
eval_dict={'a': constant_op.constant(1.0)})
with self.assertRaisesRegexp(ValueError, 'utput directory'):
learn.graph_actions.evaluate(
g,
output_dir='',
checkpoint_path=None,
eval_dict={'a': constant_op.constant(1.0)})
self._assert_ckpt(self._output_dir, False)
def test_evaluate(self):
with ops.Graph().as_default() as g, self.test_session(g):
_, _, out = self._build_inference_graph()
writer = learn.graph_actions.get_summary_writer(self._output_dir)
self._assert_summaries(self._output_dir, writer, expected_session_logs=[])
self._assert_ckpt(self._output_dir, False)
results = learn.graph_actions.evaluate(
g,
output_dir=self._output_dir,
checkpoint_path=None,
eval_dict={'a': out},
max_steps=1)
self.assertEqual(({'a': 6.0}, 0), results)
self._assert_summaries(
self._output_dir,
writer,
expected_summaries={0: {
'a': 6.0
}},
expected_session_logs=[])
self._assert_ckpt(self._output_dir, False)
def test_evaluate_ready_for_local_init(self):
with ops.Graph().as_default() as g, self.test_session(g):
variables_lib.create_global_step()
v = variables.Variable(1.0)
w = variables.Variable(
v + 1, collections=[ops.GraphKeys.LOCAL_VARIABLES], trainable=False)
ready_for_local_init_op = variables.report_uninitialized_variables(
variables.global_variables())
ops.add_to_collection(ops.GraphKeys.READY_FOR_LOCAL_INIT_OP,
ready_for_local_init_op)
_ = learn.graph_actions.evaluate(
g,
output_dir=self._output_dir,
checkpoint_path=None,
eval_dict={'a': v},
max_steps=1)
def test_evaluate_feed_fn(self):
with ops.Graph().as_default() as g, self.test_session(g):
in0, _, out = self._build_inference_graph()
writer = learn.graph_actions.get_summary_writer(self._output_dir)
self._assert_summaries(self._output_dir, writer, expected_session_logs=[])
self._assert_ckpt(self._output_dir, False)<|fim▁hole|> output_dir=self._output_dir,
checkpoint_path=None,
eval_dict={'a': out},
feed_fn=feeder.feed_fn,
max_steps=3)
self.assertEqual(3, feeder.step)
self.assertEqual(({'a': 25.0}, 0), results)
self._assert_summaries(
self._output_dir,
writer,
expected_summaries={0: {
'a': 25.0
}},
expected_session_logs=[])
self._assert_ckpt(self._output_dir, False)
def test_evaluate_feed_fn_with_exhaustion(self):
with ops.Graph().as_default() as g, self.test_session(g):
in0, _, out = self._build_inference_graph()
writer = learn.graph_actions.get_summary_writer(self._output_dir)
self._assert_summaries(self._output_dir, writer, expected_session_logs=[])
feeder = _Feeder(in0, 2)
results = learn.graph_actions.evaluate(
g,
output_dir=self._output_dir,
checkpoint_path=None,
eval_dict={'a': out},
feed_fn=feeder.feed_fn,
max_steps=3)
self.assertEqual(2, feeder.step)
self.assertEqual(({'a': 15.0}, 0), results)
self._assert_summaries(
self._output_dir,
writer,
expected_summaries={0: {
'a': 15.0
}},
expected_session_logs=[])
def test_evaluate_with_saver(self):
with ops.Graph().as_default() as g, self.test_session(g):
_, _, out = self._build_inference_graph()
ops.add_to_collection(ops.GraphKeys.SAVERS, saver_lib.Saver())
writer = learn.graph_actions.get_summary_writer(self._output_dir)
self._assert_summaries(self._output_dir, writer, expected_session_logs=[])
results = learn.graph_actions.evaluate(
g,
output_dir=self._output_dir,
checkpoint_path=None,
eval_dict={'a': out},
max_steps=1)
self.assertEqual(({'a': 6.0}, 0), results)
self._assert_summaries(
self._output_dir,
writer,
expected_summaries={0: {
'a': 6.0
}},
expected_session_logs=[])
def test_train_invalid_args(self):
with ops.Graph().as_default() as g, self.test_session(g):
train_op = constant_op.constant(1.0)
loss_op = constant_op.constant(2.0)
with self.assertRaisesRegexp(ValueError, 'utput directory'):
learn.graph_actions._monitored_train(
g, # pylint: disable=protected-access
output_dir=None,
train_op=train_op,
loss_op=loss_op)
with self.assertRaisesRegexp(ValueError, 'utput directory'):
learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir='',
train_op=constant_op.constant(1.0),
loss_op=constant_op.constant(2.0))
with self.assertRaisesRegexp(ValueError, 'train_op'):
learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=None,
loss_op=loss_op)
with self.assertRaisesRegexp(ValueError, 'loss_op'):
learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=constant_op.constant(1.0),
loss_op=None)
with self.assertRaisesRegexp(ValueError, 'global_step'):
learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=constant_op.constant(1.0),
loss_op=loss_op)
# TODO(ptucker): Resume training from previous ckpt.
# TODO(ptucker): !supervisor_is_chief
# TODO(ptucker): Custom init op for training.
# TODO(ptucker): Mock supervisor, and assert all interactions.
def test_train(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
writer = learn.graph_actions.get_summary_writer(self._output_dir)
self._assert_summaries(self._output_dir, writer)
self._assert_ckpt(self._output_dir, False)
loss = learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
steps=1)
meta_graph_def = meta_graph.create_meta_graph_def(
graph_def=g.as_graph_def(add_shapes=True),
saver_def=monitored_session.Scaffold().finalize().saver.saver_def)
self.assertEqual(2.0, loss)
self._assert_summaries(
self._output_dir,
writer,
expected_graphs=[g],
expected_meta_graphs=[meta_graph_def])
self._assert_ckpt(self._output_dir, True)
def test_train_steps_is_incremental(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
steps=10)
step = checkpoint_utils.load_variable(
self._output_dir, variables_lib.get_global_step().name)
self.assertEqual(10, step)
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
steps=15)
step = checkpoint_utils.load_variable(
self._output_dir, variables_lib.get_global_step().name)
self.assertEqual(25, step)
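# With steps=N, training resumes from the checkpointed global step and runs N more
# steps, so 10 followed by 15 leaves the global step at 25.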
def test_train_max_steps_is_not_incremental(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
max_steps=10)
step = checkpoint_utils.load_variable(
self._output_dir, variables_lib.get_global_step().name)
self.assertEqual(10, step)
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
max_steps=15)
step = checkpoint_utils.load_variable(
self._output_dir, variables_lib.get_global_step().name)
self.assertEqual(15, step)
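# With max_steps=N, N is an absolute target for the global step, so the second call
# only runs 5 additional steps to reach 15.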
def test_train_skip_train_if_max_step_already_saved(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
max_steps=10)
step = checkpoint_utils.load_variable(
self._output_dir, variables_lib.get_global_step().name)
self.assertEqual(10, step)
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
max_steps=10)
step = checkpoint_utils.load_variable(
self._output_dir, variables_lib.get_global_step().name)
self.assertEqual(10, step)
def test_train_loss(self):
with ops.Graph().as_default() as g, self.test_session(g):
variables_lib.create_global_step()
loss_var = variables_lib.local_variable(10.0)
train_op = control_flow_ops.group(
state_ops.assign_add(variables_lib.get_global_step(), 1),
state_ops.assign_add(loss_var, -1.0))
writer = learn.graph_actions.get_summary_writer(self._output_dir)
self._assert_summaries(self._output_dir, writer)
self._assert_ckpt(self._output_dir, False)
loss = learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=loss_var.value(),
steps=6)
self.assertEqual(4.0, loss)
self._assert_summaries(
self._output_dir,
writer,
expected_graphs=[g],
expected_meta_graphs=None)
self._assert_ckpt(self._output_dir, True)
def test_train_summaries(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
loss_op = constant_op.constant(2.0)
summary.scalar('loss', loss_op)
writer = learn.graph_actions.get_summary_writer(self._output_dir)
self._assert_summaries(self._output_dir, writer)
self._assert_ckpt(self._output_dir, False)
loss = learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=loss_op,
steps=1)
meta_graph_def = meta_graph.create_meta_graph_def(
graph_def=g.as_graph_def(add_shapes=True),
saver_def=monitored_session.Scaffold().finalize().saver.saver_def)
self.assertEqual(2.0, loss)
self._assert_summaries(
self._output_dir,
writer,
expected_graphs=[g],
expected_meta_graphs=[meta_graph_def],
expected_summaries={1: {
'loss': 2.0
}})
self._assert_ckpt(self._output_dir, True)
def test_train_override_saver(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
self._assert_ckpt(self._output_dir, False)
real_saver = saver_lib.Saver()
saver = test.mock.Mock(wraps=real_saver, saver_def=real_saver.saver_def)
ops.add_to_collection(ops.GraphKeys.SAVERS, saver)
loss = learn.graph_actions._monitored_train( # pylint: disable=protected-access
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
steps=1)
self.assertEqual(2.0, loss)
self._assert_ckpt(self._output_dir, True)
self.assertTrue(saver.build.called)
self.assertEqual(1, saver.save.call_count)
# TODO(ispir): remove following tests after deprecated train.
class GraphActionsTrainTest(test.TestCase):
"""Tests for train."""
def setUp(self):
learn.graph_actions.clear_summary_writers()
self._output_dir = tempfile.mkdtemp()
testing.FakeSummaryWriter.install()
def tearDown(self):
testing.FakeSummaryWriter.uninstall()
if self._output_dir:
shutil.rmtree(self._output_dir)
learn.graph_actions.clear_summary_writers()
def _assert_summaries(self,
output_dir,
expected_summaries=None,
expected_graphs=None,
expected_meta_graphs=None,
expected_session_logs=None):
writer = learn.graph_actions.get_summary_writer(output_dir)
self.assertTrue(isinstance(writer, testing.FakeSummaryWriter))
writer.assert_summaries(
self,
expected_logdir=output_dir,
expected_graph=ops.get_default_graph(),
expected_summaries=expected_summaries,
expected_added_graphs=expected_graphs,
expected_added_meta_graphs=expected_meta_graphs,
expected_session_logs=expected_session_logs)
# TODO(ptucker): Test number and contents of checkpoint files.
def _assert_ckpt(self, output_dir, expected=True):
ckpt_state = saver_lib.get_checkpoint_state(output_dir)
if expected:
pattern = '%s/model.ckpt-.*' % output_dir
primary_ckpt_path = ckpt_state.model_checkpoint_path
self.assertRegexpMatches(primary_ckpt_path, pattern)
all_ckpt_paths = ckpt_state.all_model_checkpoint_paths
self.assertTrue(primary_ckpt_path in all_ckpt_paths)
for ckpt_path in all_ckpt_paths:
self.assertRegexpMatches(ckpt_path, pattern)
else:
self.assertTrue(ckpt_state is None)
def _build_inference_graph(self):
"""Build simple inference graph.
This includes a regular variable, local variable, and fake table.
Returns:
Tuple of 3 `Tensor` objects, 2 input and 1 output.
"""
variables_lib.create_global_step()
in0 = variables.Variable(1.0)
in1 = variables_lib.local_variable(2.0)
fake_table = variables.Variable(
3.0,
trainable=False,
collections=['fake_tables'],
name='fake_table_var')
in0.graph.add_to_collections([ops.GraphKeys.TABLE_INITIALIZERS],
fake_table.initializer)
out = in0 + in1 + fake_table
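# With the initial values above, out evaluates to 1.0 + 2.0 + 3.0 = 6.0.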
return in0, in1, out
def test_train_invalid_args(self):
with ops.Graph().as_default() as g, self.test_session(g):
train_op = constant_op.constant(1.0)
loss_op = constant_op.constant(2.0)
with self.assertRaisesRegexp(ValueError, 'utput directory'):
learn.graph_actions.train(
g, output_dir=None, train_op=train_op, loss_op=loss_op)
with self.assertRaisesRegexp(ValueError, 'utput directory'):
learn.graph_actions.train(
g,
output_dir='',
train_op=constant_op.constant(1.0),
loss_op=constant_op.constant(2.0))
with self.assertRaisesRegexp(ValueError, 'train_op'):
learn.graph_actions.train(
g, output_dir=self._output_dir, train_op=None, loss_op=loss_op)
with self.assertRaisesRegexp(ValueError, 'loss_op'):
learn.graph_actions.train(
g,
output_dir=self._output_dir,
train_op=constant_op.constant(1.0),
loss_op=None)
with self.assertRaisesRegexp(ValueError, 'global_step'):
learn.graph_actions.train(
g,
output_dir=self._output_dir,
train_op=constant_op.constant(1.0),
loss_op=loss_op)
# TODO(ptucker): Resume training from previous ckpt.
# TODO(ptucker): !supervisor_is_chief
# TODO(ptucker): Custom init op for training.
# TODO(ptucker): Mock supervisor, and assert all interactions.
def test_train(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
self._assert_summaries(self._output_dir)
self._assert_ckpt(self._output_dir, False)
loss = learn.graph_actions.train(
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
steps=1)
# TODO(ebrevdo,ptucker,ispir): this meta_graph_def lacks the
# SaverDef, so we can't add it to the summary assertion test below.
# meta_graph_def = meta_graph.create_meta_graph_def()
self.assertEqual(2.0, loss)
self._assert_summaries(self._output_dir, expected_graphs=[g])
self._assert_ckpt(self._output_dir, True)
def test_train_steps_is_incremental(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
learn.graph_actions.train(
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
steps=10)
step = checkpoint_utils.load_variable(
self._output_dir, variables_lib.get_global_step().name)
self.assertEqual(10, step)
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
learn.graph_actions.train(
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
steps=15)
step = checkpoint_utils.load_variable(
self._output_dir, variables_lib.get_global_step().name)
self.assertEqual(25, step)
def test_train_max_steps_is_not_incremental(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
learn.graph_actions.train(
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
max_steps=10)
step = checkpoint_utils.load_variable(
self._output_dir, variables_lib.get_global_step().name)
self.assertEqual(10, step)
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
learn.graph_actions.train(
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=constant_op.constant(2.0),
max_steps=15)
step = checkpoint_utils.load_variable(
self._output_dir, variables_lib.get_global_step().name)
self.assertEqual(15, step)
def test_train_loss(self):
with ops.Graph().as_default() as g, self.test_session(g):
variables_lib.create_global_step()
loss_var = variables_lib.local_variable(10.0)
train_op = control_flow_ops.group(
state_ops.assign_add(variables_lib.get_global_step(), 1),
state_ops.assign_add(loss_var, -1.0))
self._assert_summaries(self._output_dir)
self._assert_ckpt(self._output_dir, False)
loss = learn.graph_actions.train(
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=loss_var.value(),
steps=6)
# TODO(ebrevdo,ptucker,ispir): this meta_graph_def lacks the
# SaverDef, so we can't add it to the summary assertion test below.
# meta_graph_def = meta_graph.create_meta_graph_def()
self.assertEqual(4.0, loss)
self._assert_summaries(self._output_dir, expected_graphs=[g])
self._assert_ckpt(self._output_dir, True)
def test_train_summaries(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
loss_op = constant_op.constant(2.0)
summary.scalar('loss', loss_op)
self._assert_summaries(self._output_dir)
self._assert_ckpt(self._output_dir, False)
loss = learn.graph_actions.train(
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=loss_op,
steps=1)
# TODO(ebrevdo,ptucker,ispir): this meta_graph_def lacks the
# SaverDef, so we can't add it to the summary assertion test below.
# meta_graph_def = meta_graph.create_meta_graph_def()
self.assertEqual(2.0, loss)
self._assert_summaries(
self._output_dir,
expected_graphs=[g],
expected_summaries={1: {
'loss': 2.0
}})
self._assert_ckpt(self._output_dir, True)
def test_train_chief_monitor(self):
with ops.Graph().as_default() as g, self.test_session(g):
with ops.control_dependencies(self._build_inference_graph()):
train_op = state_ops.assign_add(variables_lib.get_global_step(), 1)
loss_op = constant_op.constant(2.0)
summary.scalar('loss', loss_op)
chief_exclusive_monitor = _BaseMonitorWrapper(False)
all_workers_monitor = _BaseMonitorWrapper(True)
loss = learn.graph_actions.train(
g,
output_dir=self._output_dir,
train_op=train_op,
loss_op=loss_op,
supervisor_is_chief=True,
steps=1,
monitors=[chief_exclusive_monitor, all_workers_monitor])
self.assertEqual(2.0, loss)
self.assertTrue(chief_exclusive_monitor.is_active and
all_workers_monitor.is_active,
'All monitors must have been active.')
self.assertTrue(chief_exclusive_monitor.has_step and
all_workers_monitor.has_step,
'All monitors must have a step.')
def test_train_worker_monitor(self):
# We need to explicitly set device due to check on non-chief workers
# requiring all variables to have a device assigned.
with ops.Graph().as_default() as g, g.device('/cpu:0'):
global_step = variables_lib.create_global_step(g)
train_op = state_ops.assign_add(global_step, 1)
loss_op = constant_op.constant(2.0)
summary.scalar('loss', loss_op)
# Add explicit "local" init op to initialize all variables
# as there's no chief to init here.
init_op = variables.global_variables_initializer()
ops.add_to_collection(ops.GraphKeys.LOCAL_INIT_OP, init_op)
# Create worker monitors where one should be active on the worker
# and the other chief exclusive.
chief_exclusive_monitor = _BaseMonitorWrapper(False)
all_workers_monitor = _BaseMonitorWrapper(True)
with self.test_session(g):
loss = learn.graph_actions.train(
g,
output_dir=self._output_dir,
global_step_tensor=global_step,
train_op=train_op,
loss_op=loss_op,
supervisor_is_chief=False,
steps=1,
monitors=[chief_exclusive_monitor, all_workers_monitor])
self.assertEqual(2.0, loss)
self.assertTrue(not chief_exclusive_monitor.is_active and
all_workers_monitor.is_active,
'Only non-chief runnable monitor must have been active.')
self.assertTrue(not chief_exclusive_monitor.has_step and
all_workers_monitor.has_step,
'Only non-chief runnable monitor must have a step.')
if __name__ == '__main__':
test.main()<|fim▁end|> | feeder = _Feeder(in0, 3)
results = learn.graph_actions.evaluate(
g, |
<|file_name|>models.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | (function($) {
})(jQuery);
<|file_name|>process.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
__author__ = 'Adam R. Smith, Michael Meisinger, Dave Foster <[email protected]>'
import threading
import traceback
import gevent
from gevent import greenlet, Timeout
from gevent.event import Event, AsyncResult
from gevent.queue import Queue
from pyon.core import MSG_HEADER_ACTOR
from pyon.core.bootstrap import CFG
from pyon.core.exception import IonException, ContainerError
from pyon.core.exception import Timeout as IonTimeout
from pyon.core.thread import PyonThreadManager, PyonThread, ThreadManager, PyonThreadTraceback, PyonHeartbeatError
from pyon.datastore.postgresql.pg_util import init_db_stats, get_db_stats, clear_db_stats
from pyon.ion.service import BaseService
from pyon.util.containers import get_ion_ts, get_ion_ts_millis
from pyon.util.log import log
STAT_INTERVAL_LENGTH = 60000 # Interval time for process saturation stats collection
stats_callback = None
class OperationInterruptedException(BaseException):
"""
Interrupted exception. Used by external items timing out execution in the
IonProcessThread's control thread.
Derived from BaseException to specifically avoid try/except Exception blocks,
such as in Publisher's publish_event.
"""
pass
class IonProcessError(StandardError):
pass
class IonProcessThread(PyonThread):
"""
The control part of an ION process.
"""
def __init__(self, target=None, listeners=None, name=None, service=None, cleanup_method=None,
heartbeat_secs=10, **kwargs):
"""
Constructs the control part of an ION process.
Used by the container's IonProcessThreadManager, as part of spawn_process.
@param target A callable to run in the PyonThread. If None (typical), will use the target method
defined in this class.
@param listeners A list of listening endpoints attached to this thread.
@param name The name of this ION process.
@param service An instance of the BaseService derived class which contains the business logic for
the ION process.
@param cleanup_method An optional callable to run when the process is stopping. Runs after all other
notify_stop calls have run. Should take one param, this instance.
@param heartbeat_secs Number of seconds to wait in between heartbeats.
"""
self._startup_listeners = listeners or []
self.listeners = []
self._listener_map = {}
self.name = name
self.service = service
self._cleanup_method = cleanup_method
self.thread_manager = ThreadManager(failure_notify_callback=self._child_failed) # bubbles up to main thread manager
self._dead_children = [] # save any dead children for forensics
self._ctrl_thread = None
self._ctrl_queue = Queue()
self._ready_control = Event()
self._errors = []
self._ctrl_current = None # set to the AR generated by _routing_call when in the context of a call
# processing vs idle time (ms)
self._start_time = None
self._proc_time = 0 # busy time since start
self._proc_time_prior = 0 # busy time at the beginning of the prior interval
self._proc_time_prior2 = 0 # busy time at the beginning of 2 interval's ago
self._proc_interval_num = 0 # interval num of last record
# for heartbeats, used to detect stuck processes
self._heartbeat_secs = heartbeat_secs # amount of time to wait between heartbeats
self._heartbeat_stack = None # stacktrace of last heartbeat
self._heartbeat_time = None # timestamp of heart beat last matching the current op
self._heartbeat_op = None # last operation (by AR)
self._heartbeat_count = 0 # number of times this operation has been seen consecutively
self._log_call_exception = CFG.get_safe("container.process.log_exceptions", False)
self._log_call_dbstats = CFG.get_safe("container.process.log_dbstats", False)
self._warn_call_dbstmt_threshold = CFG.get_safe("container.process.warn_dbstmt_threshold", 0)
PyonThread.__init__(self, target=target, **kwargs)
def heartbeat(self):
"""
Returns a 3-tuple of booleans indicating whether the process is healthy.
Should only be called after the process has been started.
Checks the following:
- All attached endpoints are alive + listening (this means ready)
- The control flow greenlet is alive + listening or processing
@return 3-tuple indicating (listeners ok, ctrl thread ok, heartbeat status). Use all on it for a
boolean indication of success.
"""
listeners_ok = True
for l in self.listeners:
if not (l in self._listener_map and not self._listener_map[l].proc.dead and l.get_ready_event().is_set()):
listeners_ok = False
ctrl_thread_ok = self._ctrl_thread.running
# are we currently processing something?
heartbeat_ok = True
if self._ctrl_current is not None:
st = traceback.extract_stack(self._ctrl_thread.proc.gr_frame)
if self._ctrl_current == self._heartbeat_op:
if st == self._heartbeat_stack:
self._heartbeat_count += 1 # we've seen this before! increment count
# we've been in this for the last X ticks, or it's been X seconds, fail this part of the heartbeat
if self._heartbeat_count > CFG.get_safe('container.timeout.heartbeat_proc_count_threshold', 30) or \
get_ion_ts_millis() - int(self._heartbeat_time) >= CFG.get_safe('container.timeout.heartbeat_proc_time_threshold', 30) * 1000:
heartbeat_ok = False
else:
# it's made some progress
self._heartbeat_count = 1
self._heartbeat_stack = st
self._heartbeat_time = get_ion_ts()
else:
self._heartbeat_op = self._ctrl_current
self._heartbeat_count = 1
self._heartbeat_time = get_ion_ts()
self._heartbeat_stack = st
else:
self._heartbeat_op = None
self._heartbeat_count = 0
#log.debug("%s %s %s", listeners_ok, ctrl_thread_ok, heartbeat_ok)
return (listeners_ok, ctrl_thread_ok, heartbeat_ok)
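# Callers typically collapse the tuple with all(), as target() does below, e.g.
#   if not all(proc.heartbeat()): ...log a warning and dump the stack...
# (illustrative only; `proc` stands for an IonProcessThread instance)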
@property
def time_stats(self):
"""
Returns a 5-tuple of (total time, idle time, processing time, time since prior interval start,
busy since prior interval start), all in ms (int).
"""
now = get_ion_ts_millis()
running_time = now - self._start_time
idle_time = running_time - self._proc_time
cur_interval = now / STAT_INTERVAL_LENGTH
now_since_prior = now - (cur_interval - 1) * STAT_INTERVAL_LENGTH
if cur_interval == self._proc_interval_num:
proc_time_since_prior = self._proc_time-self._proc_time_prior2
elif cur_interval-1 == self._proc_interval_num:
proc_time_since_prior = self._proc_time-self._proc_time_prior
else:
proc_time_since_prior = 0
return (running_time, idle_time, self._proc_time, now_since_prior, proc_time_since_prior)
def _child_failed(self, child):
"""
Callback from gevent as set in the ThreadManager, when a child greenlet fails.
Kills the ION process main greenlet. This propagates the error up to the process supervisor.
"""
# remove the child from the list of children (so we can shut down cleanly)
for x in self.thread_manager.children:
if x.proc == child:
self.thread_manager.children.remove(x)
break
self._dead_children.append(child)
# kill this process's main greenlet. This should be noticed by the container's proc manager
self.proc.kill(child.exception)
def add_endpoint(self, listener, activate=True):
"""
Adds a listening endpoint to be managed by this ION process.
Spawns the listen loop and sets the routing call to synchronize incoming messages
here. If this process hasn't been started yet, adds it to the list of listeners
to start on startup.
@param activate If True (default), start consuming from listener
"""
if self.proc:
listener.routing_call = self._routing_call
if self.name:
svc_name = "unnamed-service"
if self.service is not None and hasattr(self.service, 'name'):
svc_name = self.service.name
listen_thread_name = "%s-%s-listen-%s" % (svc_name, self.name, len(self.listeners)+1)
else:
listen_thread_name = "unknown-listener-%s" % (len(self.listeners)+1)
listen_thread = self.thread_manager.spawn(listener.listen, thread_name=listen_thread_name, activate=activate)
listen_thread.proc._glname = "ION Proc listener %s" % listen_thread_name
self._listener_map[listener] = listen_thread
self.listeners.append(listener)
else:
self._startup_listeners.append(listener)
def remove_endpoint(self, listener):
"""
Removes a listening endpoint from management by this ION process.
If the endpoint is unknown to this ION process, raises an error.
@return The PyonThread running the listen loop, if it exists. You are
responsible for closing it when appropriate.
"""
if listener in self.listeners:
self.listeners.remove(listener)
return self._listener_map.pop(listener)
elif listener in self._startup_listeners:
self._startup_listeners.remove(listener)
return None
else:
raise IonProcessError("Cannot remove unrecognized listener: %s" % listener)
def target(self, *args, **kwargs):
"""
Entry point for the main process greenlet.
Set up the base properties for this process (mainly the control thread).
"""
if self.name:
threading.current_thread().name = "%s-target" % self.name
# start time
self._start_time = get_ion_ts_millis()
self._proc_interval_num = self._start_time / STAT_INTERVAL_LENGTH
# spawn control flow loop
self._ctrl_thread = self.thread_manager.spawn(self._control_flow)
self._ctrl_thread.proc._glname = "ION Proc CL %s" % self.name
# wait on control flow loop, heartbeating as appropriate
while not self._ctrl_thread.ev_exit.wait(timeout=self._heartbeat_secs):
hbst = self.heartbeat()
if not all(hbst):
log.warn("Heartbeat status for process %s returned %s", self, hbst)
if self._heartbeat_stack is not None:
stack_out = "".join(traceback.format_list(self._heartbeat_stack))
else:
stack_out = "N/A"
#raise PyonHeartbeatError("Heartbeat failed: %s, stacktrace:\n%s" % (hbst, stack_out))
log.warn("Heartbeat failed: %s, stacktrace:\n%s", hbst, stack_out)
# this is almost a no-op as we don't fall out of the above loop without
# exiting the ctrl_thread, but having this line here makes testing much easier.
self._ctrl_thread.join()
def _routing_call(self, call, context, *callargs, **callkwargs):
"""
Endpoints call into here to synchronize across the entire IonProcess.
Returns immediately with an AsyncResult that can be waited on. Calls
are made by the loop in _control_flow. We pass in the calling greenlet so
exceptions are raised in the correct context.
@param call The call to be made within this ION process's calling greenlet.
@param callargs The positional args to pass to the call.
@param callkwargs The keyword args to pass to the call.
@param context Optional process-context (usually the headers of the incoming call) to be
set. Process-context is greenlet-local, and since we're crossing greenlet
boundaries, we must set it again in the ION process' calling greenlet.
"""
ar = AsyncResult()
if len(callargs) == 0 and len(callkwargs) == 0:
log.trace("_routing_call got no arguments for the call %s, check your call's parameters", call)
self._ctrl_queue.put((greenlet.getcurrent(), ar, call, callargs, callkwargs, context))
return ar
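# Illustrative use by an endpoint (hypothetical names, not part of this module):
#   ar = ion_process._routing_call(handler, headers, *msg_args)
#   result = ar.get()   # blocks until _control_flow services the queued call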
def has_pending_call(self, ar):
"""
Returns true if the call (keyed by the AsyncResult returned by _routing_call) is still pending.
"""
for _, qar, _, _, _, _ in self._ctrl_queue.queue:
if qar == ar:
return True
return False
def _cancel_pending_call(self, ar):
"""
Cancels a pending call (keyed by the AsyncResult returned by _routing_call).
@return True if the call was truly pending.
"""
if self.has_pending_call(ar):
ar.set(False)
return True
return False
def _interrupt_control_thread(self):
"""
Signal the control flow thread that it needs to abort processing, likely due to a timeout.
"""
self._ctrl_thread.proc.kill(exception=OperationInterruptedException, block=False)
def cancel_or_abort_call(self, ar):
"""
Either cancels a future pending call, or aborts the current processing if the given AR is unset.
The pending call is keyed by the AsyncResult returned by _routing_call.
"""
if not self._cancel_pending_call(ar) and not ar.ready():
self._interrupt_control_thread()
def _control_flow(self):
"""
Entry point for process control thread of execution.
This method is run by the control greenlet for each ION process. Listeners attached
to the process, either RPC Servers or Subscribers, synchronize calls to the process
by placing call requests into the queue by calling _routing_call.
This method blocks until there are calls to be made in the synchronized queue, and
then makes those calls from within this greenlet. Any exception raised is caught and re-raised
in the greenlet that originally scheduled the call. If successful, the AsyncResult
created at scheduling time is set with the result of the call.
"""
svc_name = getattr(self.service, "name", "unnamed-service") if self.service else "unnamed-service"
proc_id = getattr(self.service, "id", "unknown-pid") if self.service else "unknown-pid"
if self.name:
threading.current_thread().name = "%s-%s" % (svc_name, self.name)
thread_base_name = threading.current_thread().name
self._ready_control.set()
for calltuple in self._ctrl_queue:
calling_gl, ar, call, callargs, callkwargs, context = calltuple
request_id = (context or {}).get("request-id", None)
if request_id:
threading.current_thread().name = thread_base_name + "-" + str(request_id)
#log.debug("control_flow making call: %s %s %s (has context: %s)", call, callargs, callkwargs, context is not None)
res = None
start_proc_time = get_ion_ts_millis()
self._record_proc_time(start_proc_time)
# check context for expiration
if context is not None and 'reply-by' in context:
if start_proc_time >= int(context['reply-by']):
log.info("control_flow: attempting to process message already exceeding reply-by, ignore")
# raise a timeout in the calling thread to allow endpoints to continue processing
e = IonTimeout("Reply-by time has already occurred (reply-by: %s, op start time: %s)" % (context['reply-by'], start_proc_time))
calling_gl.kill(exception=e, block=False)
continue
# If ar is set, means it is cancelled
if ar.ready():
log.info("control_flow: attempting to process message that has been cancelled, ignore")
continue
init_db_stats()
try:
# ******************************************************************
# ****** THIS IS WHERE THE RPC OPERATION/SERVICE CALL IS MADE ******
with self.service.push_context(context), \
self.service.container.context.push_context(context):
self._ctrl_current = ar
res = call(*callargs, **callkwargs)
# ****** END CALL, EXCEPTION HANDLING FOLLOWS ******
# ******************************************************************
except OperationInterruptedException:
# endpoint layer takes care of response as it's the one that caused this
log.debug("Operation interrupted")
pass
except Exception as e:
if self._log_call_exception:
log.exception("PROCESS exception: %s" % e.message)
# Raise the exception in the calling greenlet.
# Try decorating the args of the exception with the true traceback -
# this should be reported by ThreadManager._child_failed
exc = PyonThreadTraceback("IonProcessThread _control_flow caught an exception "
"(call: %s, *args %s, **kwargs %s, context %s)\n"
"True traceback captured by IonProcessThread' _control_flow:\n\n%s" % (
call, callargs, callkwargs, context, traceback.format_exc()))
e.args = e.args + (exc,)
if isinstance(e, (TypeError, IonException)):
# Pass through known process exceptions, in particular IonException
calling_gl.kill(exception=e, block=False)
else:
# Otherwise, wrap unknown, forward and hopefully we can continue on our way
self._errors.append((call, callargs, callkwargs, context, e, exc))
log.warn(exc)
log.warn("Attempting to continue...")
# Note: An overly large exception string will crash the container (when passed on as a msg header).
exception_str = str(exc)
if len(exception_str) > 10000:
exception_str = (
"Exception string representation too large. "
"Begin and end of the exception:\n"
+ exception_str[:2000] + "\n...\n" + exception_str[-2000:]
)
calling_gl.kill(exception=ContainerError(exception_str), block=False)
finally:
try:
# Compute statistics
self._compute_proc_stats(start_proc_time)
db_stats = get_db_stats()
if db_stats:
if self._warn_call_dbstmt_threshold > 0 and db_stats.get("count.all", 0) >= self._warn_call_dbstmt_threshold:
stats_str = ", ".join("{}={}".format(k, db_stats[k]) for k in sorted(db_stats.keys()))
log.warn("PROC_OP '%s.%s' EXCEEDED DB THRESHOLD. stats=%s", svc_name, call.__name__, stats_str)
elif self._log_call_dbstats:
stats_str = ", ".join("{}={}".format(k, db_stats[k]) for k in sorted(db_stats.keys()))
log.info("PROC_OP '%s.%s' DB STATS: %s", svc_name, call.__name__, stats_str)
clear_db_stats()
if stats_callback:
stats_callback(proc_id=proc_id, proc_name=self.name, svc=svc_name, op=call.__name__,
request_id=request_id, context=context,
db_stats=db_stats, proc_stats=self.time_stats, result=res, exc=None)
except Exception:
log.exception("Error computing process call stats")
self._ctrl_current = None
threading.current_thread().name = thread_base_name
# Set response in AsyncEvent of caller (endpoint greenlet)
ar.set(res)
def _record_proc_time(self, cur_time):
""" Keep the _proc_time of the prior and prior-prior intervals for stats computation<|fim▁hole|> cur_interval = cur_time / STAT_INTERVAL_LENGTH
if cur_interval == self._proc_interval_num:
# We're still in the same interval - no update
pass
elif cur_interval-1 == self._proc_interval_num:
# Record the stats from the prior interval
self._proc_interval_num = cur_interval
self._proc_time_prior2 = self._proc_time_prior
self._proc_time_prior = self._proc_time
elif cur_interval-1 > self._proc_interval_num:
# We skipped an entire interval - everything is prior2
self._proc_interval_num = cur_interval
self._proc_time_prior2 = self._proc_time
self._proc_time_prior = self._proc_time
def _compute_proc_stats(self, start_proc_time):
cur_time = get_ion_ts_millis()
self._record_proc_time(cur_time)
proc_time = cur_time - start_proc_time
self._proc_time += proc_time
def start_listeners(self):
"""
Starts all listeners in managed greenlets.
Usually called by the ProcManager, unless using IonProcess manually.
"""
try:
# disable normal error reporting, this method should only be called from startup
self.thread_manager._failure_notify_callback = None
# spawn all listeners in startup listeners (from initializer, or added later)
for listener in self._startup_listeners:
self.add_endpoint(listener)
with Timeout(seconds=CFG.get_safe('container.messaging.timeout.start_listener', 30)):
gevent.wait([x.get_ready_event() for x in self.listeners])
except Timeout:
# remove failed endpoints before reporting failure above
for listener, proc in self._listener_map.iteritems():
if proc.proc.dead:
log.info("removed dead listener: %s", listener)
self.listeners.remove(listener)
self.thread_manager.children.remove(proc)
raise IonProcessError("start_listeners did not complete in expected time")
finally:
self.thread_manager._failure_notify_callback = self._child_failed
def _notify_stop(self):
"""
Called when the process is about to be shut down.
Instructs all listeners to close, puts a StopIteration into the synchronized queue,
and waits for the listeners to close and for the control queue to exit.
"""
for listener in self.listeners:
try:
listener.close()
except Exception as ex:
tb = traceback.format_exc()
log.warn("Could not close listener, attempting to ignore: %s\nTraceback:\n%s", ex, tb)
self._ctrl_queue.put(StopIteration)
# wait_children will join them and then get() them, which may raise an exception if any of them
# died with an exception.
self.thread_manager.wait_children(30)
PyonThread._notify_stop(self)
# run the cleanup method if we have one
if self._cleanup_method is not None:
try:
self._cleanup_method(self)
except Exception as ex:
log.warn("Cleanup method error, attempting to ignore: %s\nTraceback: %s", ex, traceback.format_exc())
def get_ready_event(self):
"""
Returns an Event that is set when the control greenlet is up and running.
"""
return self._ready_control
class IonProcessThreadManager(PyonThreadManager):
def _create_thread(self, target=None, **kwargs):
return IonProcessThread(target=target, heartbeat_secs=self.heartbeat_secs, **kwargs)
# ---------------------------------------------------------------------------------------------------
# Process type variants
class StandaloneProcess(BaseService):
"""
A process is an ION process of type "standalone" that has an incoming messaging
attachment for the process and operations as defined in a service YML.
"""
process_type = "standalone"
class SimpleProcess(BaseService):
"""
A simple process is an ION process of type "simple" that has no incoming messaging
attachment.
"""
process_type = "simple"
class ImmediateProcess(BaseService):
"""
An immediate process is an ION process of type "immediate" that does its action in
the on_init and on_start hooks, and is terminated immediately after completion.
Has no messaging attachment.
"""
process_type = "immediate"
class StreamProcess(BaseService):
"""
Base class for a stream process.
Such a process handles a sequence of otherwise unconstrained messages, resulting from a
subscription. There are no operations.
"""
process_type = "stream_process"
def call_process(self, message, stream_route, stream_id):
"""
Handles pre-processing of packet and process work
"""
self.process(message)
def process(self, message):
"""
Process a message as arriving based on a subscription.
"""
pass
# ---------------------------------------------------------------------------------------------------
# Process helpers
def get_ion_actor_id(process):
"""Given an ION process, return the ion-actor-id from the context, if set and present"""
ion_actor_id = None
if process:
ctx = process.get_context()
ion_actor_id = ctx.get(MSG_HEADER_ACTOR, None) if ctx else None
return ion_actor_id
def set_process_stats_callback(stats_cb):
""" Sets a callback function (hook) to push stats after a process operation call. """
global stats_callback
if stats_cb is None:
pass
elif stats_callback:
log.warn("Stats callback already defined")
stats_callback = stats_cb<|fim▁end|> | """ |
<|file_name|>owcreateclass.py<|end_file_name|><|fim▁begin|>"""Widget for creating classes from non-numeric attribute by substrings"""
import re
from itertools import count
import numpy as np
from AnyQt.QtWidgets import QGridLayout, QLabel, QLineEdit, QSizePolicy
from AnyQt.QtCore import QSize, Qt
from Orange.data import StringVariable, DiscreteVariable, Domain
from Orange.data.table import Table
from Orange.statistics.util import bincount
from Orange.preprocess.transformation import Transformation, Lookup
from Orange.widgets import gui, widget
from Orange.widgets.settings import DomainContextHandler, ContextSetting
from Orange.widgets.utils.itemmodels import DomainModel
from Orange.widgets.widget import Msg
def map_by_substring(a, patterns, case_sensitive, match_beginning):
"""
Map values in a using a list of patterns. The patterns are considered in
order of appearance.
Args:
a (np.array): input array of `dtype` `str`
patterns (list of str): list of strings
case_sensitive (bool): case sensitive match
match_beginning (bool): match only at the beginning of the string
Returns:
np.array of floats representing indices of matched patterns
"""
res = np.full(len(a), np.nan)
if not case_sensitive:
a = np.char.lower(a)
patterns = (pattern.lower() for pattern in patterns)
for val_idx, pattern in reversed(list(enumerate(patterns))):
indices = np.char.find(a, pattern)
matches = indices == 0 if match_beginning else indices != -1
res[matches] = val_idx
return res
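# Worked example (taken from the class docstrings below): with patterns
# ["abc", "a", "bc", ""] and data ["abcd", "aa", "bcd", "rabc", "x"], the first
# matching pattern in order of appearance wins, giving indices [0., 1., 2., 0., 3.].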
class ValueFromStringSubstring(Transformation):
"""
Transformation that computes a discrete variable from a string variable by
pattern matching.
Given patterns `["abc", "a", "bc", ""]`, string data
`["abcd", "aa", "bcd", "rabc", "x"]` is transformed to values of the new
attribute with indices `[0, 1, 2, 0, 3]`.
Args:
variable (:obj:`~Orange.data.StringVariable`): the original variable
patterns (list of str): list of string patterns
case_sensitive (bool, optional): if set to `True`, the match is case
sensitive
match_beginning (bool, optional): if set to `True`, the pattern must
appear at the beginning of the string
"""
def __init__(self, variable, patterns,
case_sensitive=False, match_beginning=False):
super().__init__(variable)
self.patterns = patterns
self.case_sensitive = case_sensitive
self.match_beginning = match_beginning
def transform(self, c):
"""
Transform the given data.
Args:
c (np.array): an array of type that can be cast to dtype `str`
Returns:
np.array of floats representing indices of matched patterns
"""
nans = np.equal(c, None)
c = c.astype(str)
c[nans] = ""
res = map_by_substring(
c, self.patterns, self.case_sensitive, self.match_beginning)
res[nans] = np.nan
return res
<|fim▁hole|>
class ValueFromDiscreteSubstring(Lookup):
"""
Transformation that computes a discrete variable from discrete variable by
pattern matching.
Say that the original attribute has values
`["abcd", "aa", "bcd", "rabc", "x"]`. Given patterns
`["abc", "a", "bc", ""]`, the values are mapped to the values of the new
attribute with indices`[0, 1, 2, 0, 3]`.
Args:
variable (:obj:`~Orange.data.DiscreteVariable`): the original variable
patterns (list of str): list of string patterns
case_sensitive (bool, optional): if set to `True`, the match is case
sensitive
match_beginning (bool, optional): if set to `True`, the pattern must
appear at the beginning of the string
"""
def __init__(self, variable, patterns,
case_sensitive=False, match_beginning=False):
super().__init__(variable, [])
self.case_sensitive = case_sensitive
self.match_beginning = match_beginning
self.patterns = patterns # Finally triggers computation of the lookup
def __setattr__(self, key, value):
"""__setattr__ is overloaded to recompute the lookup table when the
patterns, the original attribute or the flags change."""
super().__setattr__(key, value)
if hasattr(self, "patterns") and \
key in ("case_sensitive", "match_beginning", "patterns",
"variable"):
self.lookup_table = map_by_substring(
self.variable.values, self.patterns,
self.case_sensitive, self.match_beginning)
class OWCreateClass(widget.OWWidget):
name = "Create Class"
description = "Create class attribute from a string attribute"
icon = "icons/CreateClass.svg"
category = "Data"
keywords = ["data"]
inputs = [("Data", Table, "set_data")]
outputs = [("Data", Table)]
want_main_area = False
settingsHandler = DomainContextHandler()
attribute = ContextSetting(None)
class_name = ContextSetting("class")
rules = ContextSetting({})
match_beginning = ContextSetting(False)
case_sensitive = ContextSetting(False)
TRANSFORMERS = {StringVariable: ValueFromStringSubstring,
DiscreteVariable: ValueFromDiscreteSubstring}
class Warning(widget.OWWidget.Warning):
no_nonnumeric_vars = Msg("Data contains only numeric variables.")
def __init__(self):
super().__init__()
self.data = None
# The following lists are of the same length as self.active_rules
#: list of pairs with counts of matches for each pattern when the
# patterns are applied in order and when applied on the entire set,
# disregarding the preceding patterns
self.match_counts = []
#: list of list of QLineEdit: line edit pairs for each pattern
self.line_edits = []
#: list of QPushButton: list of remove buttons
self.remove_buttons = []
#: list of list of QLabel: pairs of labels with counts
self.counts = []
combo = gui.comboBox(
self.controlArea, self, "attribute", label="From column: ",
box=True, orientation=Qt.Horizontal, callback=self.update_rules,
model=DomainModel(valid_types=(StringVariable, DiscreteVariable)))
# Don't use setSizePolicy keyword argument here: it applies to box,
# not the combo
combo.setSizePolicy(QSizePolicy.MinimumExpanding, QSizePolicy.Fixed)
patternbox = gui.vBox(self.controlArea, box=True)
#: QWidget: the box that contains the remove buttons, line edits and
# count labels. The lines are added and removed dynamically.
self.rules_box = rules_box = QGridLayout()
patternbox.layout().addLayout(self.rules_box)
box = gui.hBox(patternbox)
gui.button(
box, self, "+", callback=self.add_row, autoDefault=False, flat=True,
minimumSize=(QSize(20, 20)))
gui.rubber(box)
self.rules_box.setColumnMinimumWidth(1, 70)
self.rules_box.setColumnMinimumWidth(0, 10)
self.rules_box.setColumnStretch(0, 1)
self.rules_box.setColumnStretch(1, 1)
self.rules_box.setColumnStretch(2, 100)
rules_box.addWidget(QLabel("Name"), 0, 1)
rules_box.addWidget(QLabel("Substring"), 0, 2)
rules_box.addWidget(QLabel("#Instances"), 0, 3, 1, 2)
self.update_rules()
gui.lineEdit(
self.controlArea, self, "class_name",
label="Name for the new class:",
box=True, orientation=Qt.Horizontal)
optionsbox = gui.vBox(self.controlArea, box=True)
gui.checkBox(
optionsbox, self, "match_beginning", "Match only at the beginning",
callback=self.options_changed)
gui.checkBox(
optionsbox, self, "case_sensitive", "Case sensitive",
callback=self.options_changed)
layout = QGridLayout()
gui.widgetBox(self.controlArea, orientation=layout)
for i in range(3):
layout.setColumnStretch(i, 1)
layout.addWidget(self.report_button, 0, 0)
apply = gui.button(None, self, "Apply", autoDefault=False,
callback=self.apply)
layout.addWidget(apply, 0, 2)
# TODO: Resizing upon changing the number of rules does not work
self.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Maximum)
@property
def active_rules(self):
"""
Returns the class names and patterns corresponding to the currently
selected attribute. If the attribute is not yet in the dictionary,
set the default.
"""
return self.rules.setdefault(self.attribute and self.attribute.name,
[["", ""], ["", ""]])
def rules_to_edits(self):
"""Fill the line edites with the rules from the current settings."""
for editr, textr in zip(self.line_edits, self.active_rules):
for edit, text in zip(editr, textr):
edit.setText(text)
def set_data(self, data):
"""Input data signal handler."""
self.closeContext()
self.rules = {}
self.data = data
model = self.controls.attribute.model()
model.set_domain(data and data.domain)
self.Warning.no_nonnumeric_vars(shown=data is not None and not model)
if not model:
self.attribute = None
self.send("Data", None)
return
self.attribute = model[0]
self.openContext(data)
self.update_rules()
self.apply()
def update_rules(self):
"""Called when the rules are changed: adjust the number of lines in
the form and fill them, update the counts. The widget does not have
auto-apply."""
self.adjust_n_rule_rows()
self.rules_to_edits()
self.update_counts()
# TODO: Indicator that changes need to be applied
def options_changed(self):
self.update_counts()
def adjust_n_rule_rows(self):
"""Add or remove lines if needed and fix the tab order."""
def _add_line():
self.line_edits.append([])
n_lines = len(self.line_edits)
for coli in range(1, 3):
edit = QLineEdit()
self.line_edits[-1].append(edit)
self.rules_box.addWidget(edit, n_lines, coli)
edit.textChanged.connect(self.sync_edit)
button = gui.button(
None, self, label='×', flat=True, height=20,
styleSheet='* {font-size: 16pt; color: silver}'
'*:hover {color: black}',
autoDefault=False, callback=self.remove_row)
button.setMinimumSize(QSize(12, 20))
self.remove_buttons.append(button)
self.rules_box.addWidget(button, n_lines, 0)
self.counts.append([])
for coli, kwargs in enumerate(
(dict(alignment=Qt.AlignRight),
dict(alignment=Qt.AlignLeft, styleSheet="color: gray"))):
label = QLabel(**kwargs)
self.counts[-1].append(label)
self.rules_box.addWidget(label, n_lines, 3 + coli)
def _remove_line():
for edit in self.line_edits.pop():
edit.deleteLater()
self.remove_buttons.pop().deleteLater()
for label in self.counts.pop():
label.deleteLater()
def _fix_tab_order():
prev = None
for row, rule in zip(self.line_edits, self.active_rules):
for col_idx, edit in enumerate(row):
edit.row, edit.col_idx = rule, col_idx
if prev is not None:
self.setTabOrder(prev, edit)
prev = edit
n = len(self.active_rules)
while n > len(self.line_edits):
_add_line()
while len(self.line_edits) > n:
_remove_line()
_fix_tab_order()
def add_row(self):
"""Append a new row at the end."""
self.active_rules.append(["", ""])
self.adjust_n_rule_rows()
self.update_counts()
def remove_row(self):
"""Remove a row."""
remove_idx = self.remove_buttons.index(self.sender())
del self.active_rules[remove_idx]
self.update_rules()
self.update_counts()
def sync_edit(self, text):
"""Handle changes in line edits: update the active rules and counts"""
edit = self.sender()
edit.row[edit.col_idx] = text
self.update_counts()
def class_labels(self):
"""Construct a list of class labels. Empty labels are replaced with
C1, C2, C3. If C<n> already appears in the list of values given by
the user, the labels start at C<n+1> instead.
"""
largest_c = max((int(label[1:]) for label, _ in self.active_rules
if re.match("^C\\d+", label)),
default=0)
class_count = count(largest_c + 1)
return [label_edit.text() or "C{}".format(next(class_count))
for label_edit, _ in self.line_edits]
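# Example of the rule above: if the user-entered labels are ["C2", "", ""],
# largest_c is 2, so the empty labels are filled in as "C3" and "C4".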
def update_counts(self):
"""Recompute and update the counts of matches."""
def _matcher(strings, pattern):
"""Return indices of strings into patterns; consider case
sensitivity and matching at the beginning. The given strings are
assumed to be in lower case if match is case insensitive. Patterns
are fixed on the fly."""
if not self.case_sensitive:
pattern = pattern.lower()
indices = np.char.find(strings, pattern.strip())
return indices == 0 if self.match_beginning else indices != -1
def _lower_if_needed(strings):
return strings if self.case_sensitive else np.char.lower(strings)
def _string_counts():
"""
Generate pairs of arrays for each rule until running out of data
instances. np.sum over the two arrays in each pair gives the
number of matches of the remaining instances (considering the
order of patterns) and of the original data.
For _string_counts, the arrays contain bool masks referring to the
original data
"""
nonlocal data
data = data.astype(str)
data = data[~np.char.equal(data, "")]
data = _lower_if_needed(data)
remaining = np.array(data)
for _, pattern in self.active_rules:
matching = _matcher(remaining, pattern)
total_matching = _matcher(data, pattern)
yield matching, total_matching
remaining = remaining[~matching]
if len(remaining) == 0:
break
def _discrete_counts():
"""
Generate pairs similar to _string_counts, except that the arrays
contain bin counts for the attribute's values matching the pattern.
"""
attr_vals = np.array(attr.values)
attr_vals = _lower_if_needed(attr_vals)
bins = bincount(data, max_val=len(attr.values) - 1)[0]
remaining = np.array(bins)
for _, pattern in self.active_rules:
matching = _matcher(attr_vals, pattern)
yield remaining[matching], bins[matching]
remaining[matching] = 0
if not np.any(remaining):
break
def _clear_labels():
"""Clear all labels"""
for lab_matched, lab_total in self.counts:
lab_matched.setText("")
lab_total.setText("")
def _set_labels():
"""Set the labels to show the counts"""
for (n_matched, n_total), (lab_matched, lab_total), (lab, patt) in \
zip(self.match_counts, self.counts, self.active_rules):
n_before = n_total - n_matched
lab_matched.setText("{}".format(n_matched))
if n_before and (lab or patt):
lab_total.setText("+ {}".format(n_before))
if n_matched:
tip = "{} of the {} matching instances are already " \
"covered above".format(n_before, n_total)
else:
tip = "All matching instances are already covered above"
lab_total.setToolTip(tip)
lab_matched.setToolTip(tip)
def _set_placeholders():
"""Set placeholders for empty edit lines"""
matches = [n for n, _ in self.match_counts] + \
[0] * len(self.line_edits)
for n_matched, (_, patt) in zip(matches, self.line_edits):
if not patt.text():
patt.setPlaceholderText(
"(remaining instances)" if n_matched else "(unused)")
labels = self.class_labels()
for label, (lab_edit, _) in zip(labels, self.line_edits):
if not lab_edit.text():
lab_edit.setPlaceholderText(label)
_clear_labels()
attr = self.attribute
if attr is None:
return
counters = {StringVariable: _string_counts,
DiscreteVariable: _discrete_counts}
data = self.data.get_column_view(attr)[0]
self.match_counts = [[int(np.sum(x)) for x in matches]
for matches in counters[type(attr)]()]
_set_labels()
_set_placeholders()
def apply(self):
"""Output the transformed data."""
if not self.attribute:
self.send("Data", None)
return
domain = self.data.domain
rules = self.active_rules
# Transposition + stripping
valid_rules = [label or pattern or n_matches
for (label, pattern), n_matches in
zip(rules, self.match_counts)]
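# A rule is kept if it has a label, a pattern, or at least one matching instance;
# the patterns and names below are filtered with this mask.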
patterns = [pattern
for (_, pattern), valid in zip(rules, valid_rules)
if valid]
names = [name for name, valid in zip(self.class_labels(), valid_rules)
if valid]
transformer = self.TRANSFORMERS[type(self.attribute)]
compute_value = transformer(
self.attribute, patterns, self.case_sensitive, self.match_beginning)
new_class = DiscreteVariable(
self.class_name, names, compute_value=compute_value)
new_domain = Domain(
domain.attributes, new_class, domain.metas + domain.class_vars)
new_data = Table(new_domain, self.data)
self.send("Data", new_data)
def send_report(self):
def _cond_part():
rule = "<b>{}</b> ".format(class_name)
if patt:
rule += "if <b>{}</b> contains <b>{}</b>".format(
self.attribute.name, patt)
else:
rule += "otherwise"
return rule
def _count_part():
if not n_matched:
return "all {} matching instances are already covered " \
"above".format(n_total)
elif n_matched < n_total and patt:
return "{} matching instances (+ {} that are already " \
"covered above".format(n_matched, n_total - n_matched)
else:
return "{} matching instances".format(n_matched)
if not self.attribute:
return
self.report_items("Input", [("Source attribute", self.attribute.name)])
output = ""
names = self.class_labels()
for (n_matched, n_total), class_name, (lab, patt) in \
zip(self.match_counts, names, self.active_rules):
if lab or patt or n_total:
output += "<li>{}; {}</li>".format(_cond_part(), _count_part())
if output:
self.report_items("Output", [("Class name", self.class_name)])
self.report_raw("<ol>{}</ol>".format(output))
def main(): # pragma: no cover
"""Simple test for manual inspection of the widget"""
import sys
from AnyQt.QtWidgets import QApplication
a = QApplication(sys.argv)
table = Table("zoo")
ow = OWCreateClass()
ow.show()
ow.set_data(table)
a.exec()
ow.saveSettings()
if __name__ == "__main__": # pragma: no cover
main()<|fim▁end|> | |
<|file_name|>useragent_test.go<|end_file_name|><|fim▁begin|>// Copyright (C) 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|>
import (
"testing"
"github.com/google/gapid/core/assert"
"github.com/google/gapid/core/log"
"github.com/google/gapid/core/os/device"
)
var (
win7 = &device.Configuration{
OS: &device.OS{
Kind: device.Windows,
MajorVersion: 6, MinorVersion: 1, PointVersion: 5,
},
}
win10 = &device.Configuration{
OS: &device.OS{
Kind: device.Windows,
MajorVersion: 10, MinorVersion: 0, PointVersion: 5,
},
}
macOS = &device.Configuration{
OS: &device.OS{
Kind: device.OSX,
MajorVersion: 10, MinorVersion: 12, PointVersion: 6,
},
}
linux = &device.Configuration{
OS: &device.OS{
Kind: device.Linux,
MajorVersion: 1, MinorVersion: 2, PointVersion: 3,
},
}
)
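// Note that only the Windows and macOS expectations embed the OS version; the Linux
// fixture above maps to a bare "Linux" platform token.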
func TestUseragent(t *testing.T) {
ctx := log.Testing(t)
version := ApplicationInfo{"GAPID", 1, 2, 3}
for _, test := range []struct {
name string
cfg *device.Configuration
expected string
}{
{"win7", win7, "GAPID/1.2.3 (Windows NT 6.1)"},
{"win10", win10, "GAPID/1.2.3 (Windows NT 10.0)"},
{"macOS", macOS, "GAPID/1.2.3 (Macintosh; Intel Mac OS X 10_12_6)"},
{"linux", linux, "GAPID/1.2.3 (Linux)"},
} {
assert.For(ctx, test.name).ThatString(UserAgent(test.cfg, version)).Equals(test.expected)
}
}<|fim▁end|> | // See the License for the specific language governing permissions and
// limitations under the License.
package net |
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|># encoding: utf-8
# pylint: disable=redefined-outer-name,missing-docstring
import pytest
from tests import utils
from app import create_app
@pytest.yield_fixture(scope='session')
def flask_app():
app = create_app(flask_config_name='testing')
from app.extensions import db
with app.app_context():
db.create_all()
yield app
db.drop_all()
@pytest.yield_fixture()
def db(flask_app):
# pylint: disable=unused-argument,invalid-name
from app.extensions import db as db_instance
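    # Hand the shared SQLAlchemy db object to the test, then roll back any
    # uncommitted changes the test left in the session.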
yield db_instance
db_instance.session.rollback()
@pytest.fixture(scope='session')
def flask_app_client(flask_app):
flask_app.test_client_class = utils.AutoAuthFlaskClient
flask_app.response_class = utils.JSONResponse
return flask_app.test_client()
@pytest.yield_fixture(scope='session')
def regular_user(flask_app):
# pylint: disable=invalid-name,unused-argument
from app.extensions import db
regular_user_instance = utils.generate_user_instance(
username='regular_user'
)
db.session.add(regular_user_instance)
db.session.commit()
yield regular_user_instance
db.session.delete(regular_user_instance)
db.session.commit()
@pytest.yield_fixture(scope='session')
def readonly_user(flask_app):
# pylint: disable=invalid-name,unused-argument
from app.extensions import db
readonly_user_instance = utils.generate_user_instance(<|fim▁hole|>
db.session.add(readonly_user_instance)
db.session.commit()
yield readonly_user_instance
db.session.delete(readonly_user_instance)
db.session.commit()
@pytest.yield_fixture(scope='session')
def admin_user(flask_app):
# pylint: disable=invalid-name,unused-argument
from app.extensions import db
admin_user_instance = utils.generate_user_instance(
username='admin_user',
is_admin=True
)
db.session.add(admin_user_instance)
db.session.commit()
yield admin_user_instance
db.session.delete(admin_user_instance)
db.session.commit()
@pytest.yield_fixture(scope='session')
def internal_user(flask_app):
# pylint: disable=invalid-name,unused-argument
from app.extensions import db
internal_user_instance = utils.generate_user_instance(
username='internal_user',
is_regular_user=False,
is_admin=False,
is_active=True,
is_internal=True
)
db.session.add(internal_user_instance)
db.session.commit()
yield internal_user_instance
db.session.delete(internal_user_instance)
db.session.commit()<|fim▁end|> | username='readonly_user',
is_regular_user=False
) |
<|file_name|>macros.rs<|end_file_name|><|fim▁begin|>macro_rules! consume_spaces {
($lexer:expr) => (
loop {
match $lexer.peek_next() {
Some(lexer::Token::Space) => (),
_ => break,
}
// Consume the actual space.
$lexer.next();
}
)
}
macro_rules! get_lex_token {
($lexer:expr) => (
{
let token = $lexer.next();
if let None = token {
return Err(ParsingError::new("Unexpected end of the data."));
}
token.unwrap().consume()
}
)
}
macro_rules! consume_lex_token {
($lexer:expr, $val:pat) => (
{
let (token, pos) = get_lex_token!($lexer);
match token {
$val => (),
v => {
let msg = format!("Expected: {:?}, found: {:?}.", stringify!($val), v);
return Err(
ParsingError::new(&msg).position(pos),
);
}
}
}<|fim▁hole|><|fim▁end|> | )
} |
<|file_name|>h1.rs<|end_file_name|><|fim▁begin|>//! Adapts the HTTP/1.1 implementation into the `HttpMessage` API.
use std::borrow::Cow;
use std::cmp::min;
use std::fmt;
use std::io::{self, Write, BufWriter, BufRead, Read};
use std::net::Shutdown;
#[cfg(feature = "timeouts")]
use std::time::Duration;
use httparse;
use buffer::BufReader;
use Error;
use header::{Headers, ContentLength, TransferEncoding};
use header::Encoding::Chunked;
use method::{Method};
use net::{NetworkConnector, NetworkStream};
use status::StatusCode;
use version::HttpVersion;
use version::HttpVersion::{Http10, Http11};
use uri::RequestUri;
use self::HttpReader::{SizedReader, ChunkedReader, EofReader, EmptyReader};
use self::HttpWriter::{ChunkedWriter, SizedWriter, EmptyWriter, ThroughWriter};
use http::{
RawStatus,
Protocol,
HttpMessage,
RequestHead,
ResponseHead,
};
use header;
use version;
/// An implementation of the `HttpMessage` trait for HTTP/1.1.
#[derive(Debug)]
pub struct Http11Message {
method: Option<Method>,
stream: Option<Box<NetworkStream + Send>>,
writer: Option<HttpWriter<BufWriter<Box<NetworkStream + Send>>>>,
reader: Option<HttpReader<BufReader<Box<NetworkStream + Send>>>>,
}
impl Write for Http11Message {
#[inline]
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
match self.writer {
None => Err(io::Error::new(io::ErrorKind::Other,
"Not in a writable state")),
Some(ref mut writer) => writer.write(buf),
}
}
#[inline]
fn flush(&mut self) -> io::Result<()> {
match self.writer {
None => Err(io::Error::new(io::ErrorKind::Other,
"Not in a writable state")),
Some(ref mut writer) => writer.flush(),
}
}
}
impl Read for Http11Message {
#[inline]
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
match self.reader {
None => Err(io::Error::new(io::ErrorKind::Other,
"Not in a readable state")),
Some(ref mut reader) => reader.read(buf),
}
}
}
impl HttpMessage for Http11Message {
fn set_outgoing(&mut self, mut head: RequestHead) -> ::Result<RequestHead> {
let stream = match self.stream.take() {
Some(stream) => stream,
None => {
return Err(From::from(io::Error::new(
io::ErrorKind::Other,
"Message not idle, cannot start new outgoing")));
}
};
let mut stream = BufWriter::new(stream);
let mut uri = head.url.serialize_path().unwrap();
if let Some(ref q) = head.url.query {
uri.push('?');
uri.push_str(&q[..]);
}
let version = version::HttpVersion::Http11;
debug!("request line: {:?} {:?} {:?}", head.method, uri, version);
try!(write!(&mut stream, "{} {} {}{}",
head.method, uri, version, LINE_ENDING));
let stream = {
let mut write_headers = |mut stream: BufWriter<Box<NetworkStream + Send>>, head: &RequestHead| {
debug!("headers={:?}", head.headers);
match write!(&mut stream, "{}{}", head.headers, LINE_ENDING) {
Ok(_) => Ok(stream),
Err(e) => {
self.stream = Some(stream.into_inner().unwrap());
Err(e)
}
}
};
match &head.method {
&Method::Get | &Method::Head => {
EmptyWriter(try!(write_headers(stream, &head)))
},
_ => {
let mut chunked = true;
let mut len = 0;
match head.headers.get::<header::ContentLength>() {
Some(cl) => {
chunked = false;
len = **cl;
},
None => ()
};
// can't do in match above, thanks borrowck
if chunked {
let encodings = match head.headers.get_mut::<header::TransferEncoding>() {
Some(encodings) => {
//TODO: check if chunked is already in encodings. use HashSet?
encodings.push(header::Encoding::Chunked);
false
},
None => true
};
if encodings {
head.headers.set(
header::TransferEncoding(vec![header::Encoding::Chunked]))
}
}
let stream = try!(write_headers(stream, &head));
if chunked {
ChunkedWriter(stream)
} else {
SizedWriter(stream, len)
}
}
}
};
self.writer = Some(stream);
self.method = Some(head.method.clone());
Ok(head)
}
fn get_incoming(&mut self) -> ::Result<ResponseHead> {
try!(self.flush_outgoing());
let stream = match self.stream.take() {
Some(stream) => stream,
None => {
// The message was already in the reading state...
// TODO Decide what happens in case we try to get a new incoming at that point
return Err(From::from(
io::Error::new(io::ErrorKind::Other,
"Read already in progress")));
}
};
let mut stream = BufReader::new(stream);
let head = match parse_response(&mut stream) {
Ok(head) => head,
Err(e) => {
self.stream = Some(stream.into_inner());
return Err(e);
}
};
let raw_status = head.subject;
let headers = head.headers;
let method = self.method.take().unwrap_or(Method::Get);
// According to https://tools.ietf.org/html/rfc7230#section-3.3.3
        // 1. HEAD responses, and Status 1xx, 204, and 304 cannot have a body.
// 2. Status 2xx to a CONNECT cannot have a body.
// 3. Transfer-Encoding: chunked has a chunked body.
// 4. If multiple differing Content-Length headers or invalid, close connection.
// 5. Content-Length header has a sized body.
// 6. Not Client.
// 7. Read till EOF.
self.reader = Some(match (method, raw_status.0) {
(Method::Head, _) => EmptyReader(stream),
(_, 100...199) | (_, 204) | (_, 304) => EmptyReader(stream),
(Method::Connect, 200...299) => EmptyReader(stream),
_ => {
if let Some(&TransferEncoding(ref codings)) = headers.get() {
if codings.last() == Some(&Chunked) {
ChunkedReader(stream, None)
} else {
trace!("not chuncked. read till eof");
EofReader(stream)
}
} else if let Some(&ContentLength(len)) = headers.get() {
SizedReader(stream, len)
} else if headers.has::<ContentLength>() {
trace!("illegal Content-Length: {:?}", headers.get_raw("Content-Length"));
return Err(Error::Header);
} else {
trace!("neither Transfer-Encoding nor Content-Length");
EofReader(stream)
}
}
});
trace!("Http11Message.reader = {:?}", self.reader);
Ok(ResponseHead {
headers: headers,
raw_status: raw_status,
version: head.version,
})
}
fn has_body(&self) -> bool {
match self.reader {
Some(EmptyReader(..)) => false,
_ => true
}
}
#[cfg(feature = "timeouts")]
#[inline]
fn set_read_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
self.get_ref().set_read_timeout(dur)
}
#[cfg(feature = "timeouts")]
#[inline]
fn set_write_timeout(&self, dur: Option<Duration>) -> io::Result<()> {
self.get_ref().set_write_timeout(dur)
}
#[inline]
fn close_connection(&mut self) -> ::Result<()> {
try!(self.get_mut().close(Shutdown::Both));
Ok(())
}
}
impl Http11Message {
/// Consumes the `Http11Message` and returns the underlying `NetworkStream`.
pub fn into_inner(mut self) -> Box<NetworkStream + Send> {
if self.stream.is_some() {
self.stream.take().unwrap()
} else if self.writer.is_some() {
self.writer.take().unwrap().into_inner().into_inner().unwrap()
} else if self.reader.is_some() {
self.reader.take().unwrap().into_inner().into_inner()
} else {
panic!("Http11Message lost its underlying stream somehow");
}
}
    /// Gets a reference to the underlying `NetworkStream`, regardless of the state of the
/// `Http11Message`.
pub fn get_ref(&self) -> &(NetworkStream + Send) {
if self.stream.is_some() {
&**self.stream.as_ref().unwrap()
} else if self.writer.is_some() {
&**self.writer.as_ref().unwrap().get_ref().get_ref()
} else if self.reader.is_some() {
&**self.reader.as_ref().unwrap().get_ref().get_ref()
} else {
panic!("Http11Message lost its underlying stream somehow");
}
}
/// Gets a mutable reference to the underlying `NetworkStream`, regardless of the state of the
/// `Http11Message`.
pub fn get_mut(&mut self) -> &mut (NetworkStream + Send) {
if self.stream.is_some() {
&mut **self.stream.as_mut().unwrap()
} else if self.writer.is_some() {
&mut **self.writer.as_mut().unwrap().get_mut().get_mut()
} else if self.reader.is_some() {
&mut **self.reader.as_mut().unwrap().get_mut().get_mut()
} else {
panic!("Http11Message lost its underlying stream somehow");
}
}
/// Creates a new `Http11Message` that will use the given `NetworkStream` for communicating to
/// the peer.
pub fn with_stream(stream: Box<NetworkStream + Send>) -> Http11Message {
Http11Message {
method: None,
stream: Some(stream),
writer: None,
reader: None,
}
}
/// Flushes the current outgoing content and moves the stream into the `stream` property.
///
/// TODO It might be sensible to lift this up to the `HttpMessage` trait itself...
pub fn flush_outgoing(&mut self) -> ::Result<()> {
match self.writer {
None => return Ok(()),
Some(_) => {},
};
let writer = self.writer.take().unwrap();
// end() already flushes
let raw = match writer.end() {
Ok(buf) => buf.into_inner().unwrap(),
Err(e) => {
self.writer = Some(e.1);
return Err(From::from(e.0));
}
};
self.stream = Some(raw);
Ok(())
}
}
/// The `Protocol` implementation provides HTTP/1.1 messages.
pub struct Http11Protocol {
connector: Connector,
}
impl Protocol for Http11Protocol {
fn new_message(&self, host: &str, port: u16, scheme: &str) -> ::Result<Box<HttpMessage>> {
let stream = try!(self.connector.connect(host, port, scheme)).into();
Ok(Box::new(Http11Message::with_stream(stream)))
}
}
impl Http11Protocol {
/// Creates a new `Http11Protocol` instance that will use the given `NetworkConnector` for
/// establishing HTTP connections.
pub fn with_connector<C, S>(c: C) -> Http11Protocol
where C: NetworkConnector<Stream=S> + Send + Sync + 'static,
S: NetworkStream + Send {
Http11Protocol {
connector: Connector(Box::new(ConnAdapter(c))),
}
}
}
struct ConnAdapter<C: NetworkConnector + Send + Sync>(C);
impl<C: NetworkConnector<Stream=S> + Send + Sync, S: NetworkStream + Send>
NetworkConnector for ConnAdapter<C> {
type Stream = Box<NetworkStream + Send>;
#[inline]
fn connect(&self, host: &str, port: u16, scheme: &str)
-> ::Result<Box<NetworkStream + Send>> {
Ok(try!(self.0.connect(host, port, scheme)).into())
}
}
struct Connector(Box<NetworkConnector<Stream=Box<NetworkStream + Send>> + Send + Sync>);
impl NetworkConnector for Connector {
type Stream = Box<NetworkStream + Send>;
#[inline]
fn connect(&self, host: &str, port: u16, scheme: &str)
-> ::Result<Box<NetworkStream + Send>> {
Ok(try!(self.0.connect(host, port, scheme)).into())
}
}
/// Readers to handle different Transfer-Encodings.
///
/// If a message body does not include a Transfer-Encoding, it *should*
/// include a Content-Length header.
pub enum HttpReader<R> {
/// A Reader used when a Content-Length header is passed with a positive integer.
SizedReader(R, u64),
/// A Reader used when Transfer-Encoding is `chunked`.
ChunkedReader(R, Option<u64>),
/// A Reader used for responses that don't indicate a length or chunked.
///
    /// Note: This should only be used for `Response`s. It is illegal for a
/// `Request` to be made with both `Content-Length` and
/// `Transfer-Encoding: chunked` missing, as explained from the spec:
///
/// > If a Transfer-Encoding header field is present in a response and
/// > the chunked transfer coding is not the final encoding, the
/// > message body length is determined by reading the connection until
/// > it is closed by the server. If a Transfer-Encoding header field
/// > is present in a request and the chunked transfer coding is not
/// > the final encoding, the message body length cannot be determined
/// > reliably; the server MUST respond with the 400 (Bad Request)
/// > status code and then close the connection.
EofReader(R),
/// A Reader used for messages that should never have a body.
///
/// See https://tools.ietf.org/html/rfc7230#section-3.3.3
EmptyReader(R),
}
impl<R: Read> HttpReader<R> {
/// Unwraps this HttpReader and returns the underlying Reader.
pub fn into_inner(self) -> R {
match self {
SizedReader(r, _) => r,
ChunkedReader(r, _) => r,
EofReader(r) => r,
EmptyReader(r) => r,
}
}
/// Gets a borrowed reference to the underlying Reader.
pub fn get_ref(&self) -> &R {
match *self {
SizedReader(ref r, _) => r,
ChunkedReader(ref r, _) => r,
EofReader(ref r) => r,
EmptyReader(ref r) => r,
}
}
/// Gets a mutable reference to the underlying Reader.
pub fn get_mut(&mut self) -> &mut R {
match *self {
SizedReader(ref mut r, _) => r,
ChunkedReader(ref mut r, _) => r,
EofReader(ref mut r) => r,
EmptyReader(ref mut r) => r,
}
}
}
impl<R> fmt::Debug for HttpReader<R> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match *self {
SizedReader(_,rem) => write!(fmt, "SizedReader(remaining={:?})", rem),
ChunkedReader(_, None) => write!(fmt, "ChunkedReader(chunk_remaining=unknown)"),
ChunkedReader(_, Some(rem)) => write!(fmt, "ChunkedReader(chunk_remaining={:?})", rem),
EofReader(_) => write!(fmt, "EofReader"),
EmptyReader(_) => write!(fmt, "EmptyReader"),
}
}
}
impl<R: Read> Read for HttpReader<R> {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
match *self {
SizedReader(ref mut body, ref mut remaining) => {
trace!("Sized read, remaining={:?}", remaining);
if *remaining == 0 {
Ok(0)
} else {
let to_read = min(*remaining as usize, buf.len());
let num = try!(body.read(&mut buf[..to_read])) as u64;
trace!("Sized read: {}", num);
if num > *remaining {
*remaining = 0;
} else if num == 0 {
return Err(io::Error::new(io::ErrorKind::Other, "early eof"));
} else {
*remaining -= num;
}
Ok(num as usize)
}
},
ChunkedReader(ref mut body, ref mut opt_remaining) => {
let mut rem = match *opt_remaining {
Some(ref rem) => *rem,
// None means we don't know the size of the next chunk
None => try!(read_chunk_size(body))
};
trace!("Chunked read, remaining={:?}", rem);
if rem == 0 {
*opt_remaining = Some(0);
// chunk of size 0 signals the end of the chunked stream
// if the 0 digit was missing from the stream, it would
// be an InvalidInput error instead.
trace!("end of chunked");
return Ok(0)
}
let to_read = min(rem as usize, buf.len());
let count = try!(body.read(&mut buf[..to_read])) as u64;
if count == 0 {
*opt_remaining = Some(0);
return Err(io::Error::new(io::ErrorKind::Other, "early eof"));
}
rem -= count;
*opt_remaining = if rem > 0 {
Some(rem)
} else {
try!(eat(body, LINE_ENDING.as_bytes()));
None
};
Ok(count as usize)
},
EofReader(ref mut body) => {
let r = body.read(buf);
trace!("eofread: {:?}", r);
r
},
EmptyReader(_) => Ok(0)
}
}
}
fn eat<R: Read>(rdr: &mut R, bytes: &[u8]) -> io::Result<()> {
let mut buf = [0];
for &b in bytes.iter() {
match try!(rdr.read(&mut buf)) {
1 if buf[0] == b => (),
_ => return Err(io::Error::new(io::ErrorKind::InvalidInput,
"Invalid characters found")),
}
}
Ok(())
}
/// Chunked chunks start with 1*HEXDIGIT, indicating the size of the chunk.
fn read_chunk_size<R: Read>(rdr: &mut R) -> io::Result<u64> {
macro_rules! byte (
($rdr:ident) => ({
let mut buf = [0];
match try!($rdr.read(&mut buf)) {
1 => buf[0],
_ => return Err(io::Error::new(io::ErrorKind::InvalidInput,
"Invalid chunk size line")),
}
})
);
let mut size = 0u64;
let radix = 16;
let mut in_ext = false;
let mut in_chunk_size = true;
loop {
match byte!(rdr) {
b@b'0'...b'9' if in_chunk_size => {
size *= radix;
size += (b - b'0') as u64;
},
b@b'a'...b'f' if in_chunk_size => {
size *= radix;
size += (b + 10 - b'a') as u64;
},
b@b'A'...b'F' if in_chunk_size => {
size *= radix;
size += (b + 10 - b'A') as u64;
},
CR => {
match byte!(rdr) {
LF => break,
_ => return Err(io::Error::new(io::ErrorKind::InvalidInput,
"Invalid chunk size line"))
}
},
// If we weren't in the extension yet, the ";" signals its start
b';' if !in_ext => {
in_ext = true;
in_chunk_size = false;
},
// "Linear white space" is ignored between the chunk size and the
// extension separator token (";") due to the "implied *LWS rule".
b'\t' | b' ' if !in_ext & !in_chunk_size => {},
// LWS can follow the chunk size, but no more digits can come
b'\t' | b' ' if in_chunk_size => in_chunk_size = false,
// We allow any arbitrary octet once we are in the extension, since
// they all get ignored anyway. According to the HTTP spec, valid
// extensions would have a more strict syntax:
// (token ["=" (token | quoted-string)])
// but we gain nothing by rejecting an otherwise valid chunk size.
ext if in_ext => {
todo!("chunk extension byte={}", ext);
},
// Finally, if we aren't in the extension and we're reading any
// other octet, the chunk size line is invalid!
_ => {
return Err(io::Error::new(io::ErrorKind::InvalidInput,
"Invalid chunk size line"));
}
}
}
trace!("chunk size={:?}", size);
Ok(size)
}
/// Writers to handle different Transfer-Encodings.
pub enum HttpWriter<W: Write> {
/// A no-op Writer, used initially before Transfer-Encoding is determined.
ThroughWriter(W),
/// A Writer for when Transfer-Encoding includes `chunked`.
ChunkedWriter(W),
/// A Writer for when Content-Length is set.
///
/// Enforces that the body is not longer than the Content-Length header.
SizedWriter(W, u64),
/// A writer that should not write any body.
EmptyWriter(W),
}
impl<W: Write> HttpWriter<W> {
/// Unwraps the HttpWriter and returns the underlying Writer.
#[inline]
pub fn into_inner(self) -> W {
match self {
ThroughWriter(w) => w,
ChunkedWriter(w) => w,
SizedWriter(w, _) => w,
EmptyWriter(w) => w,
}
}
/// Access the inner Writer.
#[inline]
pub fn get_ref<'a>(&'a self) -> &'a W {
match *self {
ThroughWriter(ref w) => w,
ChunkedWriter(ref w) => w,
SizedWriter(ref w, _) => w,
EmptyWriter(ref w) => w,
}
}
/// Access the inner Writer mutably.
///
/// Warning: You should not write to this directly, as you can corrupt
/// the state.
#[inline]
pub fn get_mut<'a>(&'a mut self) -> &'a mut W {
match *self {
ThroughWriter(ref mut w) => w,
ChunkedWriter(ref mut w) => w,
SizedWriter(ref mut w, _) => w,
EmptyWriter(ref mut w) => w,
}
}
/// Ends the HttpWriter, and returns the underlying Writer.
///
/// A final `write_all()` is called with an empty message, and then flushed.
/// The ChunkedWriter variant will use this to write the 0-sized last-chunk.
#[inline]
pub fn end(mut self) -> Result<W, EndError<W>> {
fn inner<W: Write>(w: &mut W) -> io::Result<()> {
try!(w.write(&[]));
w.flush()
}
match inner(&mut self) {
Ok(..) => Ok(self.into_inner()),
Err(e) => Err(EndError(e, self))
}
}
}
#[derive(Debug)]
pub struct EndError<W: Write>(io::Error, HttpWriter<W>);
impl<W: Write> From<EndError<W>> for io::Error {
fn from(e: EndError<W>) -> io::Error {
e.0
}
}
impl<W: Write> Write for HttpWriter<W> {
#[inline]
fn write(&mut self, msg: &[u8]) -> io::Result<usize> {
match *self {
ThroughWriter(ref mut w) => w.write(msg),
ChunkedWriter(ref mut w) => {
let chunk_size = msg.len();
trace!("chunked write, size = {:?}", chunk_size);
try!(write!(w, "{:X}{}", chunk_size, LINE_ENDING));
try!(w.write_all(msg));
try!(w.write_all(LINE_ENDING.as_bytes()));
Ok(msg.len())
},
SizedWriter(ref mut w, ref mut remaining) => {
let len = msg.len() as u64;
if len > *remaining {
let len = *remaining;
*remaining = 0;
try!(w.write_all(&msg[..len as usize]));
Ok(len as usize)
} else {
*remaining -= len;
try!(w.write_all(msg));
Ok(len as usize)
}
},
EmptyWriter(..) => {
if !msg.is_empty() {
error!("Cannot include a body with this kind of message");
}
Ok(0)
}
}
}
#[inline]
fn flush(&mut self) -> io::Result<()> {
match *self {
ThroughWriter(ref mut w) => w.flush(),
ChunkedWriter(ref mut w) => w.flush(),
SizedWriter(ref mut w, _) => w.flush(),
EmptyWriter(ref mut w) => w.flush(),
}
}
}
impl<W: Write> fmt::Debug for HttpWriter<W> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
match *self {
ThroughWriter(_) => write!(fmt, "ThroughWriter"),
ChunkedWriter(_) => write!(fmt, "ChunkedWriter"),
SizedWriter(_, rem) => write!(fmt, "SizedWriter(remaining={:?})", rem),
EmptyWriter(_) => write!(fmt, "EmptyWriter"),
}
}
}
const MAX_HEADERS: usize = 100;
/// Parses a request into an Incoming message head.
#[inline]
pub fn parse_request<R: Read>(buf: &mut BufReader<R>) -> ::Result<Incoming<(Method, RequestUri)>> {
parse::<R, httparse::Request, (Method, RequestUri)>(buf)
}
/// Parses a response into an Incoming message head.
#[inline]
pub fn parse_response<R: Read>(buf: &mut BufReader<R>) -> ::Result<Incoming<RawStatus>> {
parse::<R, httparse::Response, RawStatus>(buf)
}
fn parse<R: Read, T: TryParse<Subject=I>, I>(rdr: &mut BufReader<R>) -> ::Result<Incoming<I>> {
loop {
match try!(try_parse::<R, T, I>(rdr)) {
httparse::Status::Complete((inc, len)) => {
rdr.consume(len);
return Ok(inc);
},
_partial => ()
}
match try!(rdr.read_into_buf()) {
0 if rdr.get_buf().is_empty() => {
return Err(Error::Io(io::Error::new(
io::ErrorKind::ConnectionAborted,
"Connection closed"
)))
},
0 => return Err(Error::TooLarge),
_ => ()
}
}
}
fn try_parse<R: Read, T: TryParse<Subject=I>, I>(rdr: &mut BufReader<R>) -> TryParseResult<I> {
let mut headers = [httparse::EMPTY_HEADER; MAX_HEADERS];
let buf = rdr.get_buf();
if buf.len() == 0 {
return Ok(httparse::Status::Partial);
}
trace!("try_parse({:?})", buf);
<T as TryParse>::try_parse(&mut headers, buf)
}
#[doc(hidden)]
trait TryParse {
type Subject;
fn try_parse<'a>(headers: &'a mut [httparse::Header<'a>], buf: &'a [u8]) ->
TryParseResult<Self::Subject>;
}
type TryParseResult<T> = Result<httparse::Status<(Incoming<T>, usize)>, Error>;
impl<'a> TryParse for httparse::Request<'a, 'a> {
type Subject = (Method, RequestUri);
fn try_parse<'b>(headers: &'b mut [httparse::Header<'b>], buf: &'b [u8]) ->
TryParseResult<(Method, RequestUri)> {
trace!("Request.try_parse([Header; {}], [u8; {}])", headers.len(), buf.len());
let mut req = httparse::Request::new(headers);
Ok(match try!(req.parse(buf)) {
httparse::Status::Complete(len) => {
trace!("Request.try_parse Complete({})", len);
httparse::Status::Complete((Incoming {
version: if req.version.unwrap() == 1 { Http11 } else { Http10 },
subject: (
try!(req.method.unwrap().parse()),
try!(req.path.unwrap().parse())
),
headers: try!(Headers::from_raw(req.headers))
}, len))
},
httparse::Status::Partial => httparse::Status::Partial
})
}
}
impl<'a> TryParse for httparse::Response<'a, 'a> {
type Subject = RawStatus;
fn try_parse<'b>(headers: &'b mut [httparse::Header<'b>], buf: &'b [u8]) ->
TryParseResult<RawStatus> {
trace!("Response.try_parse([Header; {}], [u8; {}])", headers.len(), buf.len());
let mut res = httparse::Response::new(headers);
Ok(match try!(res.parse(buf)) {
httparse::Status::Complete(len) => {
trace!("Response.try_parse Complete({})", len);
let code = res.code.unwrap();
let reason = match StatusCode::from_u16(code).canonical_reason() {
Some(reason) if reason == res.reason.unwrap() => Cow::Borrowed(reason),
_ => Cow::Owned(res.reason.unwrap().to_owned())
};
httparse::Status::Complete((Incoming {
version: if res.version.unwrap() == 1 { Http11 } else { Http10 },
subject: RawStatus(code, reason),
headers: try!(Headers::from_raw(res.headers))
}, len))
},
httparse::Status::Partial => httparse::Status::Partial
})
}
}
/// An Incoming Message head. Includes request/status line, and headers.
#[derive(Debug)]
pub struct Incoming<S> {
/// HTTP version of the message.
pub version: HttpVersion,
/// Subject (request line or status line) of Incoming message.
pub subject: S,
/// Headers of the Incoming message.
pub headers: Headers
}
/// The `\r` byte.
pub const CR: u8 = b'\r';
/// The `\n` byte.
pub const LF: u8 = b'\n';
/// The bytes `\r\n`.
pub const LINE_ENDING: &'static str = "\r\n";
#[cfg(test)]
mod tests {
use std::error::Error;
use std::io::{self, Read, Write};
use buffer::BufReader;<|fim▁hole|> use super::{read_chunk_size, parse_request, parse_response};
#[test]
fn test_write_chunked() {
use std::str::from_utf8;
let mut w = super::HttpWriter::ChunkedWriter(Vec::new());
w.write_all(b"foo bar").unwrap();
w.write_all(b"baz quux herp").unwrap();
let buf = w.end().unwrap();
let s = from_utf8(buf.as_ref()).unwrap();
assert_eq!(s, "7\r\nfoo bar\r\nD\r\nbaz quux herp\r\n0\r\n\r\n");
}
#[test]
fn test_write_sized() {
use std::str::from_utf8;
let mut w = super::HttpWriter::SizedWriter(Vec::new(), 8);
w.write_all(b"foo bar").unwrap();
assert_eq!(w.write(b"baz").unwrap(), 1);
let buf = w.end().unwrap();
let s = from_utf8(buf.as_ref()).unwrap();
assert_eq!(s, "foo barb");
}
#[test]
fn test_read_chunk_size() {
fn read(s: &str, result: u64) {
assert_eq!(read_chunk_size(&mut s.as_bytes()).unwrap(), result);
}
fn read_err(s: &str) {
assert_eq!(read_chunk_size(&mut s.as_bytes()).unwrap_err().kind(),
io::ErrorKind::InvalidInput);
}
read("1\r\n", 1);
read("01\r\n", 1);
read("0\r\n", 0);
read("00\r\n", 0);
read("A\r\n", 10);
read("a\r\n", 10);
read("Ff\r\n", 255);
read("Ff \r\n", 255);
// Missing LF or CRLF
read_err("F\rF");
read_err("F");
// Invalid hex digit
read_err("X\r\n");
read_err("1X\r\n");
read_err("-\r\n");
read_err("-1\r\n");
// Acceptable (if not fully valid) extensions do not influence the size
read("1;extension\r\n", 1);
read("a;ext name=value\r\n", 10);
read("1;extension;extension2\r\n", 1);
read("1;;; ;\r\n", 1);
read("2; extension...\r\n", 2);
read("3 ; extension=123\r\n", 3);
read("3 ;\r\n", 3);
read("3 ; \r\n", 3);
// Invalid extensions cause an error
read_err("1 invalid extension\r\n");
read_err("1 A\r\n");
read_err("1;no CRLF");
}
#[test]
fn test_read_sized_early_eof() {
let mut r = super::HttpReader::SizedReader(MockStream::with_input(b"foo bar"), 10);
let mut buf = [0u8; 10];
assert_eq!(r.read(&mut buf).unwrap(), 7);
let e = r.read(&mut buf).unwrap_err();
assert_eq!(e.kind(), io::ErrorKind::Other);
assert_eq!(e.description(), "early eof");
}
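    // A small companion sketch (added for clarity; not part of the original test
    // suite): once a SizedReader has consumed its declared length, further reads
    // return Ok(0) without touching the underlying stream again.
    #[test]
    fn test_read_sized_to_completion() {
        let mut r = super::HttpReader::SizedReader(MockStream::with_input(b"foo bar extra"), 7);
        let mut buf = [0u8; 32];
        assert_eq!(r.read(&mut buf).unwrap(), 7);
        assert_eq!(&buf[..7], &b"foo bar"[..]);
        assert_eq!(r.read(&mut buf).unwrap(), 0);
    }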
#[test]
fn test_read_chunked_early_eof() {
let mut r = super::HttpReader::ChunkedReader(MockStream::with_input(b"\
9\r\n\
foo bar\
"), None);
let mut buf = [0u8; 10];
assert_eq!(r.read(&mut buf).unwrap(), 7);
let e = r.read(&mut buf).unwrap_err();
assert_eq!(e.kind(), io::ErrorKind::Other);
assert_eq!(e.description(), "early eof");
}
#[test]
fn test_parse_incoming() {
let mut raw = MockStream::with_input(b"GET /echo HTTP/1.1\r\nHost: hyper.rs\r\n\r\n");
let mut buf = BufReader::new(&mut raw);
parse_request(&mut buf).unwrap();
}
#[test]
fn test_parse_raw_status() {
let mut raw = MockStream::with_input(b"HTTP/1.1 200 OK\r\n\r\n");
let mut buf = BufReader::new(&mut raw);
let res = parse_response(&mut buf).unwrap();
assert_eq!(res.subject.1, "OK");
let mut raw = MockStream::with_input(b"HTTP/1.1 200 Howdy\r\n\r\n");
let mut buf = BufReader::new(&mut raw);
let res = parse_response(&mut buf).unwrap();
assert_eq!(res.subject.1, "Howdy");
}
#[test]
fn test_parse_tcp_closed() {
use std::io::ErrorKind;
use error::Error;
let mut empty = MockStream::new();
let mut buf = BufReader::new(&mut empty);
match parse_request(&mut buf) {
Err(Error::Io(ref e)) if e.kind() == ErrorKind::ConnectionAborted => (),
other => panic!("unexpected result: {:?}", other)
}
}
#[cfg(feature = "nightly")]
use test::Bencher;
#[cfg(feature = "nightly")]
#[bench]
fn bench_parse_incoming(b: &mut Bencher) {
let mut raw = MockStream::with_input(b"GET /echo HTTP/1.1\r\nHost: hyper.rs\r\n\r\n");
let mut buf = BufReader::new(&mut raw);
b.iter(|| {
parse_request(&mut buf).unwrap();
buf.get_mut().read.set_position(0);
});
}
}<|fim▁end|> | use mock::MockStream;
|
<|file_name|>DefaultSnmpControllerTest.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2016-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.snmp.ctl;
import com.btisystems.pronx.ems.core.snmp.ISnmpConfiguration;
import com.btisystems.pronx.ems.core.snmp.ISnmpConfigurationFactory;
import com.btisystems.pronx.ems.core.snmp.ISnmpSession;
import com.btisystems.pronx.ems.core.snmp.ISnmpSessionFactory;
import com.google.common.collect.Maps;
import org.junit.Before;
import org.junit.Test;
import org.onosproject.alarm.Alarm;
import org.onosproject.alarm.AlarmId;
import org.onosproject.alarm.DefaultAlarm;
import java.io.IOException;
import static org.junit.Assert.*;
/**
* DefaultSnmpController test class.
*/
public class DefaultSnmpControllerTest {
ISnmpSessionFactory mockSnmpSessionFactory = new MockISnmpSessionFactory();
DefaultSnmpController snmpController = new DefaultSnmpController();
DefaultSnmpDevice device = new DefaultSnmpDevice("1.1.1.1", 1, "test", "test");
ISnmpSession snmpSession = new ISnmpSessionAdapter();
long time = System.currentTimeMillis();
DefaultAlarm alarm = new DefaultAlarm.Builder(
AlarmId.alarmId(device.deviceId(), Long.toString(time)),
device.deviceId(), "SNMP alarm retrieval failed",<|fim▁hole|>
@Before
public void setUp() {
snmpController.factoryMap = Maps.newHashMap();
snmpController.factoryMap.put(1, mockSnmpSessionFactory);
}
@Test
public void testActivate() {
snmpController.activate(null);
assertTrue("Snmp session factory map should contain atleast one factory object",
snmpController.factoryMap.size() > 0);
}
@Test
public void testDeactivate() {
snmpController.deactivate();
assertEquals("Device map should be clear", 0, snmpController.getDevices().size());
assertEquals("Session map should be clear", 0, snmpController.sessionMap.size());
}
@Test
public void addDevice() {
snmpController.addDevice(device);
assertEquals("Controller should contain device", device, snmpController.getDevice(device.deviceId()));
}
/**
* tests session creation and get from map if already exists.
*/
@Test
public void getNotExistingSession() throws Exception {
addDevice();
assertEquals("Session should be created", snmpSession, snmpController.getSession(device.deviceId()));
assertEquals("Map should contain session", 1, snmpController.snmpDeviceMap.size());
assertEquals("Session should be fetched from map", snmpSession, snmpController.getSession(device.deviceId()));
}
@Test
public void removeDevice() {
addDevice();
snmpController.removeDevice(device.deviceId());
assertNull("Device shoudl not be present", snmpController.getDevice(device.deviceId()));
}
@Test
public void walkFailedAlarm() {
assertEquals("Alarms should be equals", alarm, snmpController.buildWalkFailedAlarm(device.deviceId()));
}
public class MockISnmpSessionFactory implements ISnmpSessionFactory {
@Override
public ISnmpSession createSession(ISnmpConfiguration configuration, String ipAddress) throws IOException {
new ISnmpSessionAdapter();
return snmpSession;
}
@Override
public ISnmpSession createSession(String ipAddress, String community)
throws IOException {
return snmpSession;
}
@Override
public ISnmpSession createSession(String ipAddress, String community,
String factoryName,
ISnmpConfigurationFactory.AccessType accessType)
throws IOException {
return snmpSession;
}
}
}<|fim▁end|> | Alarm.SeverityLevel.CRITICAL,
time).build(); |
<|file_name|>after_signup.py<|end_file_name|><|fim▁begin|>import urllib
from canvas import util
def make_cookie_key(key):
return 'after_signup_' + str(key)<|fim▁hole|>
def _get(request, key):
key = make_cookie_key(key)
val = request.COOKIES.get(key)
if val is not None:
val = util.loads(urllib.unquote(val))
return (key, val,)
def get_posted_comment(request):
'''
Gets a comment waiting to be posted, if one exists.
Returns a pair containing the cookie key used to retrieve it and its deserialized JSON.
'''
#TODO use dcramer's django-cookies so that we don't rely on having the response object to mutate cookies.
# That would make this API much cleaner and isolated.
return _get(request, 'post_comment')<|fim▁end|> | |
<|file_name|>hier_block.py<|end_file_name|><|fim▁begin|>import collections
import os
import codecs
from .top_block import TopBlockGenerator
from .. import Constants
from ..io import yaml
class HierBlockGenerator(TopBlockGenerator):
"""Extends the top block generator to also generate a block YML file"""
def __init__(self, flow_graph, _):
"""
Initialize the hier block generator object.
Args:
flow_graph: the flow graph object
"""
platform = flow_graph.parent
output_dir = platform.config.hier_block_lib_dir
if not os.path.exists(output_dir):
os.mkdir(output_dir)
TopBlockGenerator.__init__(self, flow_graph, output_dir)
self._mode = Constants.HIER_BLOCK_FILE_MODE
self.file_path_yml = self.file_path[:-3] + '.block.yml'
def write(self):
"""generate output and write it to files"""
TopBlockGenerator.write(self)
data = yaml.dump(self._build_block_n_from_flow_graph_io())
replace = [
('parameters:', '\nparameters:'),
('inputs:', '\ninputs:'),
('outputs:', '\noutputs:'),
('asserts:', '\nasserts:'),
('templates:', '\ntemplates:'),
('documentation:', '\ndocumentation:'),
('file_format:', '\nfile_format:'),
]
for r in replace:
data = data.replace(*r)
with codecs.open(self.file_path_yml, 'w', encoding='utf-8') as fp:
fp.write(data)
# Windows only supports S_IREAD and S_IWRITE, other flags are ignored
os.chmod(self.file_path_yml, self._mode)
def _build_block_n_from_flow_graph_io(self):
"""
Generate a block YML nested data from the flow graph IO
Returns:
a yml node tree
"""
# Extract info from the flow graph
block_id = self._flow_graph.get_option('id')
parameters = self._flow_graph.get_parameters()
def var_or_value(name):
if name in (p.name for p in parameters):
return "${" + name + " }"
return name
# Build the nested data
data = collections.OrderedDict()
data['id'] = block_id
data['label'] = (
self._flow_graph.get_option('title') or
self._flow_graph.get_option('id').replace('_', ' ').title()
)
data['category'] = self._flow_graph.get_option('category')
# Parameters
data['parameters'] = []
for param_block in parameters:
p = collections.OrderedDict()
p['id'] = param_block.name
p['label'] = param_block.params['label'].get_value() or param_block.name
p['dtype'] = param_block.params['value'].dtype
p['default'] = param_block.params['value'].get_value()
p['hide'] = param_block.params['hide'].get_value()
data['parameters'].append(p)
# Ports
for direction in ('inputs', 'outputs'):
data[direction] = []
for port in get_hier_block_io(self._flow_graph, direction):
p = collections.OrderedDict()
p['label'] = port.parent.params['label'].value
if port.domain != Constants.DEFAULT_DOMAIN:
p['domain'] = port.domain
p['dtype'] = port.dtype
if port.domain != Constants.GR_MESSAGE_DOMAIN:
p['vlen'] = var_or_value(port.vlen)
if port.optional:
p['optional'] = True
data[direction].append(p)
t = data['templates'] = collections.OrderedDict()
t['imports'] = "from {0} import {0} # grc-generated hier_block".format(
self._flow_graph.get_option('id'))
# Make data
if parameters:
t['make'] = '{cls}(\n {kwargs},\n)'.format(
cls=block_id,
kwargs=',\n '.join(
'{key}=${{ {key} }}'.format(key=param.name) for param in parameters
),
)
else:
t['make'] = '{cls}()'.format(cls=block_id)
# Self-connect if there aren't any ports
if not data['inputs'] and not data['outputs']:
t['make'] += '\nself.connect(self.${id})'
# Callback data
t['callbacks'] = [
'set_{key}(${{ {key} }})'.format(key=param_block.name) for param_block in parameters
]
# Documentation
data['documentation'] = "\n".join(field for field in (
self._flow_graph.get_option('author'),
self._flow_graph.get_option('description'),
self.file_path
) if field)
data['grc_source'] = str(self._flow_graph.grc_file_path)
data['file_format'] = 1
return data
<|fim▁hole|> def _build_block_n_from_flow_graph_io(self):
n = HierBlockGenerator._build_block_n_from_flow_graph_io(self)
block_n = collections.OrderedDict()
# insert flags after category
for key, value in n.items():
block_n[key] = value
if key == 'category':
block_n['flags'] = 'need_qt_gui'
if not block_n['label'].upper().startswith('QT GUI'):
block_n['label'] = 'QT GUI ' + block_n['label']
gui_hint_param = collections.OrderedDict()
gui_hint_param['id'] = 'gui_hint'
gui_hint_param['label'] = 'GUI Hint'
gui_hint_param['dtype'] = 'gui_hint'
gui_hint_param['hide'] = 'part'
block_n['parameters'].append(gui_hint_param)
block_n['templates']['make'] += (
"\n<% win = 'self.%s'%id %>"
"\n${ gui_hint() % win }"
)
return block_n
def get_hier_block_io(flow_graph, direction, domain=None):
"""
Get a list of io ports for this flow graph.
Returns a list of blocks
"""
pads = flow_graph.get_pad_sources() if direction == 'inputs' else flow_graph.get_pad_sinks()
for pad in pads:
for port in (pad.sources if direction == 'inputs' else pad.sinks):
if domain and port.domain != domain:
continue
yield port<|fim▁end|> |
class QtHierBlockGenerator(HierBlockGenerator):
|
<|file_name|>kekule.chemEditor.baseEditors.js<|end_file_name|><|fim▁begin|>/**
* @fileoverview
* Base types and classes used by chem editor.
* @author Partridge Jiang
*/
/*
* requires /lan/classes.js
* requires /chemDoc/issueCheckers/kekule.issueCheckers.js
* requires /widgets/operation/kekule.operations.js
* requires /render/kekule.render.base.js
* requires /render/kekule.render.boundInfoRecorder.js
* requires /html/xbrowsers/kekule.x.js
* requires /widgets/kekule.widget.base.js
* requires /widgets/chem/kekule.chemWidget.chemObjDisplayers.js
* requires /widgets/chem/editor/kekule.chemEditor.extensions.js
* requires /widgets/chem/editor/kekule.chemEditor.editorUtils.js
* requires /widgets/chem/editor/kekule.chemEditor.configs.js
* requires /widgets/chem/editor/kekule.chemEditor.operations.js
* requires /widgets/chem/editor/kekule.chemEditor.modifications.js
*/
(function(){
"use strict";
var OU = Kekule.ObjUtils;
var AU = Kekule.ArrayUtils;
var EU = Kekule.HtmlElementUtils;
var CU = Kekule.CoordUtils;
var CNS = Kekule.Widget.HtmlClassNames;
var CCNS = Kekule.ChemWidget.HtmlClassNames;
/** @ignore */
Kekule.ChemWidget.HtmlClassNames = Object.extend(Kekule.ChemWidget.HtmlClassNames, {
EDITOR: 'K-Chem-Editor',
EDITOR_CLIENT: 'K-Chem-Editor-Client',
EDITOR_UIEVENT_RECEIVER: 'K-Chem-Editor-UiEvent-Receiver',
EDITOR2D: 'K-Chem-Editor2D',
EDITOR3D: 'K-Chem-Editor3D'
});
/**
* Namespace for chem editor.
* @namespace
*/
Kekule.ChemWidget.Editor = {};
/**
* Alias to {@link Kekule.ChemWidget.Editor}.
* @namespace
*/
Kekule.Editor = Kekule.ChemWidget.Editor;
/**
* In editor, there exist three types of coord: one based on object system (inner coord),
* another one based on context of editor (outer coord, context coord),
* and the third based on screen.
* This enum is an alias of Kekule.Render.CoordSystem
* @class
*/
Kekule.Editor.CoordSys = Kekule.Render.CoordSystem;
/**
* Enumeration of regions in/out box.
* @enum
* @ignore
*/
Kekule.Editor.BoxRegion = {
OUTSIDE: 0,
CORNER_TL: 1,
CORNER_TR: 2,
CORNER_BL: 3,
CORNER_BR: 4,
EDGE_TOP: 11,
EDGE_LEFT: 12,
EDGE_BOTTOM: 13,
EDGE_RIGHT: 14,
INSIDE: 20
};
/**
* Enumeration of mode in selecting object in editor.
* @enum
* @ignore
*/
Kekule.Editor.SelectMode = {
/** Draw a box in editor when selecting, select all object inside a box. **/
RECT: 0,
/** Draw a curve in editor when selecting, select all object inside this curve polygon. **/
POLYGON: 1,
/** Draw a curve in editor when selecting, select all object intersecting this curve. **/
POLYLINE: 2,
/** Click on a child object to select the whole standalone ancestor. **/
ANCESTOR: 10
};
// add some global options
Kekule.globalOptions.add('chemWidget.editor', {
'enableIssueCheck': true,
'enableCreateNewDoc': true,
'enableOperHistory': true,
'enableOperContext': true,
'initOnNewDoc': true,
'enableSelect': true,
'enableMove': true,
'enableResize': true,
'enableAspectRatioLockedResize': true,
'enableRotate': true,
'enableGesture': true
});
Kekule.globalOptions.add('chemWidget.editor.issueChecker', {
'enableAutoIssueCheck': true,
'enableAutoScrollToActiveIssue': true,
'enableIssueMarkerHint': true,
'durationLimit': 50 // issue check must be finished in 50ms, avoid blocking the UI
});
/**
* A base chem editor.
* @class
* @augments Kekule.ChemWidget.ChemObjDisplayer
* @param {Variant} parentOrElementOrDocument
* @param {Kekule.ChemObject} chemObj initially loaded chemObj.
* @param {Int} renderType Display in 2D or 3D. Value from {@link Kekule.Render.RendererType}.
* @param {Kekule.Editor.BaseEditorConfigs} editorConfigs Configuration of this editor.
*
* @property {Kekule.Editor.BaseEditorConfigs} editorConfigs Configuration of this editor.
* @property {Bool} enableCreateNewDoc Whether create new object in editor is allowed.
* @property {Bool} initOnNewDoc Whether create a new doc when editor instance is initialized.
* Note, the new doc will only be created when property enableCreateNewDoc is true.
* @property {Bool} enableOperHistory Whether undo/redo is enabled.
* @property {Kekule.OperationHistory} operHistory History of operations. Used to enable undo/redo function.
* @property {Int} renderType Display in 2D or 3D. Value from {@link Kekule.Render.RendererType}.
* @property {Kekule.ChemObject} chemObj The root object in editor.
* @property {Bool} enableIssueCheck Whether issue check is available in editor.
* @property {Array} issueCheckerIds Issue checker class IDs used in editor.
 * @property {Bool} enableAutoIssueCheck Whether the issue checking is automatically executed when objects change in editor.
 * @property {Array} issueCheckResults Array of {@link Kekule.IssueCheck.CheckResult}, results of auto or manual check.
 * @property {Kekule.IssueCheck.CheckResult} activeIssueCheckResult Currently selected issue check result in issue inspector.
 * @property {Bool} showAllIssueMarkers Whether all issue markers should be marked in editor.
* Note, the active issue will always be marked.
* @property {Bool} enableIssueMarkerHint Whether display hint text on issue markers.
* @property {Bool} enableAutoScrollToActiveIssue Whether the editor will automatically scroll to the issue object when selecting in issue inspector.
* @property {Bool} enableOperContext If this property is set to true, object being modified will be drawn in a
* separate context to accelerate the interface refreshing.
* @property {Object} objContext Context to draw basic chem objects. Can be 2D or 3D context. Alias of property drawContext
* @property {Object} operContext Context to draw objects being operated. Can be 2D or 3D context.
* @property {Object} uiContext Context to draw UI marks. Usually this is a 2D context.
* @property {Object} objDrawBridge Bridge to draw chem objects. Alias of property drawBridge.
* @property {Object} uiDrawBridge Bridge to draw UI markers.
* @property {Int} selectMode Value from Kekule.Editor.SelectMode, set the mode of selecting operation in editor.
* @property {Array} selection An array of selected basic object.
* @property {Hash} zoomCenter The center coord (based on client element) when zooming editor.
* //@property {Bool} standardizeObjectsBeforeSaving Whether standardize molecules (and other possible objects) before saving them.
*/
/**
 * Invoked when a chem object is loaded into editor.
 * event param of it has one field: {obj: Object}
* @name Kekule.Editor.BaseEditor#load
* @event
*/
/**
* Invoked when the chem object inside editor is changed.
 * event param of it has one field: {obj: Object, propNames: Array}
* @name Kekule.Editor.BaseEditor#editObjChanged
* @event
*/
/**
 * Invoked when multiple chem objects inside editor are changed.
 * event param of it has one field: {details}.
* @name Kekule.Editor.BaseEditor#editObjsChanged
* @event
*/
/**
 * Invoked when chem objects inside editor are changed and the changes have been updated by editor.
 * event param of it has one field: {details}.
* Note: this event is not the same as editObjsChanged. When beginUpdateObj is called, editObjsChanged
* event still will be invoked but editObjsUpdated event will be suppressed.
* @name Kekule.Editor.BaseEditor#editObjsUpdated
* @event
*/
/**
 * Invoked when the selected objects in editor have been changed.
 * When beginUpdateObj is called, selectedObjsUpdated event will be suppressed.
 * event param of it has one field: {objs}.
* @name Kekule.Editor.BaseEditor#selectedObjsUpdated
* @event
*/
/**
 * Invoked when the pointer (usually the mouse) is hovering over basic object(s) in editor.
 * Event param of it has field {objs}. When the pointer moves out of the obj, the objs field will be an empty array.
* @name Kekule.Editor.BaseEditor#hoverOnObjs
* @event
*/
/**
* Invoked when the selection in editor has been changed.
* @name Kekule.Editor.BaseEditor#selectionChange
* @event
*/
/**
* Invoked when the operation history has modifications.
* @name Kekule.Editor.BaseEditor#operChange
* @event
*/
/**
 * Invoked when an operation is pushed into operation history.
 * event param of it has one field: {operation: Kekule.Operation}
* @name Kekule.Editor.BaseEditor#operPush
* @event
*/
/**
 * Invoked when an operation is popped from history.
 * event param of it has one field: {operation: Kekule.Operation}
* @name Kekule.Editor.BaseEditor#operPop
* @event
*/
/**
* Invoked when one operation is undone.
* event param of it has two fields: {operation: Kekule.Operation, currOperIndex: Int}
* @name Kekule.Editor.BaseEditor#operUndo
* @event
*/
/**
* Invoked when one operation is redone.
* event param of it has two fields: {operation: Kekule.Operation, currOperIndex: Int}
* @name Kekule.Editor.BaseEditor#operRedo
* @event
*/
/**
* Invoked when the operation history is cleared.
* event param of it has one field: {currOperIndex: Int}
* @name Kekule.Editor.BaseEditor#operHistoryClear
* @event
*/
Kekule.Editor.BaseEditor = Class.create(Kekule.ChemWidget.ChemObjDisplayer,
/** @lends Kekule.Editor.BaseEditor# */
{
/** @private */
CLASS_NAME: 'Kekule.Editor.BaseEditor',
/** @private */
BINDABLE_TAG_NAMES: ['div', 'span'],
/** @private */
OBSERVING_GESTURES: ['rotate', 'rotatestart', 'rotatemove', 'rotateend', 'rotatecancel',
'pinch', 'pinchstart', 'pinchmove', 'pinchend', 'pinchcancel', 'pinchin', 'pinchout'],
/** @constructs */
initialize: function(/*$super, */parentOrElementOrDocument, chemObj, renderType, editorConfigs)
{
this._objSelectFlag = 0; // used internally
this._objectUpdateFlag = 0; // used internally
this._objectManipulateFlag = 0; // used internally
this._uiMarkerUpdateFlag = 0; // used internally
this._updatedObjectDetails = []; // used internally
this._operatingObjs = []; // used internally
this._operatingRenderers = []; // used internally
this._initialRenderTransformParams = null; // used internally, must init before $super
// as in $super, chemObj may be loaded and _initialRenderTransformParams will be set at that time
		this._objChanged = false;  // used internally, mark whether some changes have been made to chem object
this._lengthCaches = {}; // used internally, stores some value related to distance and length
var getOptionValue = Kekule.globalOptions.get;
/*
this.setPropStoreFieldValue('enableIssueCheck', true);
this.setPropStoreFieldValue('enableCreateNewDoc', true);
this.setPropStoreFieldValue('enableOperHistory', true);
this.setPropStoreFieldValue('enableOperContext', true);
this.setPropStoreFieldValue('initOnNewDoc', true);
*/
this.setPropStoreFieldValue('enableIssueCheck', getOptionValue('chemWidget.editor.enableIssueCheck', true));
this.setPropStoreFieldValue('enableCreateNewDoc', getOptionValue('chemWidget.editor.enableCreateNewDoc', true));
this.setPropStoreFieldValue('enableOperHistory', getOptionValue('chemWidget.editor.enableOperHistory', true));
this.setPropStoreFieldValue('enableOperContext', getOptionValue('chemWidget.editor.enableOperContext', true));
this.setPropStoreFieldValue('initOnNewDoc', getOptionValue('chemWidget.editor.initOnNewDoc', true));
//this.setPropStoreFieldValue('initialZoom', 1.5);
//this.setPropStoreFieldValue('selectMode', Kekule.Editor.SelectMode.POLYGON); // debug
this.tryApplySuper('initialize', [parentOrElementOrDocument, chemObj, renderType]) /* $super(parentOrElementOrDocument, chemObj, renderType) */;
//this.initEventHandlers();
if (!this.getChemObj() && this.getInitOnNewDoc() && this.getEnableCreateNewDoc())
this.newDoc();
this.setPropStoreFieldValue('editorConfigs', editorConfigs || this.createDefaultConfigs());
//this.setPropStoreFieldValue('uiMarkers', []);
//this.setEnableGesture(true);
this.setEnableGesture(getOptionValue('chemWidget.editor.enableGesture', true));
},
/** @private */
initProperties: function()
{
this.defineProp('editorConfigs', {'dataType': 'Kekule.Editor.BaseEditorConfigs', 'serializable': false,
'getter': function() { return this.getDisplayerConfigs(); },
'setter': function(value) { return this.setDisplayerConfigs(value); }
});
this.defineProp('defBondLength', {'dataType': DataType.FLOAT, 'serializable': false,
'getter': function()
{
var result = this.getPropStoreFieldValue('defBondLength');
if (!result)
result = this.getEditorConfigs().getStructureConfigs().getDefBondLength();
return result;
}
});
this.defineProp('defBondScreenLength', {'dataType': DataType.FLOAT, 'serializable': false, 'setter': null,
'getter': function()
{
/*
var result = this.getPropStoreFieldValue('defBondScreenLength');
if (!result)
{
var bLength = this.getDefBondLength();
result = this.translateDistance(bLength, Kekule.Render.CoordSys.CHEM, Kekule.Render.CoordSys.SCREEN);
}
return result;
*/
var cached = this._lengthCaches.defBondScreenLength;
if (cached)
return cached;
else
{
var bLength = this.getDefBondLength() || 0;
var result = this.translateDistance(bLength, Kekule.Render.CoordSystem.CHEM, Kekule.Render.CoordSystem.SCREEN);
this._lengthCaches.defBondScreenLength = result;
return result;
}
}
});
// Different pointer event (mouse, touch) has different bound inflation settings, stores here
this.defineProp('currBoundInflation', {'dataType': DataType.NUMBER, 'serializable': false, 'setter': null,
'getter': function(){
var pType = this.getCurrPointerType();
return this.getInteractionBoundInflation(pType);
}
});
// The recent pointer device interacted with this editor
this.defineProp('currPointerType', {'dataType': DataType.STRING, 'serializable': false});
//this.defineProp('standardizeObjectsBeforeSaving', {'dataType': DataType.BOOL});
this.defineProp('enableCreateNewDoc', {'dataType': DataType.BOOL, 'serializable': false});
this.defineProp('initOnNewDoc', {'dataType': DataType.BOOL, 'serializable': false});
this.defineProp('enableOperHistory', {'dataType': DataType.BOOL, 'serializable': false});
this.defineProp('operHistory', {
'dataType': 'Kekule.OperationHistory', 'serializable': false,
'getter': function()
{
/*
if (!this.getEnableOperHistory())
return null;
*/
var result = this.getPropStoreFieldValue('operHistory');
if (!result)
{
result = new Kekule.OperationHistory();
this.setPropStoreFieldValue('operHistory', result);
// install event handlers
result.addEventListener('push', this.reactOperHistoryPush, this);
result.addEventListener('pop', this.reactOperHistoryPop, this);
result.addEventListener('undo', this.reactOperHistoryUndo, this);
result.addEventListener('redo', this.reactOperHistoryRedo, this);
result.addEventListener('clear', this.reactOperHistoryClear, this);
result.addEventListener('change', this.reactOperHistoryChange, this);
}
return result;
},
'setter': null
});
this.defineProp('operationsInCurrManipulation', {'dataType': DataType.ARRAY, 'scope': Class.PropertyScope.PRIVATE, 'serializable': false}); // private
this.defineProp('selection', {'dataType': DataType.ARRAY, 'serializable': false,
'getter': function()
{
var result = this.getPropStoreFieldValue('selection');
if (!result)
{
result = [];
this.setPropStoreFieldValue('selection', result);
}
return result;
},
'setter': function(value)
{
this.setPropStoreFieldValue('selection', value);
this.selectionChanged();
}
});
this.defineProp('selectMode', {'dataType': DataType.INT,
'getter': function()
{
var result = this.getPropStoreFieldValue('selectMode');
if (Kekule.ObjUtils.isUnset(result))
result = Kekule.Editor.SelectMode.RECT; // default value
return result;
},
'setter': function(value)
{
if (this.getSelectMode() !== value)
{
//console.log('set select mode', value);
this.setPropStoreFieldValue('selectMode', value);
this.hideSelectingMarker();
}
}
});
// private, whether defaultly select in toggle mode
this.defineProp('isToggleSelectOn', {'dataType': DataType.BOOL});
this.defineProp('hotTrackedObjs', {'dataType': DataType.ARRAY, 'serializable': false,
'setter': function(value)
{
/*
if (this.getHotTrackedObjs() === value)
return;
*/
var objs = value? Kekule.ArrayUtils.toArray(value): [];
//console.log('setHotTrackedObjs', objs);
if (this.getEditorConfigs() && this.getEditorConfigs().getInteractionConfigs().getEnableHotTrack())
{
this.setPropStoreFieldValue('hotTrackedObjs', objs);
var bounds;
if (objs && objs.length)
{
bounds = [];
for (var i = 0, l = objs.length; i < l; ++i)
{
var bound = this.getBoundInfoRecorder().getBound(this.getObjContext(), objs[i]);
							if (bound)
{
//bounds.push(bound);
Kekule.ArrayUtils.pushUnique(bounds, bound); // bound may be an array of composite shape
}
}
}
if (bounds)
{
this.changeHotTrackMarkerBounds(bounds);
//console.log('show');
}
else
{
if (this.getUiHotTrackMarker().getVisible())
this.hideHotTrackMarker();
//console.log('hide');
}
}
}
});
this.defineProp('hotTrackedObj', {'dataType': DataType.OBJECT, 'serializable': false,
'getter': function() { return this.getHotTrackedObjs() && this.getHotTrackedObjs()[0]; },
'setter': function(value) { this.setHotTrackedObjs(value); }
});
this.defineProp('hoveredBasicObjs', {'dataType': DataType.ARRAY, 'serializable': false}); // a readonly array caching the basic objects at current pointer position
this.defineProp('enableOperContext', {'dataType': DataType.BOOL,
'setter': function(value)
{
this.setPropStoreFieldValue('enableOperContext', !!value);
if (!value) // release operContext
{
var ctx = this.getPropStoreFieldValue('operContext');
var b = this.getPropStoreFieldValue('drawBridge');
if (b && ctx)
b.releaseContext(ctx);
}
}
});
this.defineProp('issueCheckExecutor', {'dataType': 'Kekule.IssueCheck.Executor', 'serializable': false, 'setter': null,
'getter': function()
{
var result = this.getPropStoreFieldValue('issueCheckExecutor');
if (!result) // create default executor
{
result = this.createIssueCheckExecutor(); // new Kekule.IssueCheck.Executor();
var self = this;
result.addEventListener('execute', function(e){
self.setIssueCheckResults(e.checkResults);
});
this.setPropStoreFieldValue('issueCheckExecutor', result);
}
return result;
}
});
this.defineProp('issueCheckerIds', {'dataType': DataType.ARRAY,
'getter': function() { return this.getIssueCheckExecutor().getCheckerIds(); },
'setter': function(value) { this.getIssueCheckExecutor().setCheckerIds(value); }
});
this.defineProp('enableIssueCheck', {'dataType': DataType.BOOL,
'getter': function() { return this.getIssueCheckExecutor().getEnabled(); },
'setter': function(value) {
this.getIssueCheckExecutor().setEnabled(!!value);
			if (!value) // when issue check is disabled, clear the check results
{
this.setIssueCheckResults(null);
}
}
});
this.defineProp('issueCheckDurationLimit', {'dataType': DataType.NUMBER,
'getter': function() { return this.getIssueCheckExecutor().getDurationLimit(); },
'setter': function(value) { this.getIssueCheckExecutor().setDurationLimit(value); }
});
this.defineProp('enableAutoIssueCheck', {'dataType': DataType.BOOL,
'setter': function(value)
{
if (!!value !== this.getEnableAutoIssueCheck())
{
this.setPropStoreFieldValue('enableAutoIssueCheck', !!value);
				if (value) // when turned on from off, run an issue check
this.checkIssues();
// adjust property showAllIssueMarkers according to enableAutoIssueCheck
				// If auto check is on, markers should be shown by default;
				// if auto check is off, markers should be hidden by default.
this.setShowAllIssueMarkers(!!value);
}
}});
this.defineProp('issueCheckResults', {'dataType': DataType.ARRAY, 'serializable': false,
'setter': function(value)
{
var oldActive = this.getActiveIssueCheckResult();
this.setPropStoreFieldValue('issueCheckResults', value);
if (oldActive && (value || []).indexOf(oldActive) < 0)
this.setPropStoreFieldValue('activeIssueCheckResult', null);
this.issueCheckResultsChanged();
}
});
this.defineProp('activeIssueCheckResult', {'dataType': DataType.OBJECT, 'serializable': false,
'setter': function(value)
{
if (this.getActiveIssueCheckResult() !== value)
{
this.setPropStoreFieldValue('activeIssueCheckResult', value);
			if (value) // when an active issue is set, deselect all selections to make it stand out
this.deselectAll();
this.issueCheckResultsChanged();
}
}
});
this.defineProp('enableAutoScrollToActiveIssue', {'dataType': DataType.BOOL});
this.defineProp('showAllIssueMarkers', {'dataType': DataType.BOOL,
'setter': function(value)
{
if (!!value !== this.getShowAllIssueMarkers())
{
this.setPropStoreFieldValue('showAllIssueMarkers', !!value);
this.recalcIssueCheckUiMarkers();
}
}
});
this.defineProp('enableIssueMarkerHint', {'dataType': DataType.BOOL});
this.defineProp('enableGesture', {'dataType': DataType.BOOL,
'setter': function(value)
{
var bValue = !!value;
if (this.getEnableGesture() !== bValue)
{
this.setPropStoreFieldValue('enableGesture', bValue);
if (bValue)
{
this.startObservingGestureEvents(this.OBSERVING_GESTURES);
}
else
{
					this.stopObservingGestureEvents(this.OBSERVING_GESTURES);
}
}
}
});
// private
this.defineProp('uiEventReceiverElem', {'dataType': DataType.OBJECT, 'serializable': false, setter: null});
// context parent properties, private
this.defineProp('objContextParentElem', {'dataType': DataType.OBJECT, 'serializable': false, setter: null});
this.defineProp('operContextParentElem', {'dataType': DataType.OBJECT, 'serializable': false, setter: null});
this.defineProp('uiContextParentElem', {'dataType': DataType.OBJECT, 'serializable': false, setter: null});
this.defineProp('objContext', {'dataType': DataType.OBJECT, 'serializable': false, setter: null,
'getter': function() { return this.getDrawContext(); }
});
this.defineProp('operContext', {'dataType': DataType.OBJECT, 'serializable': false, 'setter': null,
'getter': function()
{
if (!this.getEnableOperContext())
return null;
else
{
var result = this.getPropStoreFieldValue('operContext');
if (!result)
{
var bridge = this.getDrawBridge();
if (bridge)
{
var elem = this.getOperContextParentElem();
if (!elem)
return null;
else
{
var dim = Kekule.HtmlElementUtils.getElemScrollDimension(elem);
result = bridge.createContext(elem, dim.width, dim.height);
this.setPropStoreFieldValue('operContext', result);
}
}
}
return result;
}
}
});
this.defineProp('uiContext', {'dataType': DataType.OBJECT, 'serializable': false,
'getter': function()
{
var result = this.getPropStoreFieldValue('uiContext');
if (!result)
{
var bridge = this.getUiDrawBridge();
if (bridge)
{
var elem = this.getUiContextParentElem();
if (!elem)
return null;
else
{
var dim = Kekule.HtmlElementUtils.getElemScrollDimension(elem);
//var dim = Kekule.HtmlElementUtils.getElemClientDimension(elem);
result = bridge.createContext(elem, dim.width, dim.height);
this.setPropStoreFieldValue('uiContext', result);
}
}
}
return result;
}
});
this.defineProp('objDrawBridge', {'dataType': DataType.OBJECT, 'serializable': false, 'setter': null,
'getter': function() { return this.getDrawBridge(); }
});
this.defineProp('uiDrawBridge', {'dataType': DataType.OBJECT, 'serializable': false, 'setter': null,
'getter': function()
{
var result = this.getPropStoreFieldValue('uiDrawBridge');
if (!result && !this.__$uiDrawBridgeInitialized$__)
{
this.__$uiDrawBridgeInitialized$__ = true;
result = this.createUiDrawBridge();
this.setPropStoreFieldValue('uiDrawBridge', result);
}
return result;
}
});
this.defineProp('uiPainter', {'dataType': 'Kekule.Render.ChemObjPainter', 'serializable': false, 'setter': null,
'getter': function()
{
var result = this.getPropStoreFieldValue('uiPainter');
if (!result)
{
				// ui painter will always be in 2D mode
var markers = this.getUiMarkers();
result = new Kekule.Render.ChemObjPainter(Kekule.Render.RendererType.R2D, markers, this.getUiDrawBridge());
result.setCanModifyTargetObj(true);
this.setPropStoreFieldValue('uiPainter', result);
return result;
}
return result;
}
});
this.defineProp('uiRenderer', {'dataType': 'Kekule.Render.AbstractRenderer', 'serializable': false, 'setter': null,
'getter': function()
{
var p = this.getUiPainter();
if (p)
{
var r = p.getRenderer();
if (!r)
p.prepareRenderer();
return p.getRenderer() || null;
}
else
return null;
}
});
// private ui marks properties
//this.defineProp('uiMarkers', {'dataType': DataType.ARRAY, 'serializable': false, 'setter': null});
this.defineProp('uiMarkers', {'dataType': 'Kekule.ChemWidget.UiMarkerCollection', 'serializable': false, 'setter': null,
'getter': function()
{
var result = this.getPropStoreFieldValue('uiMarkers');
if (!result)
{
result = new Kekule.ChemWidget.UiMarkerCollection();
this.setPropStoreFieldValue('uiMarkers', result);
}
return result;
}
});
/*
this.defineProp('uiHotTrackMarker', {'dataType': 'Kekule.ChemWidget.AbstractUIMarker', 'serializable': false,
'getter': function() { return this.getUiMarkers().hotTrackMarker; },
'setter': function(value) { this.getUiMarkers().hotTrackMarker = value; }
});
this.defineProp('uiSelectionAreaMarker', {'dataType': 'Kekule.ChemWidget.AbstractUIMarker', 'serializable': false,
'getter': function() { return this.getUiMarkers().selectionAreaMarker; },
'setter': function(value) { this.getUiMarkers().selectionAreaMarker = value; }
});
this.defineProp('uiSelectingMarker', {'dataType': 'Kekule.ChemWidget.AbstractUIMarker', 'serializable': false,
'getter': function() { return this.getUiMarkers().selectingMarker; },
'setter': function(value) { this.getUiMarkers().selectingMarker = value; }
}); // marker of selecting rubber band
*/
this._defineUiMarkerProp('uiHotTrackMarker');
this._defineUiMarkerProp('uiSelectionAreaMarker'); // marker of selected range
this._defineUiMarkerProp('uiSelectingMarker'); // marker of selecting rubber band
this._defineIssueCheckUiMarkerGroupProps(); // error check marker group
this.defineProp('uiSelectionAreaContainerBox',
{'dataType': DataType.Object, 'serializable': false, 'scope': Class.PropertyScope.PRIVATE});
// a private chemObj-renderer map
this.defineProp('objRendererMap', {'dataType': 'Kekule.MapEx', 'serializable': false, 'setter': null,
'getter': function()
{
var result = this.getPropStoreFieldValue('objRendererMap');
if (!result)
{
result = new Kekule.MapEx(true);
this.setPropStoreFieldValue('objRendererMap', result);
}
return result;
}
});
// private object to record all bound infos
//this.defineProp('boundInfoRecorder', {'dataType': 'Kekule.Render.BoundInfoRecorder', 'serializable': false, 'setter': null});
this.defineProp('zoomCenter', {'dataType': DataType.HASH});
},
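	/*
	 * Configuration sketch (illustrative only, not from the original source). Each defineProp call
	 * above generates a getXxx/setXxx accessor pair, so an instance can be configured directly.
	 * Assumes `editor` is a created instance of this editor widget:
	 *
	 * editor.setSelectMode(Kekule.Editor.SelectMode.RECT);  // rectangle selection (the default above)
	 * editor.setEnableOperContext(true);   // render objects under manipulation in a separate context
	 * editor.setEnableIssueCheck(true);    // delegates to the issue check executor
	 * editor.setEnableGesture(false);      // stop observing gesture events
	 */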
/** @ignore */
initPropValues: function(/*$super*/)
{
this.tryApplySuper('initPropValues') /* $super() */;
this.setOperationsInCurrManipulation([]);
/*
this.setEnableAutoIssueCheck(false);
this.setEnableAutoScrollToActiveIssue(true);
var ICIDs = Kekule.IssueCheck.CheckerIds;
this.setIssueCheckerIds([ICIDs.ATOM_VALENCE, ICIDs.BOND_ORDER, ICIDs.NODE_DISTANCE_2D]);
*/
var ICIDs = Kekule.IssueCheck.CheckerIds;
var getGlobalOptionValue = Kekule.globalOptions.get;
this.setEnableAutoIssueCheck(getGlobalOptionValue('chemWidget.editor.issueChecker.enableAutoIssueCheck', true));
this.setEnableAutoScrollToActiveIssue(getGlobalOptionValue('chemWidget.editor.issueChecker.enableAutoScrollToActiveIssue', true));
this.setIssueCheckerIds(getGlobalOptionValue('chemWidget.editor.issueChecker.issueCheckerIds', [ICIDs.ATOM_VALENCE, ICIDs.BOND_ORDER, ICIDs.NODE_DISTANCE_2D]));
this.setIssueCheckDurationLimit(getGlobalOptionValue('chemWidget.editor.issueChecker.durationLimit') || null);
this.setEnableIssueMarkerHint(getGlobalOptionValue('chemWidget.editor.issueChecker.enableIssueMarkerHint') || this.getEnableAutoIssueCheck());
},
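	/*
	 * Sketch (illustrative only): the defaults above come from Kekule.globalOptions
	 * ('chemWidget.editor.issueChecker.*'), but each instance can still override them afterwards
	 * through the generated setters. Assumes `editor` is an instance of this editor widget:
	 *
	 * var ICIDs = Kekule.IssueCheck.CheckerIds;
	 * editor.setEnableAutoIssueCheck(true);
	 * editor.setIssueCheckerIds([ICIDs.ATOM_VALENCE, ICIDs.BOND_ORDER]);
	 * editor.setIssueCheckDurationLimit(50);  // the unit of this limit is an assumption here
	 */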
/** @private */
_defineUiMarkerProp: function(propName, uiMarkerCollection)
{
return this.defineProp(propName, {'dataType': 'Kekule.ChemWidget.AbstractUIMarker', 'serializable': false,
'getter': function()
{
var result = this.getPropStoreFieldValue(propName);
if (!result)
{
				result = this.createShapeBasedMarker(propName, null, null, false); // prop value has already been set inside createShapeBasedMarker
}
return result;
},
'setter': function(value)
{
if (!uiMarkerCollection)
uiMarkerCollection = this.getUiMarkers();
var old = this.getPropValue(propName);
if (old)
{
uiMarkerCollection.removeMarker(old);
old.finalize();
}
uiMarkerCollection.addMarker(value);
this.setPropStoreFieldValue(propName, value);
}
});
},
/** @private */
_defineIssueCheckUiMarkerGroupProps: function(uiMarkerCollection)
{
var EL = Kekule.ErrorLevel;
var getSubPropName = function(baseName, errorLevel)
{
return baseName + '_' + EL.levelToString(errorLevel);
};
var baseName = 'issueCheckUiMarker';
var errorLevels = [EL.ERROR, EL.WARNING, EL.NOTE, EL.LOG];
for (var i = 0, l = errorLevels.length; i < l; ++i)
{
var pname = getSubPropName(baseName, errorLevels[i]);
this._defineUiMarkerProp(pname, uiMarkerCollection);
}
		this._defineUiMarkerProp(baseName + '_active', uiMarkerCollection); // 'active' is a special marker for the currently selected issue objects
// define getter/setter method for group
this['get' + baseName.upperFirst()] = function(level){
var p = getSubPropName(baseName, level);
return this.getPropValue(p);
};
this['set' + baseName.upperFirst()] = function(level, value){
var p = getSubPropName(baseName, level);
return this.setPropValue(p, value);
};
this['getActive' + baseName.upperFirst()] = function()
{
return this.getPropValue(baseName + '_active');
};
this['getAll' + baseName.upperFirst() + 's'] = function(){
var result = [];
for (var i = 0, l = errorLevels.length; i < l; ++i)
{
result.push(this.getIssueCheckUiMarker(errorLevels[i]));
}
var activeMarker = this.getActiveIssueCheckUiMarker();
if (activeMarker)
result.push(activeMarker);
return result;
}
},
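	/*
	 * Sketch of the accessors generated above (illustrative only). For the base name
	 * 'issueCheckUiMarker', one marker property is defined per error level plus an '_active' one,
	 * and the group getters/setters resolve an error level to the matching property.
	 * Assumes `editor` is an instance of this editor widget:
	 *
	 * var EL = Kekule.ErrorLevel;
	 * var errMarker = editor.getIssueCheckUiMarker(EL.ERROR);   // marker for error-level issues
	 * var activeMarker = editor.getActiveIssueCheckUiMarker();  // marker of the selected issue
	 * var allMarkers = editor.getAllIssueCheckUiMarkers();      // all level markers plus the active one
	 */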
/** @private */
doFinalize: function(/*$super*/)
{
var h = this.getPropStoreFieldValue('operHistory');
if (h)
{
h.finalize();
this.setPropStoreFieldValue('operHistory', null);
}
var b = this.getPropStoreFieldValue('objDrawBridge');
var ctx = this.getPropStoreFieldValue('operContext');
if (b && ctx)
{
b.releaseContext(ctx);
}
this.setPropStoreFieldValue('operContext', null);
var b = this.getPropStoreFieldValue('uiDrawBridge');
var ctx = this.getPropStoreFieldValue('uiContext');
if (b && ctx)
{
b.releaseContext(ctx);
}
this.setPropStoreFieldValue('uiDrawBridge', null);
this.setPropStoreFieldValue('uiContext', null);
var m = this.getPropStoreFieldValue('objRendererMap');
if (m)
m.finalize();
this.setPropStoreFieldValue('objRendererMap', null);
var e = this.getPropStoreFieldValue('issueCheckExecutor');
if (e)
e.finalize();
this.setPropStoreFieldValue('issueCheckExecutor', null);
this.tryApplySuper('doFinalize') /* $super() */;
},
/** @ignore */
elementBound: function(element)
{
this.setObserveElemResize(true);
},
/**
* Create a default editor config object.
* Descendants may override this method.
* @returns {Kekule.Editor.BaseEditorConfigs}
* @ignore
*/
createDefaultConfigs: function()
{
return new Kekule.Editor.BaseEditorConfigs();
},
/** @ignore */
doCreateRootElement: function(doc)
{
var result = doc.createElement('div');
return result;
},
/** @ignore */
doCreateSubElements: function(doc, rootElem)
{
var elem = doc.createElement('div');
elem.className = CCNS.EDITOR_CLIENT;
rootElem.appendChild(elem);
this._editClientElem = elem;
return [elem];
},
/** @ignore */
getCoreElement: function(/*$super*/)
{
return this._editClientElem || this.tryApplySuper('getCoreElement') /* $super() */;
},
/** @private */
getEditClientElem: function()
{
return this._editClientElem;
},
/** @ignore */
doGetWidgetClassName: function(/*$super*/)
{
var result = this.tryApplySuper('doGetWidgetClassName') /* $super() */ + ' ' + CCNS.EDITOR;
var additional = (this.getRenderType() === Kekule.Render.RendererType.R3D)?
CCNS.EDITOR3D: CCNS.EDITOR2D;
result += ' ' + additional;
return result;
},
/** @private */
doBindElement: function(element)
{
this.createContextParentElems();
this.createUiEventReceiverElem();
},
	// override getter and setter of initialZoom property
/** @ignore */
doGetInitialZoom: function(/*$super*/)
{
var result;
var config = this.getEditorConfigs();
if (config)
result = config.getInteractionConfigs().getEditorInitialZoom();
if (!result)
result = this.tryApplySuper('doGetInitialZoom') /* $super() */;
return result;
},
/** @ignore */
doSetInitialZoom: function(/*$super, */value)
{
var config = this.getEditorConfigs();
if (config)
config.getInteractionConfigs().setEditorInitialZoom(value);
this.tryApplySuper('doSetInitialZoom', [value]) /* $super(value) */;
},
/** @ignore */
zoomTo: function(/*$super, */value, suspendRendering, zoomCenterCoord)
{
var CU = Kekule.CoordUtils;
var currZoomLevel = this.getCurrZoom();
var zoomLevel = value;
var result = this.tryApplySuper('zoomTo', [value, suspendRendering]) /* $super(value, suspendRendering) */;
// adjust zoom center
var selfElem = this.getElement();
var currScrollCoord = {'x': selfElem.scrollLeft, 'y': selfElem.scrollTop};
if (!zoomCenterCoord)
zoomCenterCoord = this.getZoomCenter();
if (!zoomCenterCoord ) // use the center of client as the zoom center
{
zoomCenterCoord = CU.add(currScrollCoord, {'x': selfElem.clientWidth / 2, 'y': selfElem.clientHeight / 2});
}
//console.log('zoom center info', this.getZoomCenter(), zoomCenterCoord);
//if (zoomCenterCoord)
{
var scrollDelta = CU.multiply(zoomCenterCoord, zoomLevel / currZoomLevel - 1);
selfElem.scrollLeft += scrollDelta.x;
selfElem.scrollTop += scrollDelta.y;
}
return result;
},
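	/*
	 * Usage sketch (illustrative only): zoom to 150% while keeping a given screen point fixed.
	 * When zoomCenterCoord is omitted, the center of the visible client area is used, as implemented
	 * above. Assumes `editor` is an instance of this editor widget:
	 *
	 * editor.zoomTo(1.5, false, {'x': 200, 'y': 120});  // zoom around screen coord (200, 120)
	 * editor.zoomTo(editor.getInitialZoom() || 1);      // back to the initial zoom level
	 */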
/**
* Zoom in.
*/
/*
zoomIn: function(step, zoomCenterCoord)
{
var curr = this.getCurrZoom();
var ratio = Kekule.ZoomUtils.getNextZoomInRatio(curr, step || 1);
return this.zoomTo(ratio, null, zoomCenterCoord);
},
*/
/**
* Zoom out.
*/
/*
zoomOut: function(step, zoomCenterCoord)
{
var curr = this.getCurrZoom();
var ratio = Kekule.ZoomUtils.getNextZoomOutRatio(curr, step || 1);
return this.zoomTo(ratio, null, zoomCenterCoord);
},
*/
/**
* Reset to normal size.
*/
/*
resetZoom: function(zoomCenterCoord)
{
return this.zoomTo(this.getInitialZoom() || 1, null, zoomCenterCoord);
},
*/
/**
* Change the size of client element.
* Width and height is based on px.
* @private
*/
changeClientSize: function(width, height, zoomLevel)
{
this._initialRenderTransformParams = null;
var elem = this.getCoreElement();
var style = elem.style;
if (!zoomLevel)
zoomLevel = 1;
var w = width * zoomLevel;
var h = height * zoomLevel;
if (w)
style.width = w + 'px';
if (h)
style.height = h + 'px';
var ctxes = [this.getObjContext(), this.getOperContext(), this.getUiContext()];
for (var i = 0, l = ctxes.length; i < l; ++i)
{
var ctx = ctxes[i];
if (ctx) // change ctx size also
{
this.getDrawBridge().setContextDimension(ctx, w, h);
}
}
this.repaint();
},
/**
* Returns the screen box (x1, y1, x2, y2) of current visible client area in editor.
* @returns {Hash}
*/
getVisibleClientScreenBox: function()
{
var elem = this.getEditClientElem().parentNode;
var result = Kekule.HtmlElementUtils.getElemClientDimension(elem);
var pos = this.getClientScrollPosition();
result.x1 = pos.x;
result.y1 = pos.y;
result.x2 = result.x1 + result.width;
result.y2 = result.y1 + result.height;
return result;
},
/**
* Returns the context box (x1, y1, x2, y2, in a specified coord system) of current visible client area in editor.
* @param {Int} coordSys
* @returns {Hash}
*/
getVisibleClientBoxOfSys: function(coordSys)
{
var screenBox = this.getVisibleClientScreenBox();
var coords = Kekule.BoxUtils.getMinMaxCoords(screenBox);
var c1 = this.translateCoord(coords.min, Kekule.Editor.CoordSys.SCREEN, coordSys);
var c2 = this.translateCoord(coords.max, Kekule.Editor.CoordSys.SCREEN, coordSys);
var result = Kekule.BoxUtils.createBox(c1, c2);
return result;
},
/**
* Returns the context box (x1, y1, x2, y2, in object coord system) of current visible client area in editor.
* @param {Int} coordSys
* @returns {Hash}
*/
getVisibleClientObjBox: function(coordSys)
{
return this.getVisibleClientBoxOfSys(Kekule.Editor.CoordSys.CHEM);
},
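	/*
	 * Usage sketch (illustrative only): query the visible client area in different coord systems.
	 * Assumes `editor` is an instance of this editor widget:
	 *
	 * var screenBox = editor.getVisibleClientScreenBox();  // {x1, y1, x2, y2, width, height} in screen coords
	 * var chemBox = editor.getVisibleClientBoxOfSys(Kekule.Editor.CoordSys.CHEM);  // same area in chem object coords
	 */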
/**
* Returns whether the chem object inside editor has been modified since load.
* @returns {Bool}
*/
isDirty: function()
{
if (this.getEnableOperHistory())
return this.getOperHistory().getCurrIndex() >= 0;
else
return this._objChanged;
},
/**
	 * Returns srcInfo of chemObj. If editor is dirty (the object has been modified), srcInfo will be unavailable.
* @param {Kekule.ChemObject} chemObj
* @returns {Object}
*/
getChemObjSrcInfo: function(chemObj)
{
if (this.isDirty())
return null;
else
return chemObj.getSrcInfo? chemObj.getSrcInfo(): null;
},
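	/*
	 * Usage sketch (illustrative only): check whether the document has been modified before deciding
	 * how to save it. Assumes `editor` is an instance of this editor widget with an object loaded:
	 *
	 * if (!editor.isDirty())
	 * {
	 *   var srcInfo = editor.getChemObjSrcInfo(editor.getChemObj());
	 *   // ... if srcInfo is not null, the original source data may be reused instead of re-serializing ...
	 * }
	 */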
/* @private */
/*
_calcPreferedTransformOptions: function()
{
var drawOptions = this.getDrawOptions();
return this.getPainter().calcPreferedTransformOptions(
this.getObjContext(), this.calcDrawBaseCoord(drawOptions), drawOptions);
},
*/
/** @private */
getActualDrawOptions: function(/*$super*/)
{
var old = this.tryApplySuper('getActualDrawOptions') /* $super() */;
if (this._initialRenderTransformParams)
{
var result = Object.extend({}, this._initialRenderTransformParams);
result = Object.extend(result, old);
//var result = Object.create(old);
//result.initialRenderTransformParams = this._initialRenderTransformParams;
//console.log('extended', this._initialRenderTransformParams, result);
return result;
}
else
return old;
},
/** @ignore */
/*
getDrawClientDimension: function()
{
},
*/
/** @ignore */
repaint: function(/*$super, */overrideOptions)
{
var ops = overrideOptions;
//console.log('repaint called', overrideOptions);
//console.log('repaint', this._initialRenderTransformParams);
/*
if (this._initialRenderTransformParams)
{
ops = Object.create(overrideOptions || {});
//console.log(this._initialRenderTransformParams);
ops = Object.extend(ops, this._initialRenderTransformParams);
}
else
{
ops = overrideOptions;
//this._initialRenderTransformParams = this._calcPreferedTransformOptions();
//console.log('init params: ', this._initialRenderTransformParams, drawOptions);
}
*/
var result = this.tryApplySuper('repaint', [ops]) /* $super(ops) */;
if (this.isRenderable())
{
			// after painting the new obj for the first time, save the transform params (especially the translates)
if (!this._initialRenderTransformParams)
{
this._initialRenderTransformParams = this.getPainter().getActualInitialRenderTransformOptions(this.getObjContext());
/*
if (transParam)
{
var trans = {}
var unitLength = transParam.unitLength || 1;
if (Kekule.ObjUtils.notUnset(transParam.translateX))
trans.translateX = transParam.translateX / unitLength;
if (Kekule.ObjUtils.notUnset(transParam.translateY))
trans.translateY = transParam.translateY / unitLength;
if (Kekule.ObjUtils.notUnset(transParam.translateZ))
trans.translateZ = transParam.translateZ / unitLength;
if (transParam.center)
trans.center = transParam.center;
//var zoom = transParam.zoom || 1;
var zoom = 1;
trans.scaleX = transParam.scaleX / zoom;
trans.scaleY = transParam.scaleY / zoom;
trans.scaleZ = transParam.scaleZ / zoom;
this._initialRenderTransformParams = trans;
console.log(this._initialRenderTransformParams, this);
}
*/
}
// redraw ui markers
this.recalcUiMarkers();
}
return result;
},
/**
* Create a new object and load it in editor.
*/
newDoc: function()
{
		//if (this.getEnableCreateNewDoc()) // the enable property only affects the UI; a new doc can always be created from code
this.load(this.doCreateNewDocObj());
},
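	/*
	 * Usage sketch (illustrative only): start a fresh document or load an existing object.
	 * Assumes `editor` is an instance of this editor widget and `mol` is a Kekule.Molecule:
	 *
	 * editor.newDoc();   // loads the empty object created by doCreateNewDocObj()
	 * editor.load(mol);  // or load an existing chem object instead
	 */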
/**
* Create a new object for new document.
* Descendants may override this method.
* @private
*/
doCreateNewDocObj: function()
{
return new Kekule.Molecule();
},
/**
* Returns array of classes that can be exported (saved) from editor.
* Descendants can override this method.
* @returns {Array}
*/
getExportableClasses: function()
{
var obj = this.getChemObj();
if (!obj)
return [];
else
return obj.getClass? [obj.getClass()]: [];
},
/**
* Returns exportable object for specified class.
* Descendants can override this method.
* @param {Class} objClass Set null to export default object.
* @returns {Object}
*/
exportObj: function(objClass)
{
return this.exportObjs(objClass)[0];
},
/**
* Returns all exportable objects for specified class.
* Descendants can override this method.
* @param {Class} objClass Set null to export default object.
* @returns {Array}
*/
exportObjs: function(objClass)
{
var obj = this.getChemObj();
if (!objClass)
return [obj];
else
{
return (obj && (obj instanceof objClass))? [obj]: [];
}
},
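	/*
	 * Usage sketch (illustrative only): export the edited object.
	 * Assumes `editor` is an instance of this editor widget:
	 *
	 * var obj = editor.exportObj();                   // default export, same as exportObjs(null)[0]
	 * var mols = editor.exportObjs(Kekule.Molecule);  // only returns the loaded object if it is a molecule
	 */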
/** @private */
doLoad: function(/*$super, */chemObj)
{
// deselect all old objects first
this.deselectAll();
this._initialRenderTransformParams = null;
// clear rendererMap so that all old renderer info is removed
this.getObjRendererMap().clear();
if (this.getOperHistory())
this.getOperHistory().clear();
this.tryApplySuper('doLoad', [chemObj]) /* $super(chemObj) */;
this._objChanged = false;
// clear hovered object cache
this.setPropStoreFieldValue('hoveredBasicObjs', null);
// clear issue results
this.setIssueCheckResults(null);
this.requestAutoCheckIssuesIfNecessary();
},
/** @private */
doLoadEnd: function(/*$super, */chemObj)
{
this.tryApplySuper('doLoadEnd') /* $super() */;
//console.log('loadend: ', chemObj);
if (!chemObj)
this._initialRenderTransformParams = null;
/*
else
{
// after load the new obj the first time, save up the transform params (especially the translates)
var transParam = this.getPainter().getActualRenderTransformParams(this.getObjContext());
if (transParam)
{
var trans = {}
var unitLength = transParam.unitLength || 1;
if (Kekule.ObjUtils.notUnset(transParam.translateX))
trans.translateX = transParam.translateX / unitLength;
if (Kekule.ObjUtils.notUnset(transParam.translateY))
trans.translateY = transParam.translateY / unitLength;
if (Kekule.ObjUtils.notUnset(transParam.translateZ))
trans.translateZ = transParam.translateZ / unitLength;
this._initialRenderTransformParams = trans;
console.log(this._initialRenderTransformParams, this);
}
}
*/
},
/** @private */
doResize: function(/*$super*/)
{
//console.log('doResize');
this._initialRenderTransformParams = null; // transform should be recalculated after resize
this.tryApplySuper('doResize') /* $super() */;
},
/** @ignore */
geometryOptionChanged: function(/*$super*/)
{
var zoom = this.getDrawOptions().zoom;
this.zoomChanged(zoom);
// clear some length related caches
this._clearLengthCaches();
this.tryApplySuper('geometryOptionChanged') /* $super() */;
},
/** @private */
zoomChanged: function(zoomLevel)
{
// do nothing here
},
/** @private */
_clearLengthCaches: function()
{
this._lengthCaches = {};
},
/**
* @private
*/
chemObjChanged: function(/*$super, */newObj, oldObj)
{
this.tryApplySuper('chemObjChanged', [newObj, oldObj]) /* $super(newObj, oldObj) */;
if (newObj !== oldObj)
{
if (oldObj)
this._uninstallChemObjEventListener(oldObj);
if (newObj)
this._installChemObjEventListener(newObj);
}
},
/** @private */
_installChemObjEventListener: function(chemObj)
{
chemObj.addEventListener('change', this.reactChemObjChange, this);
},
/** @private */
_uninstallChemObjEventListener: function(chemObj)
{
chemObj.removeEventListener('change', this.reactChemObjChange, this);
},
/**
	 * Create a transparent div element above all other elements of the editor;
	 * this element is used to receive all UI events.
*/
createUiEventReceiverElem: function()
{
var parent = this.getCoreElement();
if (parent)
{
var result = parent.ownerDocument.createElement('div');
result.className = CCNS.EDITOR_UIEVENT_RECEIVER;
/*
result.id = 'overlayer';
*/
/*
var style = result.style;
style.background = 'transparent';
//style.background = 'yellow';
//style.opacity = 0;
style.position = 'absolute';
style.left = 0;
style.top = 0;
style.width = '100%';
style.height = '100%';
*/
//style.zIndex = 1000;
parent.appendChild(result);
EU.addClass(result, CNS.DYN_CREATED);
this.setPropStoreFieldValue('uiEventReceiverElem', result);
return result;
}
},
/** @private */
createContextParentElems: function()
{
var parent = this.getCoreElement();
if (parent)
{
var doc = parent.ownerDocument;
this._createContextParentElem(doc, parent, 'objContextParentElem');
this._createContextParentElem(doc, parent, 'operContextParentElem');
this._createContextParentElem(doc, parent, 'uiContextParentElem');
}
},
/** @private */
_createContextParentElem: function(doc, parentElem, contextElemPropName)
{
var result = doc.createElement('div');
result.style.position = 'absolute';
result.style.width = '100%';
result.style.height = '100%';
result.className = contextElemPropName + ' ' + CNS.DYN_CREATED; // debug
this.setPropStoreFieldValue(contextElemPropName, result);
parentElem.appendChild(result);
return result;
},
/** @private */
createNewPainter: function(/*$super, */chemObj)
{
var result = this.tryApplySuper('createNewPainter', [chemObj]) /* $super(chemObj) */;
if (result)
{
result.setCanModifyTargetObj(true);
this.installPainterEventHandlers(result);
/* Moved up to class ChemObjDisplayer
// create new bound info recorder
this.createNewBoundInfoRecorder(this.getPainter());
*/
}
return result;
},
/** @private */
/* Moved up to class ChemObjDisplayer
createNewBoundInfoRecorder: function(renderer)
{
var old = this.getPropStoreFieldValue('boundInfoRecorder');
if (old)
old.finalize();
var recorder = new Kekule.Render.BoundInfoRecorder(renderer);
//recorder.setTargetContext(this.getObjContext());
this.setPropStoreFieldValue('boundInfoRecorder', recorder);
},
*/
/** @private */
getDrawContextParentElem: function()
{
return this.getObjContextParentElem();
},
/** @private */
createUiDrawBridge: function()
{
// UI marker will always be in 2D
var result = Kekule.Render.DrawBridge2DMananger.getPreferredBridgeInstance();
if (!result) // can not find suitable draw bridge
{
//Kekule.error(Kekule.$L('ErrorMsg.DRAW_BRIDGE_NOT_SUPPORTED'));
var errorMsg = Kekule.Render.DrawBridge2DMananger.getUnavailableMessage() || Kekule.error(Kekule.$L('ErrorMsg.DRAW_BRIDGE_NOT_SUPPORTED'));
if (errorMsg)
this.reportException(errorMsg, Kekule.ExceptionLevel.NOT_FATAL_ERROR);
}
return result;
},
/* @private */
/*
refitDrawContext: function($super, doNotRepaint)
{
//var dim = Kekule.HtmlElementUtils.getElemScrollDimension(this.getElement());
var dim = Kekule.HtmlElementUtils.getElemClientDimension(this.getElement());
//this._resizeContext(this.getObjDrawContext(), this.getObjDrawBridge(), dim.width, dim.height);
this._resizeContext(this.getOperContext(), this.getObjDrawBridge(), dim.width, dim.height);
this._resizeContext(this.getUiContext(), this.getUiDrawBridge(), dim.width, dim.height);
$super(doNotRepaint);
},
*/
/** @private */
changeContextDimension: function(/*$super, */newDimension)
{
var result = this.tryApplySuper('changeContextDimension', [newDimension]) /* $super(newDimension) */;
if (result)
{
this._resizeContext(this.getOperContext(), this.getObjDrawBridge(), newDimension.width, newDimension.height);
this._resizeContext(this.getUiContext(), this.getUiDrawBridge(), newDimension.width, newDimension.height);
}
return result;
},
/** @private */
_clearSpecContext: function(context, bridge)
{
if (bridge && context)
bridge.clearContext(context);
},
/** @private */
_renderSpecContext: function(context, bridge)
{
if (bridge && context)
bridge.renderContext(context);
},
/**
* Clear the main context.
* @private
*/
clearObjContext: function()
{
//console.log('clear obj context', this.getObjContext() === this.getDrawContext());
this._clearSpecContext(this.getObjContext(), this.getDrawBridge());
if (this.getBoundInfoRecorder())
this.getBoundInfoRecorder().clear(this.getObjContext());
},
/**
* Clear the operating context.
* @private
*/
clearOperContext: function()
{
this._clearSpecContext(this.getOperContext(), this.getDrawBridge());
},
/**
* Clear the UI layer context.
* @private
*/
clearUiContext: function()
{
this._clearSpecContext(this.getUiContext(), this.getUiDrawBridge());
},
/** @private */
clearContext: function()
{
this.clearObjContext();
if (this._operatingRenderers)
this.clearOperContext();
},
/**
* Repaint the operating context only (not the whole obj context).
* @private
*/
repaintOperContext: function(ignoreUiMarker)
{
if (this._operatingRenderers && this._operatingObjs)
{
this.clearOperContext();
try
{
var options = {'partialDrawObjs': this._operatingObjs, 'doNotClear': true};
this.repaint(options);
}
finally
{
				this._renderSpecContext(this.getOperContext(), this.getDrawBridge()); // important, finishes the rendering of the oper context
}
/*
var context = this.getObjContext();
//console.log(this._operatingRenderers.length);
for (var i = 0, l = this._operatingRenderers.length; i < l; ++i)
{
var renderer = this._operatingRenderers[i];
console.log('repaint oper', renderer.getClassName(), renderer.getChemObj().getId(), !!renderer.getRedirectContext(), this._operatingRenderers.length);
renderer.redraw(context);
}
if (!ignoreUiMarker)
this.recalcUiMarkers();
*/
}
},
/** @private */
getOperatingRenderers: function()
{
if (!this._operatingRenderers)
this._operatingRenderers = [];
return this._operatingRenderers;
},
/** @private */
setOperatingRenderers: function(value)
{
this._operatingRenderers = value;
},
//////////////////////////////////////////////////////////////////////
/////////////////// methods about painter ////////////////////////////
/** @private */
installPainterEventHandlers: function(painter)
{
painter.addEventListener('prepareDrawing', this.reactChemObjPrepareDrawing, this);
painter.addEventListener('clear', this.reactChemObjClear, this);
},
/** @private */
reactChemObjPrepareDrawing: function(e)
{
var ctx = e.context;
var obj = e.obj;
if (obj && ((ctx === this.getObjContext()) || (ctx === this.getOperContext())))
{
var renderer = e.target;
this.getObjRendererMap().set(obj, renderer);
//console.log('object drawn', obj, obj.getClassName(), renderer, renderer.getClassName());
// check if renderer should be redirected to oper context
if (this.getEnableOperContext())
{
var operObjs = this._operatingObjs || [];
var needRedirect = false;
for (var i = 0, l = operObjs.length; i < l; ++i)
{
if (this._isChemObjDirectlyRenderedByRenderer(this.getObjContext(), operObjs[i], renderer))
{
needRedirect = true;
break;
}
}
if (needRedirect)
{
this._setRendererToOperContext(renderer);
//console.log('do redirect', renderer.getClassName(), obj && obj.getId && obj.getId());
AU.pushUnique(this.getOperatingRenderers(), renderer);
}
/*
else
{
this._unsetRendererToOperContext(renderer);
console.log('unset redirect', renderer.getClassName(), obj && obj.getId && obj.getId());
}
*/
}
}
},
/** @private */
reactChemObjClear: function(e)
{
var ctx = e.context;
var obj = e.obj;
if (obj && ((ctx === this.getObjContext()) || (ctx === this.getOperContext())))
{
var renderer = e.target;
this.getObjRendererMap().remove(obj);
AU.remove(this.getOperatingRenderers(), renderer);
}
},
//////////////////////////////////////////////////////////////////////
/////////////////// event handlers of nested objects ///////////////////////
/**
* React to change event of loaded chemObj.
* @param {Object} e
*/
reactChemObjChange: function(e)
{
var target = e.target;
var propNames = e.changedPropNames || [];
var bypassPropNames = ['id', 'owner', 'ownedObjs']; // these properties do not affect rendering
propNames = Kekule.ArrayUtils.exclude(propNames, bypassPropNames);
		if (propNames.length || !e.changedPropNames) // when changedPropNames is not set, this may be a change event invoked by the parent while suppressing child objects
{
//console.log('chem obj change', target.getClassName(), propNames, e);
this.objectChanged(target, propNames);
}
},
/** @private */
reactOperHistoryPush: function(e)
{
this.invokeEvent('operPush', e);
},
/** @private */
reactOperHistoryPop: function(e)
{
this.invokeEvent('operPop', e);
},
/** @private */
reactOperHistoryUndo: function(e)
{
this.invokeEvent('operUndo', e);
},
/** @private */
reactOperHistoryRedo: function(e)
{
this.invokeEvent('operRedo', e);
},
reactOperHistoryClear: function(e)
{
this.invokeEvent('operHistoryClear', e);
},
reactOperHistoryChange: function(e)
{
this.invokeEvent('operChange', e);
},
/////////////////////////////////////////////////////////////////////////////
///////////// Methods about object changing notification ////////////////////
/**
* Call this method to temporarily suspend object change notification.
*/
beginUpdateObject: function()
{
if (this._objectUpdateFlag >= 0)
{
this.invokeEvent('beginUpdateObject');
}
--this._objectUpdateFlag;
},
/**
* Call this method to indicate the update process is over and objectChanged will be immediately called.
*/
endUpdateObject: function()
{
++this._objectUpdateFlag;
if (!this.isUpdatingObject())
{
if ((this._updatedObjectDetails && this._updatedObjectDetails.length))
{
this.objectsChanged(this._updatedObjectDetails);
this._updatedObjectDetails = [];
}
this._execAfterUpdateObjectProcs();
this.invokeEvent('endUpdateObject'/*, {'details': Object.extend({}, this._updatedObjectDetails)}*/);
}
},
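	/*
	 * Usage sketch (illustrative only): batch several changes to the loaded chem object so that
	 * repainting and issue checking run once, when the outermost endUpdateObject call returns.
	 * Assumes `editor` is an instance of this editor widget:
	 *
	 * editor.beginUpdateObject();
	 * try
	 * {
	 *   // ... modify the loaded chem object here; objectChanged notifications are cached ...
	 * }
	 * finally
	 * {
	 *   editor.endUpdateObject();  // flushes the cached object details via objectsChanged()
	 * }
	 */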
/**
* Check if beginUpdateObject is called and should not send object change notification immediately.
*/
isUpdatingObject: function()
{
return (this._objectUpdateFlag < 0);
},
/**
* Register suspended function called right after endUpdateObject method.
* @private
*/
_registerAfterUpdateObjectProc: function(proc)
{
if (this.isUpdatingObject())
{
if (!this.endUpdateObject.suspendedProcs)
this.endUpdateObject.suspendedProcs = [];
this.endUpdateObject.suspendedProcs.push(proc);
}
else
proc.apply(this);
},
/** @private */
_execAfterUpdateObjectProcs: function()
{
var procs = this.endUpdateObject.suspendedProcs;
if (procs)
{
while (procs.length)
{
var proc = procs.shift();
if (proc)
proc.apply(this);
}
}
},
/** @private */
_mergeObjUpdatedDetails: function(dest, target)
{
for (var i = 0, l = target.length; i < l; ++i)
{
this._mergeObjUpdatedDetailItem(dest, target[i]);
}
},
/** @private */
_mergeObjUpdatedDetailItem: function(dest, targetItem)
{
for (var i = 0, l = dest.length; i < l; ++i)
{
var destItem = dest[i];
// can merge
if (destItem.obj === targetItem.obj)
{
if (!destItem.propNames)
destItem.propNames = [];
if (targetItem.propNames)
Kekule.ArrayUtils.pushUnique(destItem.propNames, targetItem.propNames);
return;
}
}
// can not merge
dest.push(targetItem);
},
/** @private */
_logUpdatedDetail: function(details)
{
var msg = '';
details.forEach(function(d){
msg += 'Obj: ' + d.obj.getId() + '[' + d.obj.getClassName() + '] ';
msg += 'Props: [' + d.propNames.join(', ') + ']';
msg += '\n';
});
console.log(msg);
},
/**
	 * Notify that the object(s)' properties have been changed and need to be updated.
	 * @param {Variant} obj An object or an object array.
* @param {Array} changedPropNames
* @private
*/
objectChanged: function(obj, changedPropNames)
{
var data = {'obj': obj, 'propNames': changedPropNames};
//console.log('obj changed', obj.getClassName(), obj.getId(), changedPropNames);
var result = this.objectsChanged(data);
this.invokeEvent('editObjChanged', Object.extend({}, data)); // avoid change data
return result;
},
/**
	 * Notify that the object(s)' properties have been changed and need to be updated.
* @param {Variant} objDetails An object detail or an object detail array.
* @private
*/
objectsChanged: function(objDetails)
{
var a = DataType.isArrayValue(objDetails)? objDetails: [objDetails];
if (this.isUpdatingObject()) // suspend notification, just push objs in cache
{
//Kekule.ArrayUtils.pushUnique(this._updatedObjectDetails, a);
this._mergeObjUpdatedDetails(this._updatedObjectDetails, a);
//console.log('updating objects, suspending...', this._updatedObjectDetails);
//this._logUpdatedDetail(this._updatedObjectDetails);
}
else
{
//console.log('object changed', objDetails);
var updateObjs = Kekule.Render.UpdateObjUtils._extractObjsOfUpdateObjDetails(a);
this.doObjectsChanged(a, updateObjs);
// IMPORTANT: must do issue check after the doObjectsChanged method (invoking repainting)
this.requestAutoCheckIssuesIfNecessary();
this.invokeEvent('editObjsUpdated', {'details': Object.extend({}, objDetails)});
}
this._objChanged = true; // mark object changed
this.invokeEvent('editObjsChanged', {'details': Object.extend({}, objDetails)});
var selectedObjs = this.getSelection();
if (selectedObjs && updateObjs)
{
var changedSelectedObjs = AU.intersect(selectedObjs, updateObjs);
if (changedSelectedObjs.length)
this.invokeEvent('selectedObjsUpdated', {'objs': changedSelectedObjs});
}
},
/**
* Do actual job of objectsChanged. Descendants should override this method.
* @private
*/
doObjectsChanged: function(objDetails, updateObjs)
{
var oDetails = Kekule.ArrayUtils.clone(objDetails);
if (!updateObjs)
updateObjs = Kekule.Render.UpdateObjUtils._extractObjsOfUpdateObjDetails(oDetails);
//console.log('origin updateObjs', updateObjs);
var additionalObjs = this._getAdditionalRenderRelatedObjs(updateObjs);
// also push related objects into changed objs list
if (additionalObjs.length)
{
var additionalDetails = Kekule.Render.UpdateObjUtils._createUpdateObjDetailsFromObjs(additionalObjs);
Kekule.ArrayUtils.pushUnique(oDetails, additionalDetails);
}
// merge updateObjs and additionalObjs
//updateObjs = updateObjs.concat(additionalObjs);
Kekule.ArrayUtils.pushUnique(updateObjs, additionalObjs);
//console.log('changed objects', updateObjs);
var operRenderers = this._operatingRenderers;
var updateOperContextOnly = operRenderers && this._isAllObjsRenderedByRenderers(this.getObjContext(), updateObjs, operRenderers);
var canDoPartialUpdate = this.canModifyPartialGraphic();
//console.log('update objs and operRenderers', updateObjs, operRenderers);
//console.log('object changed', updateOperContextOnly, canDoPartialUpdate);
if (canDoPartialUpdate) // partial update
{
//var updateObjDetails = Kekule.Render.UpdateObjUtils._createUpdateObjDetailsFromObjs(updateObjs);
this.getRootRenderer().modify(this.getObjContext(),/* updateObjDetails*/oDetails);
// always repaint UI markers
this.recalcUiMarkers();
//console.log('partial update', oDetails);
}
else // update whole context
{
if (updateOperContextOnly)
{
//console.log('repaint oper context only');
this.repaintOperContext();
}
else // need to update whole context
{
//console.log('[repaint whole]');
this.repaint();
/*
var self = this;
(function(){ self.repaint(); }).defer();
*/
}
}
},
/**
	 * Call this method to indicate a continuous manipulation operation is in progress (e.g. moving or rotating objects).
*/
beginManipulateObject: function()
{
//console.log('[BEGIN MANIPULATE]');
//console.log('[Call begin update]', this._objectManipulateFlag);
if (this._objectManipulateFlag >= 0)
{
this.invokeEvent('beginManipulateObject');
}
--this._objectManipulateFlag;
},
/**
	 * Call this method to indicate the manipulation process is over; doManipulationEnd will be called when the outermost call returns.
*/
endManipulateObject: function()
{
++this._objectManipulateFlag;
//console.log('[END MANIPULATE]');
if (!this.isManipulatingObject())
{
this._objectManipulateFlag = 0;
this.doManipulationEnd();
this.requestAutoCheckIssuesIfNecessary();
//console.log('[MANIPULATE DONE]');
//this.invokeEvent('endManipulateObject'/*, {'details': Object.extend({}, this._updatedObjectDetails)}*/);
}
},
/**
	 * Check if beginManipulateObject has been called and the editor is still in a manipulation process.
*/
isManipulatingObject: function()
{
return (this._objectManipulateFlag < 0);
},
/**
* Called when endManipulateObject is called and the object manipulation is really done.
* @private
*/
doManipulationEnd: function()
{
//console.log('[MANIPULATE END]');
this.setOperationsInCurrManipulation([]);
this.invokeEvent('endManipulateObject'/*, {'details': Object.extend({}, this._updatedObjectDetails)}*/);
},
/**
* A combination of method beginUpdateObject/beginManipulateObject.
*/
beginManipulateAndUpdateObject: function()
{
this.beginManipulateObject();
this.beginUpdateObject();
},
/**
* A combination of method endUpdateObject/endManipulateObject.
*/
endManipulateAndUpdateObject: function()
{
this.endUpdateObject();
this.endManipulateObject();
},
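	/*
	 * Usage sketch (illustrative only): a continuous manipulation (e.g. dragging atoms) typically
	 * wraps each step in the combined begin/end calls so update batching and manipulation tracking
	 * stay paired. Assumes `editor` is an instance of this editor widget:
	 *
	 * editor.beginManipulateAndUpdateObject();
	 * try
	 * {
	 *   // ... apply one step of the move/rotate operation to the target objects ...
	 * }
	 * finally
	 * {
	 *   editor.endManipulateAndUpdateObject();  // doManipulationEnd() fires when the outermost call returns
	 * }
	 */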
/*
* Try apply modification to a series of objects in editor.
* @param {Variant} modificationOrName Modification object or name.
* @param {Variant} targets Target object or objects.
* @returns {Kekule.Operation} operation actually done.
*/
/*
applyModification: function(modificationOrName, targets)
{
var objs = AU.toArray(targets);
var modification = (typeof(modificationOrName) === 'string')? Kekule.Editor.ChemObjModificationManager.getModification(modificationOrName): modificationOrName;
if (objs.length && modification)
{
var opers = [];
for (var i = 0, l = objs.length; i < l; ++i)
{
if (modification.match(objs[i], this))
{
var oper = modification.createOperation(objs[i], this);
if (oper)
opers.push(oper);
}
}
}
if (opers.length)
{
var finalOper = (opers.length === 1) ? opers[0] : new Kekule.MacroOperation(opers);
this.execOperation(finalOper);
return finalOper;
}
else
return null;
},
*/
/** @private */
_needToCanonicalizeBeforeSaving: function()
{
return true; // !!this.getStandardizeObjectsBeforeSaving();
},
/** @private */
_getAdditionalRenderRelatedObjs: function(objs)
{
var result = [];
for (var i = 0, l = objs.length; i < l; ++i)
{
var obj = objs[i];
//Kekule.ArrayUtils.pushUnique(result, obj);
var relatedObjs = obj.getCoordDeterminateObjects? obj.getCoordDeterminateObjects(): [];
//console.log('obj', obj.getClassName(), 'related', relatedObjs);
Kekule.ArrayUtils.pushUnique(result, relatedObjs);
}
return result;
},
/** @private */
_isAllObjsRenderedByRenderers: function(context, objs, renders)
{
//console.log('check objs by renderers', objs, renders);
for (var i = 0, l = objs.length; i < l; ++i)
{
var obj = objs[i];
var isRendered = false;
for (var j = 0, k = renders.length; j < k; ++j)
{
var renderer = renders[j];
if (renderer.isChemObjRenderedBySelf(context, obj))
{
isRendered = true;
break;
}
}
if (!isRendered)
return false;
}
return true;
},
/////////////////////////////////////////////////////////////////////////////
////////////// Method about operContext rendering ///////////////////////////
/**
	 * Prepare to do modification work in the editor (e.g., move some atoms).
* The objs to be modified will be rendered in operContext separately (if enableOperContext is true).
* @param {Array} objs
*/
prepareOperatingObjs: function(objs)
{
// Check if already has old operating renderers. If true, just end them.
if (this._operatingRenderers && this._operatingRenderers.length)
this.endOperatingObjs(true);
if (this.getEnableOperContext())
{
// prepare operating renderers
this._prepareRenderObjsInOperContext(objs);
this._operatingObjs = objs;
//console.log('oper objs', this._operatingObjs);
//console.log('oper renderers', this._operatingRenderers);
}
// finally force repaint the whole client area, both objContext and operContext
this.repaint();
},
/**
* Modification work in editor (e.g., move some atoms) is done.
* The objs to be modified will be rendered back into objContext.
* @param {Bool} noRepaint
*/
endOperatingObjs: function(noRepaint)
{
// notify to render all objs in main context
if (this.getEnableOperContext())
{
this._endRenderObjsInOperContext();
this._operatingObjs = null;
if (!noRepaint)
{
//console.log('end operation objs');
this.repaint();
}
}
},
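	/*
	 * Usage sketch (illustrative only): redirect the rendering of objects being dragged to the
	 * operating context so that only this layer is repainted during the manipulation.
	 * Assumes `editor` has enableOperContext set to true and `movingObjs` is an array of objects
	 * from the loaded document:
	 *
	 * editor.prepareOperatingObjs(movingObjs);  // renderers of these objects are redirected to operContext
	 * // ... during the drag, changes only need repaintOperContext() ...
	 * editor.endOperatingObjs();                // render them back into the main obj context
	 */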
/** @private */
_isChemObjDirectlyRenderedByRenderer: function(context, obj, renderer)
{
var standaloneObj = obj.getStandaloneAncestor? obj.getStandaloneAncestor(): obj;
return renderer.isChemObjRenderedDirectlyBySelf(context, standaloneObj);
},
/** @private */
_getStandaloneRenderObjsInOperContext: function(objs)
{
var standAloneObjs = [];
for (var i = 0, l = objs.length; i < l; ++i)
{
var obj = objs[i];
if (obj.getStandaloneAncestor)
obj = obj.getStandaloneAncestor();
Kekule.ArrayUtils.pushUnique(standAloneObjs, obj);
}
return standAloneObjs;
},
/** @private */
_prepareRenderObjsInOperContext: function(objs)
{
//console.log('redirect objs', objs);
var renderers = [];
var map = this.getObjRendererMap();
var rs = map.getValues();
var context = this.getObjContext();
var standAloneObjs = this._getStandaloneRenderObjsInOperContext(objs);
/*
if (standAloneObjs.length)
console.log(standAloneObjs[0].getId(), standAloneObjs);
else
console.log('(no standalone)');
*/
//for (var i = 0, l = objs.length; i < l; ++i)
for (var i = 0, l = standAloneObjs.length; i < l; ++i)
{
//var obj = objs[i];
var obj = standAloneObjs[i];
for (var j = 0, k = rs.length; j < k; ++j)
{
var renderer = rs[j];
//if (renderer.isChemObjRenderedBySelf(context, obj))
if (renderer.isChemObjRenderedDirectlyBySelf(context, obj))
{
//console.log('direct rendered by', obj.getClassName(), renderer.getClassName());
Kekule.ArrayUtils.pushUnique(renderers, renderer);
}
/*
if (parentFragment && renderer.isChemObjRenderedDirectlyBySelf(context, parentFragment))
Kekule.ArrayUtils.pushUnique(renderers, renderer); // when modify node or connector, mol will also be changed
*/
}
/*
var renderer = map.get(objs[i]);
if (renderer)
Kekule.ArrayUtils.pushUnique(renderers, renderer);
*/
}
//console.log('oper renderers', renderers);
if (renderers.length > 0)
{
for (var i = 0, l = renderers.length; i < l; ++i)
{
var renderer = renderers[i];
this._setRendererToOperContext(renderer);
//console.log('begin context redirect', renderer.getClassName());
//console.log(renderer.getRedirectContext());
}
this._operatingRenderers = renderers;
}
else
this._operatingRenderers = null;
//console.log('<total renderer count>', rs.length, '<redirected>', renderers.length);
/*
if (renderers.length)
console.log('redirected obj 0: ', renderers[0].getChemObj().getId());
*/
},
/** @private */
_endRenderObjsInOperContext: function()
{
var renderers = this._operatingRenderers;
if (renderers && renderers.length)
{
for (var i = 0, l = renderers.length; i < l; ++i)
{
var renderer = renderers[i];
//renderer.setRedirectContext(null);
this._unsetRendererToOperContext(renderer);
//console.log('end context redirect', renderer.getClassName());
}
this.clearOperContext();
}
this._operatingRenderers = null;
},
/** @private */
_setRendererToOperContext: function(renderer)
{
renderer.setRedirectContext(this.getOperContext());
},
/** @private */
_unsetRendererToOperContext: function(renderer)
{
renderer.setRedirectContext(null);
},
/////////////////////////////////////////////////////////////////////////////
//////////////////////// methods about bound maps ////////////////////////////
/**
* Returns bound inflation for interaction with a certain pointer device (mouse, touch, etc.)
* @param {String} pointerType
*/
getInteractionBoundInflation: function(pointerType)
{
var cache = this._lengthCaches.interactionBoundInflations;
var cacheKey = pointerType || 'default';
if (cache)
{
if (cache[cacheKey])
{
//console.log('cached!')
return cache[cacheKey];
}
}
// no cache, calculate
var iaConfigs = this.getEditorConfigs().getInteractionConfigs();
var defRatioPropName = 'objBoundTrackInflationRatio';
var typedRatio, defRatio = iaConfigs.getPropValue(defRatioPropName);
if (pointerType)
{
var sPointerType = pointerType.upperFirst();
var typedRatioPropName = defRatioPropName + sPointerType;
if (iaConfigs.hasProperty(typedRatioPropName))
typedRatio = iaConfigs.getPropValue(typedRatioPropName);
}
var actualRatio = typedRatio || defRatio;
var ratioValue = actualRatio && this.getDefBondScreenLength() * actualRatio;
var minValuePropName = 'objBoundTrackMinInflation';
var typedMinValue, defMinValue = iaConfigs.getPropValue(minValuePropName);
if (pointerType)
{
var typedMinValuePropName = minValuePropName + sPointerType;
if (iaConfigs.hasProperty(typedMinValuePropName))
typedMinValue = iaConfigs.getPropValue(typedMinValuePropName);
}
var actualMinValue = typedMinValue || defMinValue;
var actualValue = Math.max(ratioValue || 0, actualMinValue);
// stores to cache
if (!cache)
{
cache = {};
this._lengthCaches.interactionBoundInflations = cache;
}
cache[cacheKey] = actualValue;
//console.log('to cache');
return actualValue;
},
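	/*
	 * Worked sketch of the computation above (numbers are only an example): with
	 * objBoundTrackInflationRatio = 0.2, a default bond screen length of 30px and
	 * objBoundTrackMinInflation = 5, the resulting inflation is max(30 * 0.2, 5) = 6.
	 * A pointer-type specific config value (e.g. objBoundTrackInflationRatioTouch, if defined in the
	 * interaction configs) takes precedence over the default ratio/min value.
	 */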
/**
	 * Returns all bound map items at x/y.
* Input coord is based on the screen coord system.
* @returns {Array}
* @private
*/
getBoundInfosAtCoord: function(screenCoord, filterFunc, boundInflation)
{
/*
if (!boundInflation)
throw 'boundInflation not set!';
*/
var delta = boundInflation || this.getCurrBoundInflation() || this.getEditorConfigs().getInteractionConfigs().getObjBoundTrackMinInflation();
return this.tryApplySuper('getBoundInfosAtCoord', [screenCoord, filterFunc, delta]);
/*
var boundRecorder = this.getBoundInfoRecorder();
var delta = boundInflation || this.getCurrBoundInflation() || this.getEditorConfigs().getInteractionConfigs().getObjBoundTrackMinInflation();
//var coord = this.getObjDrawBridge().transformScreenCoordToContext(this.getObjContext(), screenCoord);
var coord = this.screenCoordToContext(screenCoord);
var refCoord = (this.getRenderType() === Kekule.Render.RendererType.R3D)? {'x': 0, 'y': 0}: null;
//console.log(coord, delta);
var matchedInfos = boundRecorder.getIntersectionInfos(this.getObjContext(), coord, refCoord, delta, filterFunc);
return matchedInfos;
*/
},
/**
	 * Returns the topmost bound map item at x/y.
* Input coord is based on the screen coord system.
* @param {Hash} screenCoord
* @param {Array} excludeObjs Objects in this array will not be returned.
* @returns {Object}
*/
getTopmostBoundInfoAtCoord: function(screenCoord, excludeObjs, boundInflation)
{
var enableTrackNearest = this.getEditorConfigs().getInteractionConfigs().getEnableTrackOnNearest();
if (!enableTrackNearest)
//return this.findTopmostBoundInfo(this.getBoundInfosAtCoord(screenCoord, null, boundInflation), excludeObjs, boundInflation);
return this.tryApplySuper('getTopmostBoundInfoAtCoord', [screenCoord, excludeObjs, boundInflation]);
// else, track on nearest
// new approach, find nearest boundInfo at coord
var SU = Kekule.Render.MetaShapeUtils;
var boundInfos = this.getBoundInfosAtCoord(screenCoord, null, boundInflation);
//var filteredBoundInfos = [];
var result, lastShapeInfo, lastDistance;
var setResult = function(boundInfo, shapeInfo, distance)
{
result = boundInfo;
lastShapeInfo = shapeInfo || boundInfo.boundInfo;
if (Kekule.ObjUtils.notUnset(distance))
lastDistance = distance;
else
lastDistance = SU.getDistance(screenCoord, lastShapeInfo);
};
for (var i = boundInfos.length - 1; i >= 0; --i)
{
var info = boundInfos[i];
if (excludeObjs && (excludeObjs.indexOf(info.obj) >= 0))
continue;
if (!result)
setResult(info);
else
{
var shapeInfo = info.boundInfo;
if (shapeInfo.shapeType < lastShapeInfo.shapeType)
setResult(info, shapeInfo);
else if (shapeInfo.shapeType === lastShapeInfo.shapeType)
{
var currDistance = SU.getDistance(screenCoord, shapeInfo);
if (currDistance < lastDistance)
{
//console.log('distanceCompare', currDistance, lastDistance);
setResult(info, shapeInfo, currDistance);
}
}
}
}
return result;
},
/**
	 * Returns all basic drawn objects at coord (with inflation) based on the screen coord system.
	 * @param {Hash} screenCoord
	 * @param {Number} boundInflation
	 * @param {Array} excludeObjs
	 * @param {Array} filterObjClasses If this param is set, only objects matching these classes will be returned.
* @returns {Array}
* @private
*/
getBasicObjectsAtCoord: function(screenCoord, boundInflation, excludeObjs, filterObjClasses)
{
var boundInfos = this.getBoundInfosAtCoord(screenCoord, null, boundInflation);
var result = [];
if (boundInfos)
{
if (excludeObjs)
{
boundInfos = AU.filter(boundInfos, function(boundInfo){
					var obj = boundInfo.obj;
if (!obj)
return false;
if (obj)
{
if (excludeObjs && excludeObjs.indexOf(obj) >= 0)
return false;
}
return true;
});
}
			// an object stuck to a coord should be selected first for the unstick operation, even if it is under the back layer
var _getStickLevel = function(obj){
var stickTarget = obj && obj.getCoordStickTarget && obj.getCoordStickTarget();
return stickTarget? 1: 0;
};
boundInfos.sort(function(b1, b2){
var stickLevel1 = _getStickLevel(b1.obj);
var stickLevel2 = _getStickLevel(b2.obj);
return (stickLevel1 - stickLevel2);
});
var enableTrackNearest = this.getEditorConfigs().getInteractionConfigs().getEnableTrackOnNearest();
if (enableTrackNearest) // sort by bound distances to screenCoord
{
var SU = Kekule.Render.MetaShapeUtils;
boundInfos.sort(function(b1, b2){ // the topmost boundinfo at tail
var result = 0;
var shapeInfo1 = b1.boundInfo;
var shapeInfo2 = b2.boundInfo;
result = -(shapeInfo1.shapeType - shapeInfo2.shapeType);
if (!result)
{
var d1 = SU.getDistance(screenCoord, shapeInfo1);
var d2 = SU.getDistance(screenCoord, shapeInfo2);
result = -(d1 - d2);
}
return result;
});
}
for (var i = boundInfos.length - 1; i >= 0; --i)
{
var obj = boundInfos[i].obj;
if (obj)
{
if (excludeObjs && excludeObjs.indexOf(obj) >= 0)
continue;
}
result.push(obj);
}
if (result && filterObjClasses) // filter
{
result = AU.filter(result, function(obj)
{
for (var i = 0, l = filterObjClasses.length; i < l; ++i)
{
if (obj instanceof filterObjClasses[i])
return true;
}
return false;
});
}
}
return result;
},
/**
	 * Returns the topmost basic drawn object at coord based on the screen coord system.
	 * @param {Hash} screenCoord
	 * @param {Number} boundInflation
	 * @param {Array} filterObjClasses If this param is set, only objects matching these classes will be returned.
* @returns {Object}
* @private
*/
getTopmostBasicObjectAtCoord: function(screenCoord, boundInflation, filterObjClasses)
{
/*
var boundItem = this.getTopmostBoundInfoAtCoord(screenCoord, null, boundInflation);
return boundItem? boundItem.obj: null;
*/
var objs = this.getBasicObjectsAtCoord(screenCoord, boundInflation, null, filterObjClasses);
return objs && objs[0];
},
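	/*
	 * Usage sketch (illustrative only): hit-test the object under the pointer, e.g. inside a pointer
	 * event handler. Assumes `editor` is an instance of this editor widget and `screenCoord` is an
	 * {x, y} hash in the editor's screen coord system; the filter class used below is an assumption:
	 *
	 * var inflation = editor.getInteractionBoundInflation('mouse');
	 * var obj = editor.getTopmostBasicObjectAtCoord(screenCoord, inflation);  // topmost object of any type
	 * var node = editor.getTopmostBasicObjectAtCoord(screenCoord, inflation, [Kekule.ChemStructureNode]);
	 */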
/**
	 * Returns geometry bounds of an obj in the editor.
* @param {Kekule.ChemObject} obj
* @param {Number} boundInflation
* @returns {Array}
*/
getChemObjBounds: function(obj, boundInflation)
{
var bounds = [];
var infos = this.getBoundInfoRecorder().getBelongedInfos(this.getObjContext(), obj);
if (infos && infos.length)
{
for (var j = 0, k = infos.length; j < k; ++j)
{
var info = infos[j];
var bound = info.boundInfo;
if (bound)
{
// inflate
bound = Kekule.Render.MetaShapeUtils.inflateShape(bound, boundInflation);
bounds.push(bound);
}
}
}
return bounds;
},
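	/*
	 * Usage sketch (illustrative only): fetch the drawn bounds of an object, e.g. to place a custom
	 * UI marker around it. Assumes `editor` is an instance of this editor widget and `obj` belongs
	 * to the loaded document:
	 *
	 * var bounds = editor.getChemObjBounds(obj, editor.getInteractionBoundInflation());
	 * // each entry is an inflated meta shape info (see Kekule.Render.MetaShapeUtils)
	 */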
//////////////////// methods about UI markers ///////////////////////////////
/**
* Returns the ui markers at screen coord.
* @param {Hash} screenCoord
* @param {Float} boundInflation
* @param {Array} filterClasses
* @returns {Array}
*/
getUiMarkersAtCoord: function(screenCoord, boundInflation, filterClasses)
{
var markers = this.getUiMarkers();
var filterFunc = (filterClasses && filterClasses.length)? function(marker) {
for (var i = 0, l = filterClasses.length; i < l; ++i)
{
if (marker instanceof filterClasses[i])
return true;
}
return false;
}: null;
var SU = Kekule.Render.MetaShapeUtils;
var result = [];
for (var i = markers.getMarkerCount() - 1; i >= 0; --i)
{
var marker = markers.getMarkerAt(i);
if (marker.getVisible())
{
if (!filterFunc || filterFunc(marker))
{
var shapeInfo = marker.shapeInfo;
if (SU.isCoordInside(screenCoord, shapeInfo, boundInflation))
result.push(marker);
}
}
}
return result;
},
/**
	 * Notify that UI markers are currently being modified and the editor need not repaint them.
*/
beginUpdateUiMarkers: function()
{
--this._uiMarkerUpdateFlag;
},
/**
	 * Call this method to indicate the UI marker update process is over and the markers should be repainted immediately.
*/
endUpdateUiMarkers: function()
{
++this._uiMarkerUpdateFlag;
if (!this.isUpdatingUiMarkers())
this.repaintUiMarker();
},
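	/*
	 * Usage sketch (illustrative only): batch UI marker changes so they are repainted only once;
	 * this is the pattern recalcUiMarkers() below relies on. Assumes `editor` is an instance of
	 * this editor widget:
	 *
	 * editor.beginUpdateUiMarkers();
	 * try
	 * {
	 *   // ... show/hide/modify markers here ...
	 * }
	 * finally
	 * {
	 *   editor.endUpdateUiMarkers();  // triggers repaintUiMarker() when the outermost call returns
	 * }
	 */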
/** Check if the editor is under continuous UI marker update. */
isUpdatingUiMarkers: function()
{
return (this._uiMarkerUpdateFlag < 0);
},
/**
* Called when transform has been made to objects and UI markers need to be modified according to it.
* The UI markers will also be repainted.
* @private
*/
recalcUiMarkers: function()
{
//this.setHotTrackedObj(null);
if (this.getUiDrawBridge())
{
this.beginUpdateUiMarkers();
try
{
this.recalcHotTrackMarker();
this.recalcSelectionAreaMarker();
this.recalcIssueCheckUiMarkers();
} finally
{
this.endUpdateUiMarkers();
}
}
},
/** @private */
repaintUiMarker: function()
{
if (this.isUpdatingUiMarkers())
return;
if (this.getUiDrawBridge() && this.getUiContext())
{
this.clearUiContext();
var drawParams = this.calcDrawParams();
this.getUiPainter().draw(this.getUiContext(), drawParams.baseCoord, drawParams.drawOptions);
}
},
/**
* Create a new marker based on shapeInfo.
* @private
*/
createShapeBasedMarker: function(markerPropName, shapeInfo, drawStyles, updateRenderer)
{
var marker = new Kekule.ChemWidget.MetaShapeUIMarker();
if (shapeInfo)
marker.setShapeInfo(shapeInfo);
if (drawStyles)
marker.setDrawStyles(drawStyles);
this.setPropStoreFieldValue(markerPropName, marker);
this.getUiMarkers().addMarker(marker);
if (updateRenderer)
{
//var updateType = Kekule.Render.ObjectUpdateType.ADD;
//this.getUiRenderer().update(this.getUiContext(), this.getUiMarkers(), marker, updateType);
this.repaintUiMarker();
}
return marker;
},
/**
* Change the shape info and draw styles of a meta shape based marker.
* @private
*/
modifyShapeBasedMarker: function(marker, newShapeInfo, drawStyles, updateRenderer)
{
var updateType = Kekule.Render.ObjectUpdateType.MODIFY;
if (newShapeInfo)
marker.setShapeInfo(newShapeInfo);
if (drawStyles)
marker.setDrawStyles(drawStyles);
// notify change and update renderer
if (updateRenderer)
{
//this.getUiPainter().redraw();
//this.getUiRenderer().update(this.getUiContext(), this.getUiMarkers(), marker, updateType);
this.repaintUiMarker();
}
},
/**
* Hide a UI marker.
* @param marker
*/
hideUiMarker: function(marker, updateRenderer)
{
marker.setVisible(false);
// notify change and update renderer
if (updateRenderer)
{
//this.getUiRenderer().update(this.getUiContext(), this.getUiMarkers(), marker, Kekule.Render.ObjectUpdateType.MODIFY);
this.repaintUiMarker();
}
},
/**
* Show a UI marker.
* @param marker
* @param updateRenderer
*/
showUiMarker: function(marker, updateRenderer)
{
marker.setVisible(true);
if (updateRenderer)
{
//this.getUiRenderer().update(this.getUiContext(), this.getUiMarkers(), marker, Kekule.Render.ObjectUpdateType.MODIFY);
this.repaintUiMarker();
}
},
/**
* Remove a marker from collection.
* @private
*/
removeUiMarker: function(marker)
{
if (marker)
{
this.getUiMarkers().removeMarker(marker);
//this.getUiRenderer().update(this.getUiContext(), this.getUiMarkers(), marker, Kekule.Render.ObjectUpdateType.REMOVE);
this.repaintUiMarker();
}
},
/**
* Clear all UI markers.
* @private
*/
clearUiMarkers: function()
{
this.getUiMarkers().clearMarkers();
//this.getUiRenderer().redraw(this.getUiContext());
//this.redraw();
this.repaintUiMarker();
},
/**
* Modify hot track marker to bind to newBoundInfos.
* @private
*/
changeHotTrackMarkerBounds: function(newBoundInfos)
{
var infos = Kekule.ArrayUtils.toArray(newBoundInfos);
//var updateType = Kekule.Render.ObjectUpdateType.MODIFY;
var styleConfigs = this.getEditorConfigs().getUiMarkerConfigs();
var drawStyles = {
'color': styleConfigs.getHotTrackerColor(),
'opacity': styleConfigs.getHotTrackerOpacity()
};
var inflation = this.getCurrBoundInflation() || this.getEditorConfigs().getInteractionConfigs().getObjBoundTrackMinInflation();
var bounds = [];
for (var i = 0, l = infos.length; i < l; ++i)
{
var boundInfo = infos[i];
var bound = inflation? Kekule.Render.MetaShapeUtils.inflateShape(boundInfo, inflation): boundInfo;
//console.log('inflate', bound);
if (bound)
bounds.push(bound);
}
var tracker = this.getUiHotTrackMarker();
//console.log('change hot track', bound, drawStyles);
tracker.setVisible(true);
this.modifyShapeBasedMarker(tracker, bounds, drawStyles, true);
return this;
},
/**
* Hide hot track marker.
* @private
*/
hideHotTrackMarker: function()
{
var tracker = this.getUiHotTrackMarker();
if (tracker)
{
this.hideUiMarker(tracker, true);
}
return this;
},
/**
* Show hot track marker.
* @private
*/
showHotTrackMarker: function()
{
var tracker = this.getUiHotTrackMarker();
if (tracker)
{
this.showUiMarker(tracker, true);
}
return this;
},
/////////////////////////////////////////////////////////////////////////////
// methods about hot track marker
/**
* Try hot track object on coord.
* @param {Hash} screenCoord Coord based on screen system.
*/
hotTrackOnCoord: function(screenCoord)
{
if (this.getEditorConfigs().getInteractionConfigs().getEnableHotTrack())
{
/*
var boundItem = this.getTopmostBoundInfoAtCoord(screenCoord);
if (boundItem) // mouse move into object
{
var obj = boundItem.obj;
if (obj)
this.setHotTrackedObj(obj);
//this.changeHotTrackMarkerBound(boundItem.boundInfo);
}
else // mouse move out from object
{
this.setHotTrackedObj(null);
}
*/
//console.log('hot track here');
this.setHotTrackedObj(this.getTopmostBasicObjectAtCoord(screenCoord, this.getCurrBoundInflation()));
}
return this;
},
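// Illustrative use from a pointer-move handler (sketch; `editor` and `screenCoord` are assumed to be
// provided by the caller, e.g. an interaction controller that has already translated the event coord):
//   editor.hotTrackOnCoord(screenCoord);  // highlights the topmost object under the pointer, if any
//   // ... and when the pointer leaves the client area:
//   editor.hideHotTrack();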
/**
* Hot track on a basic drawn object.
* @param {Object} obj
*/
hotTrackOnObj: function(obj)
{
this.setHotTrackedObj(obj);
return this;
},
/**
* Remove all hot track markers.
* @param {Bool} doNotClearHotTrackedObjs If false, the hotTrackedObjs property will also be set to empty.
*/
hideHotTrack: function(doNotClearHotTrackedObjs)
{
this.hideHotTrackMarker();
if (!doNotClearHotTrackedObjs)
this.clearHotTrackedObjs();
return this;
},
/**
* Set hot tracked objects to empty.
*/
clearHotTrackedObjs: function()
{
this.setHotTrackedObjs([]);
},
/**
* Add an object to the hot tracked objects.
* @param {Object} obj
*/
addHotTrackedObj: function(obj)
{
var olds = this.getHotTrackedObjs() || [];
Kekule.ArrayUtils.pushUnique(olds, obj);
this.setHotTrackedObjs(olds);
return this;
},
/** @private */
recalcHotTrackMarker: function()
{
this.setHotTrackedObjs(this.getHotTrackedObjs());
},
// method about chem object hint
/**
* Update the hint text for chem objects.
* This method may be called when hovering over some basic objects.
* @param {String} hint
* @private
*/
updateHintForChemObject: function(hint)
{
var elem = this.getEditClientElem();
if (elem)
{
var sHint = hint || '';
if (sHint && this.updateHintForChemObject._cache == sHint) // same as the last non-empty hint; the hint text must be changed slightly, otherwise it may not be redisplayed in the client area
{
sHint += '\f'; // add a hidden char
}
elem.title = sHint;
if (sHint)
this.updateHintForChemObject._cache = sHint;
}
},
/**
* Returns the hint value from a chem object.
* Descendants may override this method.
* @param {Kekule.ChemObject} obj
* @returns {String}
*/
getChemObjHint: function(obj)
{
return null;
},
/**
* Returns the hint value from a set of chem objects.
* Descendants may override this method.
* @param {Array} objs
* @returns {String}
*/
getChemObjsHint: function(objs)
{
var hints = [];
for (var i = 0, l = objs.length; i < l; ++i)
{
var hint = this.getChemObjHint(objs[i]);
if (hint)
{
// hints.push(hint);
AU.pushUnique(hints, hint); // avoid showing duplicated hint texts
}
}
// issue hints
var issueHints = this._getChemObjsRelatedIssuesHints(objs);
if (issueHints.length)
{
//hints = hints.concat(issueHints);
AU.pushUnique(hints, issueHints); // avoid showing duplicated hint texts
}
return hints.length? hints.join('\n'): null;
},
/** @private */
_getChemObjsRelatedIssuesHints: function(objs)
{
var result = [];
if (this.getEnableIssueMarkerHint() && this.getEnableIssueCheck())
{
var issueItems = this.getObjectsRelatedIssueItems(objs, true) || [];
for (var i = 0, l = issueItems.length; i < l; ++i)
{
var msg = issueItems[i].getMessage();
AU.pushUnique(result, msg); // avoid showing duplicated hint texts
}
}
return result;
},
// methods about issue markers
/** @private */
issueCheckResultsChanged: function()
{
this.recalcIssueCheckUiMarkers();
var activeIssueResult = this.getActiveIssueCheckResult();
if (activeIssueResult && this.getEnableAutoScrollToActiveIssue()) // need to auto scroll to active issue objects?
{
var targets = activeIssueResult.getTargets();
if (targets && targets.length)
{
var objs = this._getExposedSelfOrParentObjs(targets);
if (objs && objs.length)
{
var interState = this.getObjectsBoxAndClientBoxRelation(objs);
if (interState !== Kekule.IntersectionState.CONTAINED)
this.scrollClientToObject(objs);
}
}
}
},
/** @private */
_needShowInactiveIssueMarkers: function()
{
var result = this.getEnableIssueCheck() && this.getShowAllIssueMarkers();
return result;
},
/** @private */
recalcIssueCheckUiMarkers: function()
{
if (!this.getChemObjLoaded()) // if no content in editor, just bypass
return;
this.beginUpdateUiMarkers();
try
{
var checkResults = this.getIssueCheckResults() || [];
if (!checkResults.length || !this.getEnableIssueCheck())
this.hideIssueCheckUiMarkers();
else
{
var showAll = this._needShowInactiveIssueMarkers();
var activeResult = this.getActiveIssueCheckResult();
// hide or show active marker
this.getActiveIssueCheckUiMarker().setVisible(!!activeResult);
var issueObjGroup = this._groupUpIssueObjects(checkResults, activeResult, true); // only returns exposed objs in editor
//var boundGroup = {};
var inflation = this.getEditorConfigs().getInteractionConfigs().getSelectionMarkerInflation();
// TODO: the inflation of issue markers is currently borrowed from selection markers
var levels = Kekule.ErrorLevel.getAllLevels();
var groupKeys = levels.concat('active'); // Kekule.ObjUtils.getOwnedFieldNames(issueObjGroup);
for (var i = 0, ii = groupKeys.length; i < ii; ++i)
{
var currBounds = [];
var key = groupKeys[i];
var group = issueObjGroup[key];
var errorLevel = group? group.level: key;
var isActive = (key === 'active');
if (group)
{
if (isActive || showAll) // when showAll is not true, bypass all inactive issue markers
{
var objs = group.objs;
for (var j = 0, jj = objs.length; j < jj; ++j)
{
var obj = objs[j];
var infos = this.getBoundInfoRecorder().getBelongedInfos(this.getObjContext(), obj);
for (var k = 0, kk = infos.length; k < kk; ++k)
{
var info = infos[k];
var bound = info.boundInfo;
if (bound)
{
// inflate
bound = Kekule.Render.MetaShapeUtils.inflateShape(bound, inflation);
currBounds.push(bound);
}
}
}
}
}
this.changeIssueCheckUiMarkerBound(errorLevel, currBounds, isActive);
}
}
}
finally
{
this.endUpdateUiMarkers();
}
},
/** @private */
changeIssueCheckUiMarkerBound: function(issueLevel, bounds, isActive)
{
var marker = isActive? this.getActiveIssueCheckUiMarker(): this.getIssueCheckUiMarker(issueLevel);
if (marker)
{
//console.log(issueLevel, marker, bounds.length);
if (bounds && bounds.length)
{
var levelName = Kekule.ErrorLevel.levelToString(issueLevel);
var configs = this.getEditorConfigs().getUiMarkerConfigs();
var styleConfigs = configs.getIssueCheckMarkerColors()[levelName];
var drawStyles = {
'strokeColor': isActive ? styleConfigs.activeStrokeColor : styleConfigs.strokeColor,
'strokeWidth': isActive ? configs.getIssueCheckActiveMarkerStrokeWidth() : configs.getIssueCheckMarkerStrokeWidth(),
'fillColor': isActive ? styleConfigs.activeFillColor : styleConfigs.fillColor,
'opacity': isActive ? configs.getIssueCheckActiveMarkerOpacity() : configs.getIssueCheckMarkerOpacity()
};
//console.log(drawStyles);
marker.setVisible(true);
this.modifyShapeBasedMarker(marker, bounds, drawStyles, true);
}
else
marker.setVisible(false);
}
return this;
},
/** @private */
_getExposedSelfOrParentObjs: function(objs)
{
var result = [];
for (var i = 0, l = objs.length; i < l; ++i)
{
var obj = objs[i];
if (!obj.isExposed || obj.isExposed())
result.push(obj);
else if (obj.getExposedAncestor)
{
var ancestor = obj.getExposedAncestor();
if (ancestor)
result.push(ancestor);
}
}
return result;
},
/** @private */
_groupUpIssueObjects: function(checkResults, activeResult, requireExposing)
{
var result = {};
for (var i = 0, l = checkResults.length; i < l; ++i)
{
var r = checkResults[i];
var objs = r.getTargets();
var level = r.getLevel();
if (activeResult && r === activeResult)
{
result['active'] = {
'level': level,
'objs': requireExposing? this._getExposedSelfOrParentObjs(objs): objs
};
}
else
{
if (!result[level])
result[level] = {'level': level, 'objs': []};
result[level].objs = result[level].objs.concat(requireExposing ? this._getExposedSelfOrParentObjs(objs) : objs);
}
}
// different error levels have different priorities; an object already marked at a higher priority level need not be marked again at a lower one
var EL = Kekule.ErrorLevel;
var priorities = [EL.LOG, EL.NOTE, EL.WARNING, EL.ERROR, 'active'];
for (var i = 0, l = priorities.length; i < l; ++i)
{
var currPriorityObjs = result[priorities[i]] && result[priorities[i]].objs;
if (currPriorityObjs)
{
for (var j = i + 1, k = priorities.length; j < k; ++j)
{
var highPriorityObjs = result[priorities[j]] && result[priorities[j]].objs;
if (highPriorityObjs)
{
var filteredObjs = AU.exclude(currPriorityObjs, highPriorityObjs);
result[priorities[i]].objs = filteredObjs;
}
}
}
}
return result;
},
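// Shape of the hash returned above (sketch derived from the code; keys are error level values plus the
// special 'active' key):
//   {
//     'active': { 'level': <level of the active check result>, 'objs': [...] },
//     <Kekule.ErrorLevel value>: { 'level': <same level>, 'objs': [...] },
//     ...
//   }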
/** @private */
hideIssueCheckUiMarkers: function()
{
var markers = this.getAllIssueCheckUiMarkers() || [];
for (var i = 0, l = markers.length; i < l; ++i)
this.hideUiMarker(markers[i]);
},
/**
* Check if obj (or its ancestor) is (one of) the targets of issueResult.
* @param {Kekule.ChemObject} obj
* @param {Kekule.IssueCheck.CheckResult} issueResult
* @param {Bool} checkAncestors
* @returns {Bool}
*/
isObjectRelatedToIssue: function(obj, issueResult, checkAncestors)
{
var targets = issueResult.getTargets() || [];
if (targets.indexOf(obj) >= 0)
return true;
else if (checkAncestors)
{
var parent = obj.getParent();
return parent? this.isObjectRelatedToIssue(parent, issueResult, checkAncestors): false;
}
},
/**
* Returns the issue items related to obj.
* @param {Kekule.ChemObject} obj
* @param {Bool} checkAncestors
* @returns {Array}
*/
getObjectIssueItems: function(obj, checkAncestors)
{
var checkResults = this.getIssueCheckResults() || [];
if (!checkResults.length || !this.getEnableIssueCheck())
return [];
else
{
var result = [];
for (var i = 0, l = checkResults.length; i < l; ++i)
{
var item = checkResults[i];
if (this.isObjectRelatedToIssue(obj, item, checkAncestors))
result.push(item);
}
return result;
}
},
/**
* Returns the issue items related to a series of objects.
* @param {Array} objs
* @param {Bool} checkAncestors
* @returns {Array}
*/
getObjectsRelatedIssueItems: function(objs, checkAncestors)
{
var checkResults = this.getIssueCheckResults() || [];
if (!checkResults.length || !this.getEnableIssueCheck())
return [];
var result = [];
// build the whole object set whose issue items need to be checked
var totalObjs = [].concat(objs);
if (checkAncestors)
{
for (var i = 0, l = objs.length; i < l; ++i)
{
AU.pushUnique(totalObjs, objs[i].getParentList());
}
}
for (var i = 0, l = totalObjs.length; i < l; ++i)
{
var issueItems = this.getObjectIssueItems(totalObjs[i], false);
AU.pushUnique(result, issueItems);
}
return result;
},
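// Illustrative call (sketch; `editor` stands for an editor instance): collect the issue messages
// related to the currently selected objects and their ancestors.
//   var issueItems = editor.getObjectsRelatedIssueItems(editor.getSelection(), true);
//   var messages = issueItems.map(function(item) { return item.getMessage(); });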
// methods about selecting marker
/**
* Modify the selection area marker to bind to newBoundInfo.
* @private
*/
changeSelectionAreaMarkerBound: function(newBoundInfo, drawStyles)
{
var styleConfigs = this.getEditorConfigs().getUiMarkerConfigs();
if (!drawStyles)
drawStyles = {
'strokeColor': styleConfigs.getSelectionMarkerStrokeColor(),
'strokeWidth': styleConfigs.getSelectionMarkerStrokeWidth(),
'fillColor': styleConfigs.getSelectionMarkerFillColor(),
'opacity': styleConfigs.getSelectionMarkerOpacity()
};
//console.log(drawStyles);
var marker = this.getUiSelectionAreaMarker();
if (marker)
{
marker.setVisible(true);
this.modifyShapeBasedMarker(marker, newBoundInfo, drawStyles, true);
}
return this;
},
/** @private */
hideSelectionAreaMarker: function()
{
var marker = this.getUiSelectionAreaMarker();
if (marker)
{
this.hideUiMarker(marker, true);
}
},
/** @private */
showSelectionAreaMarker: function()
{
var marker = this.getUiSelectionAreaMarker();
if (marker)
{
this.showUiMarker(marker, true);
}
},
/**
* Recalculate and repaint selection marker.
* @private
*/
recalcSelectionAreaMarker: function(doRepaint)
{
this.beginUpdateUiMarkers();
try
{
// debug
var selection = this.getSelection();
var count = selection.length;
if (count <= 0)
this.hideSelectionAreaMarker();
else
{
var bounds = [];
var containerBox = null;
var inflation = this.getEditorConfigs().getInteractionConfigs().getSelectionMarkerInflation();
for (var i = 0; i < count; ++i)
{
var obj = selection[i];
var infos = this.getBoundInfoRecorder().getBelongedInfos(this.getObjContext(), obj);
if (infos && infos.length)
{
for (var j = 0, k = infos.length; j < k; ++j)
{
var info = infos[j];
var bound = info.boundInfo;
if (bound)
{
// inflate
bound = Kekule.Render.MetaShapeUtils.inflateShape(bound, inflation);
bounds.push(bound);
var box = Kekule.Render.MetaShapeUtils.getContainerBox(bound);
containerBox = containerBox? Kekule.BoxUtils.getContainerBox(containerBox, box): box;
}
}
}
}
//var containerBox = this.getSelectionContainerBox(inflation);
this.setUiSelectionAreaContainerBox(containerBox);
// container box
if (containerBox)
{
var containerShape = Kekule.Render.MetaShapeUtils.createShapeInfo(
Kekule.Render.MetaShapeType.RECT,
[{'x': containerBox.x1, 'y': containerBox.y1}, {'x': containerBox.x2, 'y': containerBox.y2}]
);
bounds.push(containerShape);
}
else // containerBox disappear, may be a node or connector merge, hide selection area
this.hideSelectionAreaMarker();
//console.log(bounds.length, bounds);
if (bounds.length)
this.changeSelectionAreaMarkerBound(bounds);
}
}
finally
{
this.endUpdateUiMarkers();
}
},
/** @private */
_highlightSelectionAreaMarker: function()
{
var styleConfigs = this.getEditorConfigs().getUiMarkerConfigs();
var highlightStyles = {
'strokeColor': styleConfigs.getSelectionMarkerStrokeColor(),
'strokeWidth': styleConfigs.getSelectionMarkerStrokeWidth(),
'fillColor': styleConfigs.getSelectionMarkerFillColor(),
'opacity': styleConfigs.getSelectionMarkerEmphasisOpacity()
};
this.changeSelectionAreaMarkerBound(null, highlightStyles); // change draw styles without the modification of bound
},
/** @private */
_restoreSelectionAreaMarker: function()
{
var styleConfigs = this.getEditorConfigs().getUiMarkerConfigs();
var highlightStyles = {
'strokeColor': styleConfigs.getSelectionMarkerStrokeColor(),
'strokeWidth': styleConfigs.getSelectionMarkerStrokeWidth(),
'fillColor': styleConfigs.getSelectionMarkerFillColor(),
'opacity': styleConfigs.getSelectionMarkerOpacity()
};
this.changeSelectionAreaMarkerBound(null, highlightStyles); // change draw styles without the modification of bound
},
/**
* Pulse selection marker several times to get the attention of user.
* @param {Int} duration Duration of the whole process, in ms.
* @param {Int} pulseCount The times of highlighting marker.
*/
pulseSelectionAreaMarker: function(duration, pulseCount)
{
if (this.getUiSelectionAreaMarker())
{
if (!duration)
duration = this.getEditorConfigs().getInteractionConfigs().getSelectionMarkerDefPulseDuration() || 0;
if (!pulseCount)
pulseCount = this.getEditorConfigs().getInteractionConfigs().getSelectionMarkerDefPulseCount() || 1;
if (!duration)
return;
var interval = duration / pulseCount;
this.doPulseSelectionAreaMarker(interval, pulseCount);
}
return this;
},
/** @private */
doPulseSelectionAreaMarker: function(interval, pulseCount)
{
this._highlightSelectionAreaMarker();
//if (pulseCount <= 1)
setTimeout(this._restoreSelectionAreaMarker.bind(this), interval);
if (pulseCount > 1)
setTimeout(this.doPulseSelectionAreaMarker.bind(this, interval, pulseCount - 1), interval * 2);
},
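// Illustrative call (sketch; `editor` stands for an editor instance): flash the selection area marker
// twice within one second to draw the user's attention to the current selection.
//   editor.pulseSelectionAreaMarker(1000, 2);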
///////////////////////// Methods about selecting region ////////////////////////////////////
/**
* Start a selecting operation from coord.
* @param {Hash} coord
* @param {Bool} toggleFlag If true, the selecting region will toggle the selecting state of objects inside it rather than selecting them directly.
*/
startSelecting: function(screenCoord, toggleFlag)
{
if (toggleFlag === undefined)
toggleFlag = this.getIsToggleSelectOn();
if (!toggleFlag)
this.deselectAll();
var M = Kekule.Editor.SelectMode;
var mode = this.getSelectMode();
this._currSelectMode = mode;
return (mode === M.POLYLINE || mode === M.POLYGON)?
this.startSelectingCurveDrag(screenCoord, toggleFlag):
this.startSelectingBoxDrag(screenCoord, toggleFlag);
},
/**
* Add a new anchor coord to the selecting region.
* This method is called when the pointer device moves during selecting.
* @param {Hash} screenCoord
*/
addSelectingAnchorCoord: function(screenCoord)
{
var M = Kekule.Editor.SelectMode;
var mode = this._currSelectMode;
return (mode === M.POLYLINE || mode === M.POLYGON)?
this.dragSelectingCurveToCoord(screenCoord):
this.dragSelectingBoxToCoord(screenCoord);
},
/**
* Selecting operation end.
* @param {Hash} coord
* @param {Bool} toggleFlag If true, the selecting region will toggle the selecting state of objects inside it rather than selecting them directly.
*/
endSelecting: function(screenCoord, toggleFlag)
{
if (toggleFlag === undefined)
toggleFlag = this.getIsToggleSelectOn();
var M = Kekule.Editor.SelectMode;
var mode = this._currSelectMode;
var enablePartial = this.getEditorConfigs().getInteractionConfigs().getEnablePartialAreaSelecting();
var objs;
if (mode === M.POLYLINE || mode === M.POLYGON)
{
var polygonCoords = this._selectingCurveCoords;
// simplify the polygon first
var threshold = this.getEditorConfigs().getInteractionConfigs().getSelectingCurveSimplificationDistanceThreshold();
var simplifiedCoords = Kekule.GeometryUtils.simplifyCurveToLineSegments(polygonCoords, threshold);
//console.log('simplify selection', polygonCoords.length, simplifiedCoords.length);
this.endSelectingCurveDrag(screenCoord, toggleFlag);
if (mode === M.POLYLINE)
{
var lineWidth = this.getEditorConfigs().getInteractionConfigs().getSelectingBrushWidth();
objs = this.getObjectsIntersetExtendedPolyline(simplifiedCoords, lineWidth);
}
else // if (mode === M.POLYGON)
{
objs = this.getObjectsInPolygon(simplifiedCoords, enablePartial);
}
}
else // M.RECT or M.ANCESTOR
{
var startCoord = this._selectingBoxStartCoord;
var box = Kekule.BoxUtils.createBox(startCoord, screenCoord);
objs = this.getObjectsInScreenBox(box, enablePartial);
this.endSelectingBoxDrag(screenCoord, toggleFlag);
}
/*
if (objs && objs.length)
{
if (this._isInAncestorSelectMode()) // need to change to select standalone ancestors
{
objs = this._getAllStandaloneAncestorObjs(objs); // get standalone ancestors (e.g. molecule)
//objs = this._getAllCoordDependantObjs(objs); // but select there coord dependant children (e.g. atoms and bonds)
}
if (toggleFlag)
this.toggleSelectingState(objs);
else
this.select(objs);
}
*/
objs = this._getActualSelectedObjsInSelecting(objs);
if (toggleFlag)
this.toggleSelectingState(objs);
else
this.select(objs);
this.hideSelectingMarker();
},
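// Illustrative selecting flow (sketch; `editor` stands for an editor instance and the coords would
// normally come from pointer events translated to screen coords):
//   editor.startSelecting(coordOnPointerDown);           // rect/polygon/polyline mode is taken from getSelectMode()
//   editor.addSelectingAnchorCoord(coordOnPointerMove);  // called repeatedly while dragging
//   editor.endSelecting(coordOnPointerUp);               // computes the covered objects and updates the selection
//   // editor.cancelSelecting();                         // alternatively, abort and hide the selecting marker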
/**
* Cancel current selecting operation.
*/
cancelSelecting: function()
{
this.hideSelectingMarker();
},
/** @private */
_getActualSelectedObjsInSelecting: function(objs)
{
if (objs && objs.length)
{
if (this._isInAncestorSelectMode()) // need to change to select standalone ancestors
{
objs = this._getAllStandaloneAncestorObjs(objs); // get standalone ancestors (e.g. molecule)
//objs = this._getAllCoordDependantObjs(objs); // but select there coord dependant children (e.g. atoms and bonds)
}
return objs;
}
else
return [];
},
/** @private */
_isInAncestorSelectMode: function()
{
return this.getSelectMode() === Kekule.Editor.SelectMode.ANCESTOR;
},
/** @private */
_getAllStandaloneAncestorObjs: function(objs)
{
var result = [];
for (var i = 0, l = objs.length; i < l; ++i)
{
var obj = objs[i];
if (obj && obj.getStandaloneAncestor)
obj = obj.getStandaloneAncestor();
AU.pushUnique(result, obj);
}
return result;
},
/* @private */
/*
_getAllCoordDependantObjs: function(objs)
{
var result = [];
for (var i = 0, l = objs.length; i < l; ++i)
{
var obj = objs[i];
if (obj && obj.getCoordDependentObjects)
AU.pushUnique(result, obj.getCoordDependentObjects());
}
return result;
},
*/
/**
* Start to drag a selecting box from coord.
* @param {Hash} coord
* @param {Bool} toggleFlag If true, the box will toggle the selecting state of objects inside it rather than selecting them directly.
*/
startSelectingBoxDrag: function(screenCoord, toggleFlag)
{
//this.setInteractionStartCoord(screenCoord);
this._selectingBoxStartCoord = screenCoord;
/*
if (!toggleFlag)
this.deselectAll();
*/
//this.setEditorState(Kekule.Editor.EditorState.SELECTING);
},
/**
* Drag selecting box to a new coord.
* @param {Hash} screenCoord
*/
dragSelectingBoxToCoord: function(screenCoord)
{
//var startCoord = this.getInteractionStartCoord();
var startCoord = this._selectingBoxStartCoord;
var endCoord = screenCoord;
this.changeSelectingMarkerBox(startCoord, endCoord);
},
/**
* Selecting box drag end.
* @param {Hash} coord
* @param {Bool} toggleFlag If true, the box will toggle the selecting state of objects inside it rather than selecting them directly.
*/
endSelectingBoxDrag: function(screenCoord, toggleFlag)
{
//var startCoord = this.getInteractionStartCoord();
var startCoord = this._selectingBoxStartCoord;
//this.setInteractionEndCoord(coord);
this._selectingBoxEndCoord = screenCoord;
/*
var box = Kekule.BoxUtils.createBox(startCoord, screenCoord);
var enablePartial = this.getEditorConfigs().getInteractionConfigs().getEnablePartialAreaSelecting();
if (toggleFlag)
this.toggleSelectingStateOfObjectsInScreenBox(box, enablePartial);
else
this.selectObjectsInScreenBox(box, enablePartial);
this.hideSelectingMarker();
*/
},
/**
* Start to drag a selecting curve from coord.
* @param {Hash} coord
* @param {Bool} toggleFlag If true, the box will toggle the selecting state of objects inside it rather than selecting them directly.
*/
startSelectingCurveDrag: function(screenCoord, toggleFlag)
{
//this.setInteractionStartCoord(screenCoord);
this._selectingCurveCoords = [screenCoord];
//this.setEditorState(Kekule.Editor.EditorState.SELECTING);
},
/**
* Drag selecting curve to a new coord.
* @param {Hash} screenCoord
*/
dragSelectingCurveToCoord: function(screenCoord)
{
//var startCoord = this.getInteractionStartCoord();
this._selectingCurveCoords.push(screenCoord);
this.changeSelectingMarkerCurve(this._selectingCurveCoords, this._currSelectMode === Kekule.Editor.SelectMode.POLYGON);
},
/**
* Selecting curve drag end.
* @param {Hash} coord
* @param {Bool} toggleFlag If true, the box will toggle the selecting state of objects inside it rather than selecting them directly.
*/
endSelectingCurveDrag: function(screenCoord, toggleFlag)
{
this._selectingCurveCoords.push(screenCoord);
/*
var box = Kekule.BoxUtils.createBox(startCoord, screenCoord);
var enablePartial = this.getEditorConfigs().getInteractionConfigs().getEnablePartialAreaSelecting();
if (toggleFlag)
this.toggleSelectingStateOfObjectsInScreenBox(box, enablePartial);
else
this.selectObjectsInScreenBox(box, enablePartial);
this.hideSelectingMarker();
*/
},
/**
* Try to select an object at coord directly.
* @param {Hash} coord
* @param {Bool} toggleFlag If true, the selecting state of the object will be toggled rather than selected directly.
*/
selectOnCoord: function(coord, toggleFlag)
{
if (toggleFlag === undefined)
toggleFlag = this.getIsToggleSelectOn();
//console.log('select on coord');
var obj = this.getTopmostBasicObjectAtCoord(coord, this.getCurrBoundInflation());
if (obj)
{
var objs = this._getActualSelectedObjsInSelecting([obj]);
if (objs)
{
if (toggleFlag)
this.toggleSelectingState(objs);
else
this.select(objs);
}
}
},
// about selection area marker
/**
* Modify the selecting marker to bind to newBoundInfo.
* @private
*/
changeSelectingMarkerBound: function(newBoundInfo, drawStyles)
{
var styleConfigs = this.getEditorConfigs().getUiMarkerConfigs();
if (!drawStyles) // use the default one
drawStyles = {
'strokeColor': styleConfigs.getSelectingMarkerStrokeColor(),
'strokeWidth': styleConfigs.getSelectingMarkerStrokeWidth(),
'strokeDash': styleConfigs.getSelectingMarkerStrokeDash(),
'fillColor': styleConfigs.getSelectingMarkerFillColor(),
'opacity': styleConfigs.getSelectingMarkerOpacity()
};
var marker = this.getUiSelectingMarker();
marker.setVisible(true);
//console.log('change hot track', bound, drawStyles);
this.modifyShapeBasedMarker(marker, newBoundInfo, drawStyles, true);
return this;
},
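/**
* Change the selecting marker to a polyline or polygon shape defined by screenCoords.
* @private
*/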
changeSelectingMarkerCurve: function(screenCoords, isPolygon)
{
var ctxCoords = [];
for (var i = 0, l = screenCoords.length - 1; i < l; ++i)
{
ctxCoords.push(this.screenCoordToContext(screenCoords[i]));
}
var shapeInfo = Kekule.Render.MetaShapeUtils.createShapeInfo(
isPolygon? Kekule.Render.MetaShapeType.POLYGON: Kekule.Render.MetaShapeType.POLYLINE,
ctxCoords
);
var drawStyle;
if (!isPolygon)
{
var styleConfigs = this.getEditorConfigs().getUiMarkerConfigs();
drawStyle = {
'strokeColor': styleConfigs.getSelectingBrushMarkerStrokeColor(),
'strokeWidth': this.getEditorConfigs().getInteractionConfigs().getSelectingBrushWidth(),
'strokeDash': styleConfigs.getSelectingBrushMarkerStrokeDash(),
//'fillColor': styleConfigs.getSelectingMarkerFillColor(),
'lineCap': styleConfigs.getSelectingBrushMarkerStrokeLineCap(),
'lineJoin': styleConfigs.getSelectingBrushMarkerStrokeLineJoin(),
'opacity': styleConfigs.getSelectingBrushMarkerOpacity()
};
}
return this.changeSelectingMarkerBound(shapeInfo, drawStyle);
},
/**
* Change the rect box of selection marker.
* Coord is based on screen system.
* @private
*/
changeSelectingMarkerBox: function(screenCoord1, screenCoord2)
{
//var coord1 = this.getObjDrawBridge().transformScreenCoordToContext(this.getObjContext(), screenCoord1);
//var coord2 = this.getObjDrawBridge().transformScreenCoordToContext(this.getObjContext(), screenCoord2);
var coord1 = this.screenCoordToContext(screenCoord1);
var coord2 = this.screenCoordToContext(screenCoord2);
var shapeInfo = Kekule.Render.MetaShapeUtils.createShapeInfo(
Kekule.Render.MetaShapeType.RECT,
[{'x': Math.min(coord1.x, coord2.x), 'y': Math.min(coord1.y, coord2.y)},
{'x': Math.max(coord1.x, coord2.x), 'y': Math.max(coord1.y, coord2.y)}]
);
return this.changeSelectingMarkerBound(shapeInfo);
},
/** @private */
hideSelectingMarker: function()
{
var marker = this.getUiSelectingMarker();
if (marker)
{
this.hideUiMarker(marker, true);
}
},
/** @private */
showSelectingMarker: function()
{
var marker = this.getUiSelectingMarker();
if (marker)
{
this.showUiMarker(marker, true);
}
},
// methods about selection marker
/**
* Returns the region of screenCoord relative to selection marker.
* @private
*/
getCoordRegionInSelectionMarker: function(screenCoord, edgeInflation)
{
var R = Kekule.Editor.BoxRegion;
var CU = Kekule.CoordUtils;
var coord = this.screenCoordToContext(screenCoord);
var marker = this.getUiSelectionAreaMarker();
if (marker && marker.getVisible())
{
var box = this.getUiSelectionAreaContainerBox();
if (Kekule.ObjUtils.isUnset(edgeInflation))
edgeInflation = this.getEditorConfigs().getInteractionConfigs().getSelectionMarkerEdgeInflation();
var halfInf = (edgeInflation / 2) || 0;
var coord1 = CU.substract({'x': box.x1, 'y': box.y1}, {'x': halfInf, 'y': halfInf});
var coord2 = CU.add({'x': box.x2, 'y': box.y2}, {'x': halfInf, 'y': halfInf});
if ((coord.x < coord1.x) || (coord.y < coord1.y) || (coord.x > coord2.x) || (coord.y > coord2.y))
return R.OUTSIDE;
//coord2 = CU.substract(coord2, coord1);
var delta1 = CU.substract(coord, coord1);
var delta2 = CU.substract(coord2, coord);
var dx1 = delta1.x;
var dx2 = delta2.x;
var dy1 = delta1.y;
var dy2 = delta2.y;
if (dy1 < dy2) // on top half
{
if (dx1 < dx2) // on left part
{
if (dy1 <= edgeInflation)
return (dx1 <= edgeInflation)? R.CORNER_TL: R.EDGE_TOP;
else if (dx1 <= edgeInflation)
return R.EDGE_LEFT;
else
return R.INSIDE;
}
else // on right part
{
if (dy1 <= edgeInflation)
return (dx2 <= edgeInflation)? R.CORNER_TR: R.EDGE_TOP;
else if (dx2 <= edgeInflation)
return R.EDGE_RIGHT;
else
return R.INSIDE;
}
}
else // on bottom half
{
if (dx1 < dx2) // on left part
{
if (dy2 <= edgeInflation)
return (dx1 <= edgeInflation)? R.CORNER_BL: R.EDGE_BOTTOM;
else if (dx1 <= edgeInflation)
return R.EDGE_LEFT;
else
return R.INSIDE;
}
else // on right part
{
if (dy2 <= edgeInflation)
return (dx2 <= edgeInflation)? R.CORNER_BR: R.EDGE_BOTTOM;
else if (dx2 <= edgeInflation)
return R.EDGE_RIGHT;
else
return R.INSIDE;
}
}
}
return R.OUTSIDE;
},
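// Illustrative region test (sketch; `editor` stands for an editor instance): decide whether a pointer
// coord lies inside the selection area (e.g. to start moving it) or on one of its edges/corners
// (e.g. to start resizing).
//   var region = editor.getCoordRegionInSelectionMarker(screenCoord);
//   if (region === Kekule.Editor.BoxRegion.INSIDE)
//     { /* drag to move the selection */ }
//   else if (region !== Kekule.Editor.BoxRegion.OUTSIDE)
//     { /* on an edge or corner, drag to resize */ }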
/**
* Check if a point coord based on screen inside selection marker.
* @private
*/
isCoordInSelectionMarkerBound: function(screenCoord)
{
/*
//var coord = this.getObjDrawBridge().transformScreenCoordToContext(this.getObjContext(), screenCoord);
var coord = this.screenCoordToContext(screenCoord);
var marker = this.getUiSelectionAreaMarker();
if (marker && marker.getVisible())
{
var shapeInfo = marker.getShapeInfo();
return shapeInfo? Kekule.Render.MetaShapeUtils.isCoordInside(coord, shapeInfo): false;
}
else
return false;
*/
return (this.getCoordRegionInSelectionMarker(screenCoord) !== Kekule.Editor.BoxRegion.OUTSIDE);
},
//////////////////////////////////////////////////////////////////////////////
/////////////////////// methods about selection ////////////////////////////
/**
* Returns override render options that need to be applied to each selected object.
* Descendants should override this method.
* @returns {Hash}
* @private
*/
getObjSelectedRenderOptions: function()
{
// debug
/*
if (!this._selectedRenderOptions)
this._selectedRenderOptions = {'color': '#000055', 'strokeWidth': 2, 'atomRadius': 5};
*/
return this._selectedRenderOptions;
},
/**
* Returns method to add render option override item of chemObj.
* In 2D render mode, this method should return chemObj.addOverrideRenderOptionItem,
* in 3D render mode, this method should return chemObj.addOverrideRender3DOptionItem.
* @private
*/
_getObjRenderOptionItemAppendMethod: function(chemObj)
{
return (this.getRenderType() === Kekule.Render.RendererType.R3D)?
chemObj.addOverrideRender3DOptionItem:
chemObj.addOverrideRenderOptionItem;
},
/**
* Returns method to remove render option override item of chemObj.
* In 2D render mode, this method should return chemObj.removeOverrideRenderOptionItem,
* in 3D render mode, this method should return chemObj.removeOverrideRender3DOptionItem.
* @private
*/
_getObjRenderOptionItemRemoveMethod: function(chemObj)
{
return (this.getRenderType() === Kekule.Render.RendererType.R3D )?
chemObj.removeOverrideRender3DOptionItem:
chemObj.removeOverrideRenderOptionItem;
},
/** @private */
_addSelectRenderOptions: function(chemObj)
{
var selOps = this.getObjSelectedRenderOptions();
if (selOps)
{
//console.log('_addSelectRenderOptions', chemObj, selOps);
var method = this._getObjRenderOptionItemAppendMethod(chemObj);
//if (!method)
//console.log(chemObj.getClassName());
return method.apply(chemObj, [selOps]);
}
else
return null;
},
/** @private */
_removeSelectRenderOptions: function(chemObj)
{
var selOps = this.getObjSelectedRenderOptions();
if (selOps)
{
//console.log('_removeSelectRenderOptions', chemObj);
var method = this._getObjRenderOptionItemRemoveMethod(chemObj);
return method.apply(chemObj, [this.getObjSelectedRenderOptions()]);
}
else
return null;
},
/** Notify that a continuous selection update is underway; the UI need not be updated yet. */
beginUpdateSelection: function()
{
this.beginUpdateObject();
--this._objSelectFlag;
},
/** Notify that a continuous selection update is done and the UI should now be updated. */
endUpdateSelection: function()
{
++this._objSelectFlag;
if (this._objSelectFlag >= 0)
{
this.selectionChanged();
}
this.endUpdateObject();
},
/** Check if the editor is under continuous selection update. */
isUpdatingSelection: function()
{
return (this._objSelectFlag < 0);
},
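// Illustrative batching pattern (sketch; `editor` stands for an editor instance, objA/objB for chem
// objects already loaded in it): group several selection changes so selectionChanged() fires only once.
//   editor.beginUpdateSelection();
//   try
//   {
//     editor.addToSelection(objA);
//     editor.removeFromSelection(objB);
//   }
//   finally
//   {
//     editor.endUpdateSelection();
//   }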
/**
* Notify selection is changed or object in selection has changed.
* @private
*/
selectionChanged: function()
{
/*
var selection = this.getSelection();
if (selection && selection.length) // at least one selected object
{
var obj, boundItem, bound, box;
var containBox;
// calc out bound box to contain all selected objects
for (var i = 0, l = selection.length; i < l; ++i)
{
obj = selection[i];
boundItem = this.findBoundMapItem(obj);
if (boundItem)
{
bound = boundItem.boundInfo;
if (bound)
{
box = Kekule.Render.MetaShapeUtils.getContainerBox(bound);
if (box)
{
if (!containBox)
containBox = box;
else
containBox = Kekule.BoxUtils.getContainerBox(containBox, box);
}
}
}
}
if (containBox)
{
var inflation = this.getEditorConfigs().getInteractionConfigs().getSelectionMarkerInflation() || 0;
if (inflation)
containBox = Kekule.BoxUtils.inflateBox(containBox, inflation);
this.changeSelectionMarkerBox(containBox);
}
else // no selected
this.removeSelectionMarker();
}
else // no selected
{
this.removeSelectionMarker();
}
*/
if (!this.isUpdatingSelection())
{
this.notifyPropSet('selection', this.getSelection());
this.invokeEvent('selectionChange');
return this.doSelectionChanged();
}
},
/**
* Do actual work of method selectionChanged.
* Descendants may override this method.
*/
doSelectionChanged: function()
{
this.recalcSelectionAreaMarker();
},
/**
* Check if an object is in selection.
* @param {Kekule.ChemObject} obj
* @returns {Bool}
*/
isInSelection: function(obj)
{
if (!obj)
return false;
return this.getSelection().indexOf(obj) >= 0;
},
/**
* Add an object to selection.
* Descendants can override this method.
* @param {Kekule.ChemObject} obj
*/
addObjToSelection: function(obj)
{
if (!obj)
return this;
var selection = this.getSelection();
Kekule.ArrayUtils.pushUnique(selection, obj.getNearestSelectableObject());
this._addSelectRenderOptions(obj);
this.selectionChanged();
return this;
},
/**
* Remove an object (and all its child objects) from selection.
* Descendants can override this method.
* @param {Kekule.ChemObject} obj
*/
removeObjFromSelection: function(obj, doNotNotifySelectionChange)
{
if (!obj)
return this;
var selection = this.getSelection();
var relObj = obj.getNearestSelectableObject && obj.getNearestSelectableObject();
if (relObj === obj)
relObj = null;
Kekule.ArrayUtils.remove(selection, obj);
this._removeSelectRenderOptions(obj);
if (relObj)
{
Kekule.ArrayUtils.remove(selection, relObj);
this._removeSelectRenderOptions(relObj);
}
// remove possible child objects
for (var i = selection.length - 1; i >= 0; --i)
{
var remainObj = selection[i];
if (remainObj.isChildOf && (remainObj.isChildOf(obj) || (relObj && remainObj.isChildOf(relObj))))
this.removeObjFromSelection(remainObj, true);
}
if (!doNotNotifySelectionChange)
this.selectionChanged();
return this;
},
/**
* Select all first-level objects in editor.
*/
selectAll: function()
{
var selection = [];
var obj = this.getChemObj();
if (obj)
{
var children = obj.getChildren() || [];
if (children.length)
selection = children;
else
selection = [obj];
}
return this.select(selection);
},
/**
* Deselect all objects in selection
*/
deselectAll: function()
{
var selection = this.getSelection();
return this.removeFromSelection(selection);
},
/**
* Make an object or a set of objects selected.
* @param {Variant} objs An object or an array of objects.
*/
select: function(objs)
{
this.beginUpdateSelection();
try
{
this.deselectAll();
this.addToSelection(objs);
}
finally
{
//console.log(this.getPainter().getRenderer().getClassName(), this.getPainter().getRenderer().getRenderCache(this.getDrawContext()));
this.endUpdateSelection();
}
return this;
},
/**
* Add object or an array of objects to selection.
* @param {Variant} param An object or an array of objects.
*/
addToSelection: function(param)
{
if (!param)
return;
var objs = DataType.isArrayValue(param)? param: [param];
this.beginUpdateSelection();
try
{
for (var i = 0, l = objs.length; i < l; ++i)
{
this.addObjToSelection(objs[i]);
}
}
finally
{
this.endUpdateSelection();
}
return this;
},
/**
* Remove object or an array of objects from selection.
* @param {Variant} param An object or an array of objects.
*/
removeFromSelection: function(param)
{
if (!param || !param.length)
return;
var objs = DataType.isArrayValue(param)? param: [param];
this.beginUpdateSelection();
try
{
for (var i = objs.length - 1; i >= 0; --i)
{
this.removeObjFromSelection(objs[i]);
}
}
finally
{
this.endUpdateSelection();
}
return this;
},
/**
* Toggle selection state of object or an array of objects.
* @param {Variant} param An object or an array of objects.
*/
toggleSelectingState: function(param)
{
if (!param)
return;
var objs = DataType.isArrayValue(param)? param: [param];
this.beginUpdateSelection();
try
{
for (var i = 0, l = objs.length; i < l; ++i)
{
var obj = objs[i];
var relObj = obj.getNearestSelectableObject && obj.getNearestSelectableObject();
if (this.isInSelection(obj))
this.removeObjFromSelection(obj);
else if (relObj && this.isInSelection(relObj))
this.removeObjFromSelection(relObj);
else
this.addObjToSelection(obj);
}
}
finally
{
this.endUpdateSelection();
}
return this;
},
/**
* Check if there are any objects currently selected.
* @returns {Bool}
*/
hasSelection: function()
{
return !!this.getSelection().length;
},
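// Illustrative selection calls (sketch; `editor` stands for an editor instance, `mol` for a chem object
// loaded in it):
//   editor.select(mol);                // make mol the only selected object
//   editor.toggleSelectingState(mol);  // toggles it, i.e. deselects it again
//   if (!editor.hasSelection())
//     editor.selectAll();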
/**
* Delete and free all selected objects.
*/
deleteSelectedObjs: function()
{
// TODO: unfinished
},
/** @private */
requestAutoCheckIssuesIfNecessary: function()
{
if (this.getEnableAutoIssueCheck())
this._tryCheckIssueOnIdle();
},
/**
* Check issues when not updating/manipulating objects in editor.
* @private
*/
_tryCheckIssueOnIdle: function()
{
//console.log('try check', this.isUpdatingObject(), this.isManipulatingObject());
if (!this.isUpdatingObject() && !this.isManipulatingObject())
{
return this.checkIssues();
}
else
return null;
},
/**
* Create a default issue check executor instance.
* Descendants may override this method.
* @returns {Kekule.IssueCheck.Executor}
* @private
*/
createIssueCheckExecutor: function()
{
return new Kekule.IssueCheck.ExecutorForEditor(this);
},
/**
* Check potential issues for objects in editor.
* @param {Kekule.ChemObject} rootObj Root object to check. If this param is omitted, all objects in the editor will be checked.
* @returns {Array} Array of error check report results.
*/
checkIssues: function(rootObj)
{
var result = null;
var root = rootObj || this.getChemObj();
if (root && this.getEnableIssueCheck())
{
var checkExecutor = this.getIssueCheckExecutor(true);
result = checkExecutor.execute(root);
}
return result;
},
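// Illustrative call (sketch; `editor` stands for an editor instance with issue check enabled):
//   var checkResults = editor.checkIssues();  // checks the whole chem object loaded in the editor
//   if (checkResults && checkResults.length)
//     console.log('Found ' + checkResults.length + ' potential issue(s)');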
/**
* Get all objects intersecting a polyline defined by a set of screen coords.
* Objects partially inside the polyline width range will also be put in the result.
* @param {Array} polylineScreenCoords
* @param {Number} lineWidth
* @returns {Array} All intersecting objects.
*/
getObjectsIntersetExtendedPolyline: function(polylineScreenCoords, lineWidth)
{
var ctxCoords = [];
for (var i = 0, l = polylineScreenCoords.length; i < l; ++i)
{
ctxCoords.push(this.screenCoordToContext(polylineScreenCoords[i]));
}
var objs = [];
var boundInfos = this.getBoundInfoRecorder().getAllRecordedInfoOfContext(this.getObjContext());
var compareFunc = Kekule.Render.MetaShapeUtils.isIntersectingPolyline;
for (var i = 0, l = boundInfos.length; i < l; ++i)
{
var boundInfo = boundInfos[i];
var shapeInfo = boundInfo.boundInfo;
/*
if (!shapeInfo)
console.log(boundInfo);
*/
if (shapeInfo)
if (compareFunc(shapeInfo, ctxCoords, lineWidth))
objs.push(boundInfo.obj);
}
//console.log('selected', objs);
return objs;
},
/**
* Get all objects inside a polygon defined by a set of screen coords.
* @param {Array} polygonScreenCoords
* @param {Bool} allowPartialAreaSelecting If this value is true, objects partially inside the polygon will also be selected.
* @returns {Array} All inside objects.
*/
getObjectsInPolygon: function(polygonScreenCoords, allowPartialAreaSelecting)
{
var ctxCoords = [];
for (var i = 0, l = polygonScreenCoords.length; i < l; ++i)
{
ctxCoords.push(this.screenCoordToContext(polygonScreenCoords[i]));
}
var objs = [];
var boundInfos = this.getBoundInfoRecorder().getAllRecordedInfoOfContext(this.getObjContext());
var compareFunc = allowPartialAreaSelecting? Kekule.Render.MetaShapeUtils.isIntersectingPolygon: Kekule.Render.MetaShapeUtils.isInsidePolygon;
for (var i = 0, l = boundInfos.length; i < l; ++i)
{
var boundInfo = boundInfos[i];
var shapeInfo = boundInfo.boundInfo;
/*
if (!shapeInfo)
console.log(boundInfo);
*/
if (shapeInfo)
if (compareFunc(shapeInfo, ctxCoords))
objs.push(boundInfo.obj);
}
//console.log('selected', objs);
return objs;
},
/**
* Get all objects inside a screen box.
* @param {Hash} screenBox
* @param {Bool} allowPartialAreaSelecting If this value is true, objects partially inside the box will also be selected.
* @returns {Array} All inside objects.
*/
getObjectsInScreenBox: function(screenBox, allowPartialAreaSelecting)
{
var box = this.screenBoxToContext(screenBox);
var objs = [];
var boundInfos = this.getBoundInfoRecorder().getAllRecordedInfoOfContext(this.getObjContext());
var compareFunc = allowPartialAreaSelecting? Kekule.Render.MetaShapeUtils.isIntersectingBox: Kekule.Render.MetaShapeUtils.isInsideBox;
for (var i = 0, l = boundInfos.length; i < l; ++i)
{
var boundInfo = boundInfos[i];
var shapeInfo = boundInfo.boundInfo;
/*
if (!shapeInfo)
console.log(boundInfo);
*/
if (shapeInfo)
if (compareFunc(shapeInfo, box))
objs.push(boundInfo.obj);
}
//console.log('selected', objs);
return objs;
},
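// Illustrative box query (sketch; `editor` stands for an editor instance, the box values are arbitrary
// screen coords): fetch objects whose bounds intersect the box, including partially covered ones.
//   var box = {'x1': 10, 'y1': 10, 'x2': 200, 'y2': 150};
//   var objs = editor.getObjectsInScreenBox(box, true);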
/**
* Select all objects inside a screen box.
* @param {Hash} screenBox
* @param {Bool} allowPartialAreaSelecting If this value is true, objects partially inside the box will also be selected.
* @returns {Array} All inside objects.
*/
selectObjectsInScreenBox: function(screenBox, allowPartialAreaSelecting)
{
var objs = this.getObjectsInScreenBox(screenBox, allowPartialAreaSelecting);
if (objs && objs.length)
this.select(objs);
return objs;
},
/**
* Add objects inside a screen box to selection.
* @param {Hash} screenBox
* @param {Bool} allowPartialAreaSelecting If this value is true, objects partially inside the box will also be selected.
* @returns {Array} All inside objects.
*/
addObjectsInScreenBoxToSelection: function(screenBox, allowPartialAreaSelecting)
{
var objs = this.getObjectsInScreenBox(screenBox, allowPartialAreaSelecting);
if (objs && objs.length)
this.addToSelection(objs);
return objs;
},
/**
* Remove objects inside a screen box from selection.
* @param {Hash} screenBox
* @param {Bool} allowPartialAreaSelecting If this value is true, objects partially inside the box will also be deselected.
* @returns {Array} All inside objects.
*/
removeObjectsInScreenBoxFromSelection: function(screenBox, allowPartialAreaSelecting)
{
var objs = this.getObjectsInScreenBox(screenBox, allowPartialAreaSelecting);
if (objs && objs.length)
this.removeFromSelection(objs);
return objs;
},
/**
* Toggle selection state of objects inside a screen box.
* @param {Hash} screenBox
* @param {Bool} allowPartialAreaSelecting If this value is true, the selecting state of objects partially inside the box will also be toggled.
* @returns {Array} All inside objects.
*/
toggleSelectingStateOfObjectsInScreenBox: function(screenBox, allowPartialAreaSelecting)
{
var objs = this.getObjectsInScreenBox(screenBox, allowPartialAreaSelecting);
if (objs && objs.length)
this.toggleSelectingState(objs);
return objs;
},
/**
* Returns a minimal box (in screen coord system) containing all objects' bounds in editor.
* @param {Array} objects
* @param {Float} objBoundInflation Inflation of each object's bound.
* @returns {Hash}
*/
getObjectsContainerBox: function(objects, objBoundInflation)
{
var objs = Kekule.ArrayUtils.toArray(objects);
var inf = objBoundInflation || 0;
var bounds = [];
var containerBox = null;
for (var i = 0, l = objs.length; i < l; ++i)
{
var obj = objs[i];
var infos = this.getBoundInfoRecorder().getBelongedInfos(this.getObjContext(), obj);
if (infos && infos.length)
{
for (var j = 0, k = infos.length; j < k; ++j)
{
var info = infos[j];
var bound = info.boundInfo;
if (bound)
{
// inflate
if (inf)
bound = Kekule.Render.MetaShapeUtils.inflateShape(bound, inf);
var box = Kekule.Render.MetaShapeUtils.getContainerBox(bound);
containerBox = containerBox? Kekule.BoxUtils.getContainerBox(containerBox, box): box;
}
}
}
}
return containerBox;
},
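// Illustrative use (sketch; `editor` stands for an editor instance): compute the screen-system box
// wrapping the current selection and scroll to it if it is not visible.
//   var box = editor.getObjectsContainerBox(editor.getSelection(), 0);
//   if (box && !editor.isSelectionVisible())
//     editor.scrollClientToObject(editor.getSelection());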
/**
* Returns the intersection state of object container box and editor client box.
* @param {Array} objs
* @returns {Int} Value from {@link Kekule.IntersectionState}
*/
getObjectsBoxAndClientBoxRelation: function(objs)
{
var containerBox = this.getObjectsContainerBox(objs);
if (containerBox)
{
var editorBox = this.getVisibleClientScreenBox();
return Kekule.BoxUtils.getIntersectionState(containerBox, editorBox);
}
},
/**
* Returns container box (in screen coord system) that contains all objects in selection.
* @param {Number} objBoundInflation
* @returns {Hash}
*/
getSelectionContainerBox: function(objBoundInflation)
{
return this.getObjectsContainerBox(this.getSelection(), objBoundInflation);
},
/**
* Returns whether the currently selected objects are visible on screen (i.e. not all of them
* are hidden in the scrolled-out area).
*/
isSelectionVisible: function()
{
var selectionBox = this.getSelectionContainerBox();
if (selectionBox)
{
/*
var editorDim = this.getClientDimension();
var editorOffset = this.getClientScrollPosition();
var editorBox = {
'x1': editorOffset.x, 'y1': editorOffset.y,
'x2': editorOffset.x + editorDim.width, 'y2': editorOffset.y + editorDim.height
};
*/
var editorBox = this.getVisibleClientScreenBox();
//console.log(selectionBox, editorBox, Kekule.BoxUtils.getIntersection(selectionBox, editorBox));
return Kekule.BoxUtils.hasIntersection(selectionBox, editorBox);
}
else
return false;
},
/////////// methods about object manipulations /////////////////////////////
/**
* Returns width and height info of obj.
* @param {Object} obj
* @returns {Hash}
*/
getObjSize: function(obj)
{
return this.doGetObjSize(obj);
},
/**
* Do actual job of getObjSize. Descendants may override this method.
* @param {Object} obj
* @returns {Hash}
*/
doGetObjSize: function(obj)
{
var coordMode = this.getCoordMode();
//var allowCoordBorrow = this.getAllowCoordBorrow();
return obj.getSizeOfMode? obj.getSizeOfMode(coordMode):
null;
},
/**
* Set dimension of obj.
* @param {Object} obj
* @param {Hash} size
*/
setObjSize: function(obj, size)
{
this.doSetObjSize(obj, size);
this.objectChanged(obj);
},
/**
* Do actual work of setObjSize.
* @param {Object} obj
* @param {Hash} size
*/
doSetObjSize: function(obj, dimension)
{
if (obj.setSizeOfMode)
obj.setSizeOfMode(dimension, this.getCoordMode());
},
/**
* Returns own coord of obj.
* @param {Object} obj
* @param {Int} coordPos Value from {@link Kekule.Render.CoordPos}, relative position of coord in object.
* @returns {Hash}
*/
getObjCoord: function(obj, coordPos)
{
return this.doGetObjCoord(obj, coordPos);
},
/**
* Do actual job of getObjCoord. Descendants may override this method.
* @private
*/
doGetObjCoord: function(obj, coordPos)
{
if (!obj)
return null;
var coordMode = this.getCoordMode();
var allowCoordBorrow = this.getAllowCoordBorrow();
var result = obj.getAbsBaseCoord? obj.getAbsBaseCoord(coordMode, allowCoordBorrow):
obj.getAbsCoordOfMode? obj.getAbsCoordOfMode(coordMode, allowCoordBorrow):
obj.getCoordOfMode? obj.getCoordOfMode(coordMode, allowCoordBorrow):
null;
if (coordMode === Kekule.CoordMode.COORD2D && Kekule.ObjUtils.notUnset(coordPos)) // an explicit coord pos is specified, further calculation is needed
{
var baseCoordPos = Kekule.Render.CoordPos.DEFAULT;
if (coordPos !== baseCoordPos)
{
var allowCoordBorrow = this.getAllowCoordBorrow();
var box = obj.getExposedContainerBox? obj.getExposedContainerBox(coordMode, allowCoordBorrow):
obj.getContainerBox? obj.getContainerBox(coordMode, allowCoordBorrow): null;
//console.log(obj.getClassName(), coordPos, objBasePos, box);
if (box)
{
if (coordPos === Kekule.Render.CoordPos.CORNER_TL)
{
var delta = {x: (box.x2 - box.x1) / 2, y: (box.y2 - box.y1) / 2};
result.x = result.x - delta.x;
result.y = result.y + delta.y;
}
}
}
}
return result;
/*
return obj.getAbsBaseCoord2D? obj.getAbsBaseCoord2D(allowCoordBorrow):
obj.getAbsCoord2D? obj.getAbsCoord2D(allowCoordBorrow):
obj.getCoord2D? obj.getCoord2D(allowCoordBorrow):
null;
*/
},
/**
* Set own coord of obj.
* @param {Object} obj
* @param {Hash} coord
* @param {Int} coordPos Value from {@link Kekule.Render.CoordPos}, relative position of coord in object.
*/
setObjCoord: function(obj, coord, coordPos)
{
this.doSetObjCoord(obj, coord, coordPos);
this.objectChanged(obj);
},
/**
* Do actual job of setObjCoord. Descendants can override this method.
* @private
*/
doSetObjCoord: function(obj, coord, coordPos)
{
var newCoord = Object.create(coord);
var coordMode = this.getCoordMode();
//console.log(obj.setAbsBaseCoord, obj.setAbsCoordOfMode, obj.setAbsCoordOfMode);
if (coordMode === Kekule.CoordMode.COORD2D && Kekule.ObjUtils.notUnset(coordPos)) // an explicit coord pos is specified, further calculation is needed
{
//var baseCoordPos = obj.getCoordPos? obj.getCoordPos(coordMode): Kekule.Render.CoordPos.DEFAULT;
var baseCoordPos = Kekule.Render.CoordPos.DEFAULT;
if (coordPos !== baseCoordPos)
{
var allowCoordBorrow = this.getAllowCoordBorrow();
var box = obj.getExposedContainerBox? obj.getExposedContainerBox(coordMode, allowCoordBorrow):
obj.getContainerBox? obj.getContainerBox(coordMode, allowCoordBorrow): null;
//console.log(obj.getClassName(), coordPos, objBasePos, box);
if (box)
{
var delta = {x: (box.x2 - box.x1) / 2, y: (box.y2 - box.y1) / 2};
if (coordPos === Kekule.Render.CoordPos.CORNER_TL)
// base coord on center and set coord as top left
{
newCoord.x = coord.x + delta.x;
newCoord.y = coord.y - delta.y;
}
}
}
}
if (obj.setAbsBaseCoord)
{
obj.setAbsBaseCoord(newCoord, coordMode);
}
else if (obj.setAbsCoordOfMode)
{
obj.setAbsCoordOfMode(newCoord, coordMode);
}
else if (obj.setCoordOfMode)
{
obj.setCoordOfMode(newCoord, coordMode);
}
},
/**
* Get object's coord on context.
* @param {Object} obj
* @returns {Hash}
*/
getObjectContextCoord: function(obj, coordPos)
{
var coord = this.getObjCoord(obj, coordPos);
return this.objCoordToContext(coord);
},
/**
* Change object's coord on context.
* @param {Object} obj
* @param {Hash} contextCoord
*/
setObjectContextCoord: function(obj, contextCoord, coordPos)
{
var coord = this.contextCoordToObj(contextCoord);
if (coord)
this.setObjCoord(obj, coord, coordPos);
},
/**
* Get object's coord on screen.
* @param {Object} obj
* @returns {Hash}
*/
getObjectScreenCoord: function(obj, coordPos)
{
var coord = this.getObjCoord(obj, coordPos);
return this.objCoordToScreen(coord);
},
/**
* Change object's coord on screen.
* @param {Object} obj
* @param {Hash} contextCoord
*/
setObjectScreenCoord: function(obj, screenCoord, coordPos)
{
var coord = this.screenCoordToObj(screenCoord);
if (coord)
this.setObjCoord(obj, coord, coordPos);
},
/**
* Get coord of obj.
* @param {Object} obj
* @param {Int} coordSys Value from {@link Kekule.Render.CoordSystem}. Only CONTEXT and CHEM are available here.
* @returns {Hash}
*/
getCoord: function(obj, coordSys, coordPos)
{
/*
if (coordSys === Kekule.Render.CoordSystem.CONTEXT)
return this.getObjectContextCoord(obj);
else
return this.getObjCoord(obj);
*/
var objCoord = this.getObjCoord(obj, coordPos);
return this.translateCoord(objCoord, Kekule.Editor.CoordSys.OBJ, coordSys);
},
/**
* Set coord of obj.
* @param {Object} obj
* @param {Hash} value
* @param {Int} coordSys Value from {@link Kekule.Render.CoordSystem}. Only CONTEXT and CHEM are available here.
*/
setCoord: function(obj, value, coordSys, coordPos)
{
/*
if (coordSys === Kekule.Render.CoordSystem.CONTEXT)
this.setObjectContextCoord(obj, value);
else
this.setObjCoord(obj, value);
*/
var objCoord = this.translateCoord(value, coordSys, Kekule.Editor.CoordSys.OBJ);
this.setObjCoord(obj, objCoord, coordPos);
},
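// Illustrative coord access (sketch; `editor` stands for an editor instance, `obj` for a chem object in
// it; the coord system constant follows the Kekule.Editor.CoordSys usage in translateCoord):
//   var S = Kekule.Editor.CoordSys;
//   var c = editor.getCoord(obj, S.CONTEXT);
//   editor.setCoord(obj, {'x': c.x + 10, 'y': c.y}, S.CONTEXT);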
/**
* Get size of obj.
* @param {Object} obj
* @param {Int} coordSys Value from {@link Kekule.Render.CoordSystem}. Only CONTEXT and CHEM are available here.
* @returns {Hash}
*/
getSize: function(obj, coordSys)
{
var objSize = this.getObjSize(obj);
return this.translateCoord(objSize, Kekule.Editor.CoordSys.OBJ, coordSys);
},
/**
* Set size of obj.
* @param {Object} obj
* @param {Hash} value
* @param {Int} coordSys Value from {@link Kekule.Render.CoordSystem}. Only CONTEXT and CHEM are available here.
*/
setSize: function(obj, value, coordSys)
{
var objSize = this.translateCoord(value, coordSys, Kekule.Editor.CoordSys.OBJ);
this.setObjSize(obj, objSize);
},
// Coord translate methods
/*
* Translate coord to value of another coord system.
* @param {Hash} coord
* @param {Int} fromSys
* @param {Int} toSys
*/
/*
translateCoord: function(coord, fromSys, toSys)
{
if (!coord)
return null;
var S = Kekule.Editor.CoordSys;
if (fromSys === S.SCREEN)
{
if (toSys === S.SCREEN)
return coord;
else if (toSys === S.CONTEXT)
return this.getObjDrawBridge()? this.getObjDrawBridge().transformScreenCoordToContext(this.getObjContext(), coord): coord;
else // S.OBJ
{
var contextCoord = this.getObjDrawBridge()? this.getObjDrawBridge().transformScreenCoordToContext(this.getObjContext(), coord): coord;
return this.getObjContext()? this.getRootRenderer().transformCoordToObj(this.getObjContext(), this.getChemObj(), contextCoord): coord;
}
}
else if (fromSys === S.CONTEXT)
{
if (toSys === S.SCREEN)
return this.getObjDrawBridge()? this.getObjDrawBridge().transformContextCoordToScreen(this.getObjContext(), coord): coord;
else if (toSys === S.CONTEXT)
return coord;
else // S.OBJ
return this.getObjContext()? this.getRootRenderer().transformCoordToObj(this.getObjContext(), this.getChemObj(), coord): coord;
}
else // fromSys === S.OBJ
{
if (toSys === S.SCREEN)
{
var contextCoord = this.getRootRenderer().transformCoordToContext(this.getObjContext(), this.getChemObj(), coord);
return this.getObjDrawBridge()? this.getObjDrawBridge().transformContextCoordToScreen(this.getObjContext(), contextCoord): coord;
}
else if (toSys === S.CONTEXT)
return this.getObjContext()? this.getRootRenderer().transformCoordToContext(this.getObjContext(), this.getChemObj(), coord): coord;
else // S.OBJ
return coord;
}
},
*/
/*
* Translate a distance value to a distance in another coord system.
* @param {Hash} coord
* @param {Int} fromSys
* @param {Int} toSys
*/
/*
translateDistance: function(distance, fromSys, toSys)
{
var coord0 = {'x': 0, 'y': 0, 'z': 0};
var coord1 = {'x': distance, 'y': 0, 'z': 0};
var transCoord0 = this.translateCoord(coord0, fromSys, toSys);
var transCoord1 = this.translateCoord(coord1, fromSys, toSys);
return Kekule.CoordUtils.getDistance(transCoord0, transCoord1);
},
*/
/**
* Transform sizes and coords of objects based on coord sys of current editor.
* @param {Array} objects
* @param {Hash} transformParams
* @private
*/
transformCoordAndSizeOfObjects: function(objects, transformParams)
{
var coordMode = this.getCoordMode();
var allowCoordBorrow = this.getAllowCoordBorrow();
var matrix = (coordMode === Kekule.CoordMode.COORD3D)?
Kekule.CoordUtils.calcTransform3DMatrix(transformParams):
Kekule.CoordUtils.calcTransform2DMatrix(transformParams);
var childTransformParams = Object.extend({}, transformParams);
childTransformParams = Object.extend(childTransformParams, {
'translateX': 0,
'translateY': 0,
'translateZ': 0,
'center': {'x': 0, 'y': 0, 'z': 0}
});
var childMatrix = (coordMode === Kekule.CoordMode.COORD3D)?
Kekule.CoordUtils.calcTransform3DMatrix(childTransformParams):
Kekule.CoordUtils.calcTransform2DMatrix(childTransformParams);
for (var i = 0, l = objects.length; i < l; ++i)
{
var obj = objects[i];
obj.transformAbsCoordByMatrix(matrix, childMatrix, coordMode, true, allowCoordBorrow);
obj.scaleSize(transformParams.scale, coordMode, true, allowCoordBorrow);
}
},
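// A sketch of the transformParams hash consumed above; only fields referenced in this method
// (center, scale, translateX/Y) are shown and the concrete values are arbitrary:
/*
var transformParams = {
  'center': {'x': 0, 'y': 0},   // transform center
  'scale': 1.5,                 // uniform scale, also applied to object sizes via scaleSize
  'translateX': 10,
  'translateY': -5
};
editor.transformCoordAndSizeOfObjects(editor.getSelection(), transformParams);  // hypothetical call to this private method
*/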
/*
* Turn obj coord to context one.
* @param {Hash} objCoord
* @returns {Hash}
*/
/*
objCoordToContext: function(objCoord)
{
var S = Kekule.Editor.CoordSys;
return this.translateCoord(objCoord, S.OBJ, S.CONTEXT);
},
*/
/*
* Turn context coord to obj one.
* @param {Hash} contextCoord
* @returns {Hash}
*/
/*
contextCoordToObj: function(contextCoord)
{
var S = Kekule.Editor.CoordSys;
return this.translateCoord(contextCoord, S.CONTEXT, S.OBJ);
},
*/
/*
* Turn obj coord to screen one.
* @param {Hash} objCoord
* @returns {Hash}
*/
/*
objCoordToScreen: function(objCoord)
{
var S = Kekule.Editor.CoordSys;
return this.translateCoord(objCoord, S.OBJ, S.SCREEN);
},
*/
/*
* Turn screen coord to obj one.
* @param {Hash} contextCoord
* @returns {Hash}
*/
/*
screenCoordToObj: function(screenCoord)
{
var S = Kekule.Editor.CoordSys;
return this.translateCoord(screenCoord, S.SCREEN, S.OBJ);
},
*/
/*
* Turn screen based coord to context one.
* @param {Hash} screenCoord
* @returns {Hash}
*/
/*
screenCoordToContext: function(screenCoord)
{
var S = Kekule.Editor.CoordSys;
return this.translateCoord(screenCoord, S.SCREEN, S.CONTEXT);
},
*/
/*
* Turn context based coord to screen one.
* @param {Hash} screenCoord
* @returns {Hash}
*/
/*
contextCoordToScreen: function(screenCoord)
{
var S = Kekule.Editor.CoordSys;
return this.translateCoord(screenCoord, S.CONTEXT, S.SCREEN);
},
*/
/*
* Turn box coords based on screen system to context one.
* @param {Hash} screenCoord
* @returns {Hash}
*/
/*
screenBoxToContext: function(screenBox)
{
var coord1 = this.screenCoordToContext({'x': screenBox.x1, 'y': screenBox.y1});
var coord2 = this.screenCoordToContext({'x': screenBox.x2, 'y': screenBox.y2});
return {'x1': coord1.x, 'y1': coord1.y, 'x2': coord2.x, 'y2': coord2.y};
},
*/
///////////////////////////////////////////////////////
/**
* Create a default node at coord and append it to parent.
* @param {Hash} coord
* @param {Int} coordType Value from {@link Kekule.Editor.CoordType}
* @param {Kekule.StructureFragment} parent
* @returns {Kekule.ChemStructureNode}
* @private
*/
createDefaultNode: function(coord, coordType, parent)
{
var isoId = this.getEditorConfigs().getStructureConfigs().getDefIsotopeId();
var atom = new Kekule.Atom();
atom.setIsotopeId(isoId);
if (parent)
parent.appendNode(atom);
this.setCoord(atom, coord, coordType);
return atom;
},
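// A hedged sketch of invoking this private helper, assuming `mol` is a Kekule.StructureFragment
// already loaded in the editor and screen position (100, 100) lies inside the client area:
/*
var atom = this.createDefaultNode({'x': 100, 'y': 100}, Kekule.Editor.CoordSys.SCREEN, mol);
*/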
/////////////////////////////////////////////////////////////////////////////
// methods about undo/redo and operation histories
/**
* Called after an operation is executed or reversed. Notifies that the object has changed.
* @param {Object} operation
*/
operationDone: function(operation)
{
this.doOperationDone(operation);
},
/**
* Do actual job of {@link Kekule.Editor.AbstractEditor#operationDone}. Descendants should override this method.
* @private
*/
doOperationDone: function(operation)
{
// do nothing here
},
/**
* Pop all operations and empty history list.
*/
clearOperHistory: function()
{
var h = this.getOperHistory();
if (h)
h.clear();
},
/**
* Manually append an operation to the tail of operation history.
* @param {Kekule.Operation} operation
* @param {Bool} autoExec Whether to execute the operation after pushing it.
*/
pushOperation: function(operation, autoExec)
{
// console.log('push operation');
if (operation)
{
var h = this.getOperHistory();
if (h)
{
h.push(operation);
}
this.getOperationsInCurrManipulation().push(operation);
if (autoExec)
{
this.beginUpdateObject();
try
{
operation.execute();
}
finally
{
this.endUpdateObject();
}
}
}
},
/**
* Manually pop an operation from the tail of operation history.
* @param {Bool} autoReverse Whether to undo the operation after popping it.
* @returns {Kekule.Operation} Operation popped.
*/
popOperation: function(autoReverse)
{
var r;
var h = this.getOperHistory();
if (h)
{
r = h.pop();
if (autoReverse)
{
this.beginUpdateObject();
try
{
r.reverse();
}
finally
{
this.endUpdateObject();
}
}
// if r is in operationsInCurrManipulation, remove it
var currOpers = this.getOperationsInCurrManipulation();
var index = currOpers.indexOf(r);
if (index >= 0)
currOpers.splice(index, 1);
return r;
}
else
return null;
},
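// A minimal usage sketch of the two methods above, assuming `editor` is a live editor and
// `oper` is an existing Kekule.Operation instance:
/*
editor.pushOperation(oper, true);       // push into history and execute immediately
var last = editor.popOperation(true);   // pop it back from history and reverse (undo) it
*/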
/**
* Execute an operation in editor.
* @param {Kekule.Operation} operation A single operation, or an array of operations.
*/
execOperation: function(operation)
{
//this.beginUpdateObject();
var opers = AU.toArray(operation);
this.beginManipulateAndUpdateObject();
try
{
for (var i = 0, l = opers.length; i < l; ++i)
{
var o = opers[i];
o.execute();
if (this.getEnableOperHistory())
this.pushOperation(o, false); // push but not execute
}
//operation.execute();
}
finally
{
//this.endUpdateObject();
this.endManipulateAndUpdateObject();
}
/*
if (this.getEnableOperHistory())
this.pushOperation(operation, false); // push but not execute
*/
return this;
},
/**
* Execute a series of operations in editor.
* @param {Array} operations
*/
execOperations: function(opers)
{
if (opers.length === 1)
return this.execOperation(opers[0]);
else
{
var oper = new Kekule.MacroOperation(opers);
return this.execOperation(oper);
}
},
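// A sketch of executing several modification operations in one batch; `obj1`/`obj2` and the
// modified property hashes are illustrative only:
/*
var opers = [
  new Kekule.ChemObjOperation.Modify(obj1, {'charge': 1}, editor),
  new Kekule.ChemObjOperation.Modify(obj2, {'charge': -1}, editor)
];
editor.execOperations(opers);  // wrapped in a MacroOperation and pushed to history when enabled
*/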
/**
* Replace an operation in operation history.
* @param {Kekule.Operation} oldOperation
* @param {Kekule.Operation} newOperation
* @returns {Kekule.Operation} The replaced old operation object.
*/
replaceOperationInHistory: function(oldOperation, newOperation)
{
var h = this.getOperHistory();
return h && h.replaceOperation(oldOperation, newOperation);
},
/**
* Undo last operation.
*/
undo: function()
{
var o;
var h = this.getOperHistory();
if (h)
{
this.beginUpdateObject();
try
{
o = h.undo();
}
finally
{
this.endUpdateObject();
if (o)
this.operationDone(o);
}
}
return o;
},
/**
* Redo last operation.
*/
redo: function()
{
var o;
var h = this.getOperHistory();
if (h)
{
this.beginUpdateObject();
try
{
o = h.redo();
}
finally
{
this.endUpdateObject();
if (o)
this.operationDone(o);
}
}
return o;
},
/**
* Undo all operations.
*/
undoAll: function()
{
var o;
var h = this.getOperHistory();
if (h)
{
this.beginUpdateObject();
try
{
o = h.undoAll();
}
finally
{
this.endUpdateObject();
}
}
return o;
},
/**
* Check if an undo action can be taken.
* @returns {Bool}
*/
canUndo: function()
{
var h = this.getOperHistory();
return h? h.canUndo(): false;
},
/**
* Check if a redo action can be taken.
* @returns {Bool}
*/
canRedo: function()
{
var h = this.getOperHistory();
return h? h.canRedo(): false;
},
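// A sketch of the typical check-then-act sequence built on the four methods above:
/*
if (editor.canUndo())
  editor.undo();
if (editor.canRedo())
  editor.redo();
*/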
/**
* Modify properties of objects in editor.
* @param {Variant} objOrObjs An object or an array of objects.
* @param {Hash} modifiedPropInfos A hash of property: value pairs.
* @param {Bool} putInOperHistory If set to true, the modification will be put into history and can be undone.
*/
modifyObjects: function(objOrObjs, modifiedPropInfos, putInOperHistory)
{
var objs = Kekule.ArrayUtils.toArray(objOrObjs);
try
{
var macro = new Kekule.MacroOperation();
for (var i = 0, l = objs.length; i < l; ++i)
{
var obj = objs[i];
var oper = new Kekule.ChemObjOperation.Modify(obj, modifiedPropInfos, this);
macro.add(oper);
}
macro.execute();
}
finally
{
if (putInOperHistory && this.getEnableOperHistory() && macro.getChildCount())
this.pushOperation(macro);
}
return this;
},
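// A usage sketch, assuming `editor` is a live editor and the selected objects expose a
// `charge` property (illustrative only):
/*
editor.modifyObjects(editor.getSelection(), {'charge': 0}, true);  // undoable modification
*/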
/**
* Modify render options of objects in editor.
* @param {Variant} objOrObjs An object or an array of objects.
* @param {Hash} modifiedValues A hash of name: value pairs.
* @param {Bool} is3DOption If true, modify render3DOptions; otherwise modify renderOptions.
* @param {Bool} putInOperHistory If set to true, the modification will be put into history and can be undone.
*/
modifyObjectsRenderOptions: function(objOrObjs, modifiedValues, is3DOption, putInOperHistory)
{
var objs = Kekule.ArrayUtils.toArray(objOrObjs);
var renderPropName = is3DOption? 'render3DOptions': 'renderOptions';
var getterName = is3DOption? 'getRender3DOptions': 'getRenderOptions';
try
{
var macro = new Kekule.MacroOperation();
for (var i = 0, l = objs.length; i < l; ++i)
{
var obj = objs[i];
if (obj[getterName])
{
var old = obj[getterName]();
var newOps = Object.extend({}, old);
newOps = Object.extend(newOps, modifiedValues);
var hash = {};
hash[renderPropName] = newOps;
var oper = new Kekule.ChemObjOperation.Modify(obj, hash, this);
//oper.execute();
macro.add(oper);
}
}
this.beginManipulateAndUpdateObject();
macro.execute();
}
finally
{
if (putInOperHistory && this.getEnableOperHistory() && macro.getChildCount())
this.pushOperation(macro);
this.endManipulateAndUpdateObject();
}
return this;
},
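// A usage sketch; 'color' is assumed here to be a valid 2D render option key:
/*
editor.modifyObjectsRenderOptions(editor.getSelection(), {'color': '#ff0000'}, false, true);
*/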
/**
* Returns the dimension of current visible client area of editor.
*/
getClientDimension: function()
{
var elem = this.getElement();
return {
'width': elem.clientWidth,
'height': elem.clientHeight
};
},
/**
* Returns current scroll position of edit client element.
* @returns {Hash} {x, y}
*/
getClientScrollPosition: function()
{
var elem = this.getEditClientElem().parentNode;
return elem? {
'x': elem.scrollLeft,
'y': elem.scrollTop
}: null;
},
/**
* Returns the top left corner coord of client in coordSys.
* @param {Int} coordSys
* @returns {Hash}
*/
getClientScrollCoord: function(coordSys)
{
var screenCoord = this.getClientScrollPosition();
if (OU.isUnset(coordSys) || coordSys === Kekule.Editor.CoordSys.SCREEN)
return screenCoord;
else
return this.translateCoord(screenCoord, Kekule.Editor.CoordSys.SCREEN, coordSys);
},
/**
* Returns the screen rect/box of editor client element.
* @returns {Hash} {x1, y1, x2, y2, left, top, width, height}
*/
getClientVisibleRect: function()
{
var result = this.getClientDimension();
var p = this.getClientScrollPosition();
result.x1 = result.left = p.x;
result.y1 = result.top = p.y;
result.x2 = result.x1 + result.width;
result.y2 = result.y1 + result.height;
return result;
},
/**
* Scroll edit client to a position.
* @param {Int} yPosition Y position in px.
* @param {Int} xPosition X position in px.
*/
scrollClientTo: function(yPosition, xPosition)
{
/*
var elem = this.getEditClientElem().parentNode;
if (Kekule.ObjUtils.notUnset(yPosition))
elem.scrollTop = yPosition;
if (Kekule.ObjUtils.notUnset(xPosition))
elem.scrollLeft = xPosition;
return this;
*/
return this.scrollClientToCoord({'y': yPosition, 'x': xPosition});
},
/**
* Scroll edit client to top.
*/
scrollClientToTop: function()
{
return this.scrollClientTo(0, null);
},
/**
* Scroll edit client to bottom.
*/
scrollClientToBottom: function()
{
var elem = this.getEditClientElem();
var dim = Kekule.HtmlElementUtils.getElemClientDimension(elem);
return this.scrollClientTo(dim.height, null);
},
/**
* Scroll edit client to coord (based on coordSys).
* @param {Hash} coord
* @param {Int} coordSys If not set, screen coord system will be used.
* @param {Hash} options A hash object that contains the options of scrolling.
* Currently it may have one field: scrollToCenter. If scrollToCenter is true,
* the coord will be at the center of edit area rather than top-left.
*/
scrollClientToCoord: function(coord, coordSys, options)
{
var scrollX = OU.notUnset(coord.x);
var scrollY = OU.notUnset(coord.y);
var scrollToCenter = options && options.scrollToCenter;
var screenCoord;
if (OU.isUnset(coordSys))
screenCoord = coord;
else
screenCoord = this.translateCoord(coord, coordSys, Kekule.Editor.CoordSys.SCREEN);
if (scrollToCenter)
{
var visibleClientBox = this.getVisibleClientScreenBox();
var delta = {'x': visibleClientBox.width / 2, 'y': visibleClientBox.height / 2};
screenCoord = Kekule.CoordUtils.substract(screenCoord, delta);
}
var elem = this.getEditClientElem().parentNode;
if (scrollY)
elem.scrollTop = screenCoord.y;
if (scrollX)
elem.scrollLeft = screenCoord.x;
return this;
},
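// A sketch of centering the view on a chem-space coord, assuming {x: 0, y: 0} is a meaningful
// position in the loaded document:
/*
editor.scrollClientToCoord({'x': 0, 'y': 0}, Kekule.Editor.CoordSys.CHEM, {'scrollToCenter': true});
*/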
/**
* Scroll edit client to target object or objects in editor.
* @param {Variant} targetObjOrObjs Target object or objects array.
* @param {Hash} options Scroll options, which may include two fields: scrollToCenter and coverMostObjs.
* The default value of both options is true.
*/
scrollClientToObject: function(targetObjOrObjs, options)
{
var BU = Kekule.BoxUtils;
if (!targetObjOrObjs)
return this;
var rootObj = this.getChemObj();
if (!rootObj)
return this;
var objs = AU.toArray(targetObjOrObjs);
/*
var containerBoxes = [];
var totalContainerBox = null;
for (var i = 0, l = objs.length; i < l; ++i)
{
var obj = objs[i];
if (obj.getExposedContainerBox && obj.isChildOf && obj.isChildOf(rootObj))
{
var box = obj.getExposedContainerBox(this.getCoordMode());
if (box)
{
containerBoxes.push(box);
if (!totalContainerBox)
totalContainerBox = box;
else
totalContainerBox = BU.getContainerBox(totalContainerBox, box);
}
}
}
*/
var boxInfo = this._getTargetObjsExposedContainerBoxInfo(objs, rootObj);
var totalContainerBox = boxInfo.totalBox;
var containerBoxes = boxInfo.boxes;
if (totalContainerBox)
{
var ops = Object.extend({scrollToCenter: true, coverMostObjs: true}, options || {});
/*
var actualBox;
// if scroll to centerCoord and none of the obj can be seen in current state, we need another approach
var visibleBox = this.getVisibleClientBoxOfSys(Kekule.Editor.CoordSys.CHEM);
if (((totalContainerBox.x2 - totalContainerBox.x1 > visibleBox.x2 - visibleBox.x1)
|| (totalContainerBox.y2 - totalContainerBox.y1 > visibleBox.y2 - visibleBox.y1))
&& ops.coverMostObjs)
{
actualBox = this._getMostIntersectedContainerBox(visibleBox.x2 - visibleBox.x1, visibleBox.y2 - visibleBox.y1, containerBoxes, totalContainerBox);
}
else
actualBox = totalContainerBox;
var scrollCoord = ops.scrollToCenter? BU.getCenterCoord(actualBox): {x: actualBox.x1, y: actualBox.y2};
return this.scrollClientToCoord(scrollCoord, Kekule.Editor.CoordSys.CHEM, ops);
*/
return this._scrollClientToContainerBox(totalContainerBox, containerBoxes, ops);
}
else
return this;
},
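// A sketch of scrolling the current selection into view with the default options:
/*
editor.scrollClientToObject(editor.getSelection());
*/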
/** @private */
_scrollClientToContainerBox: function(totalContainerBox, allContainerBoxes, options)
{
var BU = Kekule.BoxUtils;
var actualBox;
// if scrolling to the center coord and none of the objects can be seen in the current state, we need another approach
var visibleBox = this.getVisibleClientBoxOfSys(Kekule.Editor.CoordSys.CHEM);
if (((totalContainerBox.x2 - totalContainerBox.x1 > visibleBox.x2 - visibleBox.x1)
|| (totalContainerBox.y2 - totalContainerBox.y1 > visibleBox.y2 - visibleBox.y1))
&& options.coverMostObjs)
{
actualBox = this._getMostIntersectedContainerBox(visibleBox.x2 - visibleBox.x1, visibleBox.y2 - visibleBox.y1, allContainerBoxes, totalContainerBox);
}
else
actualBox = totalContainerBox;
var scrollCoord = options.scrollToCenter? BU.getCenterCoord(actualBox): {x: actualBox.x1, y: actualBox.y2};
return this.scrollClientToCoord(scrollCoord, Kekule.Editor.CoordSys.CHEM, options);
},
/**
* Returns the exposed container box of each object and the total container box.
* @param {Array} objs
* @returns {Hash}
* @private
*/
_getTargetObjsExposedContainerBoxInfo: function(objs, rootObj)
{
var BU = Kekule.BoxUtils;
if (!rootObj)
rootObj = this.getChemObj();
if (rootObj)
{
var totalContainerBox = null;
var containerBoxes = [];
for (var i = 0, l = objs.length; i < l; ++i)
{
var obj = objs[i];
if (obj.getExposedContainerBox && obj.isChildOf && obj.isChildOf(rootObj))
{
var box = obj.getExposedContainerBox(this.getCoordMode());
if (box)
{
containerBoxes.push(box);
if (!totalContainerBox)
totalContainerBox = box;
else
totalContainerBox = BU.getContainerBox(totalContainerBox, box);
}
}
}
}
return {'totalBox': totalContainerBox, 'boxes': containerBoxes};
},
/** @private */
_getMostIntersectedContainerBox: function(width, height, boxes, totalContainerBox)
{
var BU = Kekule.BoxUtils;
var generateTestBox = function(startingCoord, directions)
{
var endingCoord = CU.add(startingCoord, {'x': width * directions.x, 'y': height * directions.y});
if (endingCoord.x < totalContainerBox.x1)
endingCoord.x = totalContainerBox.x1;
else if (endingCoord.x > totalContainerBox.x2)
endingCoord.x = totalContainerBox.x2;
if (endingCoord.y < totalContainerBox.y1)
endingCoord.y = totalContainerBox.y1;
else if (endingCoord.y > totalContainerBox.y2)
endingCoord.y = totalContainerBox.y2;
var actualStartingCoord = CU.add(endingCoord, {'x': -width * directions.x, 'y': -height * directions.y});
var result = BU.createBox(actualStartingCoord, endingCoord);
return result;
};
var getIntersectedBoxCount = function(testBox, boxes)
{
var result = 0;
for (var i = 0, l = boxes.length; i < l; ++i)
{
var box = boxes[i];
if (BU.hasIntersection(box, testBox))
++result;
}
return result;
};
var maxIntersectCount = 0;
var currContainerBox;
for (var i = 0, l = boxes.length; i < l; ++i)
{
var corners = BU.getCornerCoords(boxes[i]);
var testBoxes = [
generateTestBox(corners[0], {x: 1, y: 1}),
generateTestBox(corners[1], {x: 1, y: -1}),
generateTestBox(corners[2], {x: -1, y: 1}),
generateTestBox(corners[3], {x: -1, y: -1}),
];
for (var j = 0, k = testBoxes.length; j < k; ++j)
{
var count = getIntersectedBoxCount(testBoxes[j], boxes);
if (count > maxIntersectCount)
{
maxIntersectCount = count;
currContainerBox = testBoxes[j];
}
}
}
return currContainerBox;
},
/////// Event handle //////////////////////
/** @ignore */
doBeforeDispatchUiEvent: function(/*$super, */e)
{
// get pointer type information here
var evType = e.getType();
if (['pointerdown', 'pointermove', 'pointerup'].indexOf(evType) >= 0)
{
this.setCurrPointerType(e.pointerType);
if (evType === 'pointermove' && this.isRenderable())
{
var coord = this._getEventMouseCoord(e, this.getCoreElement()); // coord based on editor client element
var hoveredObjs = this.getBasicObjectsAtCoord(coord, this.getCurrBoundInflation()) || [];
var oldHoveredObjs = this.getHoveredBasicObjs();
this.setPropStoreFieldValue('hoveredBasicObjs', hoveredObjs);
// if there are differences between oldHoveredObjs and hoveredObjs
if (!oldHoveredObjs || hoveredObjs.length !== oldHoveredObjs.length || AU.intersect(oldHoveredObjs, hoveredObjs).length !== hoveredObjs.length)
{
this.notifyHoverOnObjs(hoveredObjs);
}
}
}
return this.tryApplySuper('doBeforeDispatchUiEvent', [e]) /* $super(e) */;
},
/**
* Called when the pointer hovers on or off objects.
* @param {Array} hoveredObjs An empty array means the pointer has moved off all objects.
* @private
*/
notifyHoverOnObjs: function(hoveredObjs)
{
// when hovering on objects, update the chem object hint of editor
var hint = this.getChemObjsHint(hoveredObjs);
this.updateHintForChemObject(hint);
this.invokeEvent('hoverOnObjs', {'objs': hoveredObjs});
},
/**
* React to an HTML event to check whether it matches a registered hot key, and handle it when necessary.
* @param {HTMLEvent} e
* @returns {Bool} Returns true if a hot key is found and handled.
* @private
*/
reactHotKeys: function(e)
{
var editor = this;
// react to hotkeys
if (this.getEditorConfigs().getInteractionConfigs().getEnableHotKey())
{
var hotKeys = this.getEditorConfigs().getHotKeyConfigs().getHotKeys();
var srcParams = Kekule.Widget.KeyboardUtils.getKeyParamsFromEvent(e);
var done = false;
var pendingOperations = [];
for (var i = hotKeys.length - 1; i >= 0; --i)
{
var keyParams = Kekule.Widget.KeyboardUtils.shortcutLabelToKeyParams(hotKeys[i].key, null, false);
keyParams.repeat = hotKeys[i].repeat;
if (Kekule.Widget.KeyboardUtils.matchKeyParams(srcParams, keyParams, false)) // not strict match
{
var actionId = hotKeys[i].action;
if (actionId)
{
var action = editor.getChildAction(actionId, true);
if (action)
{
if (action instanceof Kekule.Editor.ActionOperationCreate.Base) // operation creation actions are handled differently
{
var opers = action.createOperations(editor);
done = !!(opers && opers.length) || done;
if (done)
pendingOperations = pendingOperations.concat(opers);
}
else
done = action.execute(editor, e) || done;
}
}
}
}
if (pendingOperations.length)
editor.execOperations(pendingOperations);
if (done)
{
e.stopPropagation();
e.preventDefault();
return true; // modification already done, return a flag as handled
}
}
},
/** @ignore */
react_keydown: function(e)
{
var handled = this.tryApplySuper('react_keydown', [e]);
if (!handled)
return this.reactHotKeys(e);
}
});
/**
* A special class to give a setting facade for BaseEditor.
* Do not use this class alone.
* @class
* @augments Kekule.ChemWidget.ChemObjDisplayer.Settings
* @ignore
*/
Kekule.Editor.BaseEditor.Settings = Class.create(Kekule.ChemWidget.ChemObjDisplayer.Settings,
/** @lends Kekule.Editor.BaseEditor.Settings# */
{
/** @private */
CLASS_NAME: 'Kekule.Editor.BaseEditor.Settings',
/** @private */
initProperties: function()
{
this.defineProp('enableCreateNewDoc', {'dataType': DataType.BOOL, 'serializable': false,
'getter': function() { return this.getEditor().getEnableCreateNewDoc(); },
'setter': function(value) { this.getEditor().setEnableCreateNewDoc(value); }
});
this.defineProp('initOnNewDoc', {'dataType': DataType.BOOL, 'serializable': false,
'getter': function() { return this.getEditor().getInitOnNewDoc(); },
'setter': function(value) { this.getEditor().setInitOnNewDoc(value); }
});
this.defineProp('enableOperHistory', {'dataType': DataType.BOOL, 'serializable': false,
'getter': function() { return this.getEditor().getEnableOperHistory(); },
'setter': function(value) { this.getEditor().setEnableOperHistory(value); }
});
this.defineProp('enableIssueCheck', {'dataType': DataType.BOOL, 'serializable': false,
'getter': function() { return this.getEditor().getEnableIssueCheck(); },
'setter': function(value) { this.getEditor().setEnableIssueCheck(value); }
});
},
/** @private */
getEditor: function()
{
return this.getDisplayer();
}
});
/**
* A class to register all available IA controllers for editor.
* @class
*/
Kekule.Editor.IaControllerManager = {
/** @private */
_controllerMap: new Kekule.MapEx(true),
/**
* Register a controller; the controller can be used in targetEditorClass or its descendants.
* @param {Class} controllerClass
* @param {Class} targetEditorClass
*/
register: function(controllerClass, targetEditorClass)
{
ICM._controllerMap.set(controllerClass, targetEditorClass);
},
/**
* Unregister a controller.
* @param {Class} controllerClass
*/
unregister: function(controllerClass)
{
ICM._controllerMap.remove(controllerClass);
},
/**
* Returns all registered controller classes.
* @returns {Array}
*/
getAllControllerClasses: function()
{
return ICM._controllerMap.getKeys();
},
/**
* Returns controller classes can be used for editorClass.
* @param {Class} editorClass
* @returns {Array}
*/
getAvailableControllerClasses: function(editorClass)
{
var result = [];
var controllerClasses = ICM.getAllControllerClasses();
for (var i = 0, l = controllerClasses.length; i < l; ++i)
{
var cc = controllerClasses[i];
var ec = ICM._controllerMap.get(cc);
if (!ec || ClassEx.isOrIsDescendantOf(editorClass, ec))
result.push(cc);
}
return result;
}
};
var ICM = Kekule.Editor.IaControllerManager;
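// A hedged sketch of registering a custom IA controller for BaseEditor and its descendants;
// MyCustomIaController is hypothetical:
/*
Kekule.Editor.IaControllerManager.register(MyCustomIaController, Kekule.Editor.BaseEditor);
var available = Kekule.Editor.IaControllerManager.getAvailableControllerClasses(Kekule.Editor.BaseEditor);
*/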
/**
* Base controller class for BaseEditor.
* This is a base class and should not be used directly.
* @class
* @augments Kekule.Widget.InteractionController
*
* @param {Kekule.Editor.BaseEditor} editor Editor of current object being installed to.
*/
Kekule.Editor.BaseEditorBaseIaController = Class.create(Kekule.Widget.InteractionController,
/** @lends Kekule.Editor.BaseEditorBaseIaController# */
{
/** @private */
CLASS_NAME: 'Kekule.Editor.BaseEditorBaseIaController',
/** @constructs */
initialize: function(/*$super, */editor)
{
this.tryApplySuper('initialize', [editor]) /* $super(editor) */;
},
/** @private */
_defineEditorConfigBasedProperty: function(propName, configPath, options)
{
var defOps = {
'dataType': DataType.VARIANT,
'serializable': false,
/*
'setter': function(value)
{
var configs = this.getEditorConfigs();
configs.setCascadePropValue(configPath, value);
}
*/
'setter': null
};
if (options && options.overwrite)
{
defOps.getter = function ()
{
var v = this.getPropStoreFieldValue(propName);
var configs = this.getEditorConfigs();
return v && configs.getCascadePropValue(configPath);
};
defOps.setter = function(value)
{
this.setPropStoreFieldValue(propName, value);
};
}
else
{
defOps.getter = function () {
var configs = this.getEditorConfigs();
return configs.getCascadePropValue(configPath);
};
}
var ops = Object.extend(defOps, options || {});
this.defineProp(propName, ops);
},
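// A sketch of how a descendant controller might use the helper above inside its own
// initProperties; the property name and cascade config path are hypothetical:
/*
initProperties: function()
{
  this._defineEditorConfigBasedProperty('hotKeyEnabled', ['interactionConfigs', 'enableHotKey']);
},
*/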
/**
* Returns the preferred id for this controller.
*/
getDefId: function()
{
return Kekule.ClassUtils.getLastClassName(this.getClassName());
},
/**
* Return associated editor.
* @returns {Kekule.Editor.BaseEditor}
*/
getEditor: function()
{
return this.getWidget();
},
/**
* Set associated editor.
* @param {Kekule.Editor.BaseEditor} editor
*/
setEditor: function(editor)
{
return this.setWidget(editor);
},
/**
* Get config object of editor.
* @returns {Object}
*/
getEditorConfigs: function()
{
var editor = this.getEditor();
return editor? editor.getEditorConfigs(): null;
}
});
/**
* Base Controller class for editor.
* @class
* @augments Kekule.Editor.BaseEditorBaseIaController
*
* @param {Kekule.Editor.BaseEditor} editor Editor of current object being installed to.
*
* @property {Bool} manuallyHotTrack If set to false, the hot track marker will be shown automatically in the pointermove event listener.
*/
Kekule.Editor.BaseEditorIaController = Class.create(Kekule.Editor.BaseEditorBaseIaController,
/** @lends Kekule.Editor.BaseEditorIaController# */
{
/** @private */
CLASS_NAME: 'Kekule.Editor.BaseEditorIaController',
/** @constructs */
initialize: function(/*$super, */editor)
{
this.tryApplySuper('initialize', [editor]) /* $super(editor) */;
},
initProperties: function()
{
this.defineProp('manuallyHotTrack', {'dataType': DataType.BOOL, 'serializable': false});
// in mouse or touch interaction, we may have different bound inflation
this.defineProp('currBoundInflation', {'dataType': DataType.NUMBER, 'serializable': false,
'getter': function() { return this.getEditor().getCurrBoundInflation(); },
'setter': null // function(value) { return this.getEditor().setCurrBoundInflation(value); }
});
this.defineProp('activePointerType', {'dataType': DataType.BOOL, 'serializable': false,
'getter': function()
{
var editor = this.getEditor();
return (editor && editor.getCurrPointerType()) || this.getPropStoreFieldValue('activePointerType');
},
'setter': function(value)
{
var editor = this.getEditor();
if (editor)
editor.setCurrPointerType(value);
else
this.setPropStoreFieldValue('activePointerType', value);
}
}); // private
},
/**
* Call the beginManipulateObjects method of editor.
* @private
*/
notifyEditorBeginManipulateObjects: function()
{
var editor = this.getEditor();
editor.beginManipulateObject();
},
/**
* Call the endManipulateObjects method of editor.
* @private
*/
notifyEditorEndManipulateObjects: function()
{
var editor = this.getEditor();
editor.endManipulateObject();
},
/** @private */
getInteractionBoundInflation: function(pointerType)
{
return this.getEditor().getInteractionBoundInflation(pointerType);
},
/** @ignore */
handleUiEvent: function(/*$super, */e)
{
var handle = false;
var targetElem = (e.getTarget && e.getTarget()) || e.target; // hammer event does not have getTarget method
var uiElem = this.getEditor().getUiEventReceiverElem();
if (uiElem)
{
// only handle events on the UI event receiver element,
// otherwise the scrollbar on the editor may cause problems
if ((targetElem === uiElem) || Kekule.DomUtils.isDescendantOf(targetElem, uiElem))
handle = true;
}
else
handle = true;
if (handle)
this.tryApplySuper('handleUiEvent', [e]) /* $super(e) */;
},
/**
* Returns if this IaController can interact with obj.
* If true, a hot track marker will be drawn when the pointer moves over obj.
* Descendants should override this method.
* @param {Object} obj
* @return {Bool}
* @private
*/
canInteractWithObj: function(obj)
{
return !!obj;
},
/**
* Returns all interactable object classes for this IA controller in editor.
* If the controller can interact with all objects, simply returns null.
* Descendants may override this method.
* @private
*/
getInteractableTargetClasses: function()
{
return null;
},
/* @private */
getAllInteractableObjsAtScreenCoord: function(coord)
{
return this.getEditor().getBasicObjectsAtCoord(coord, this.getCurrBoundInflation(), null, this.getInteractableTargetClasses());
},
/** @private */
getTopmostInteractableObjAtScreenCoord: function(coord)
{
var objs = this.getAllInteractableObjsAtScreenCoord(coord);
if (objs)
{
for (var i = 0, l = objs.length; i < l; ++i)
{
if (this.canInteractWithObj(objs[i]))
return objs[i];
}
}
return null;
},
/** @private */
getTopmostInteractableObjAtCurrPointerPos: function()
{
var objs = this.getEditor().getHoveredBasicObjs();
if (objs)
{
for (var i = 0, l = objs.length; i < l; ++i)
{
if (this.canInteractWithObj(objs[i]))
return objs[i];
}
}
return null;
},
/**
* Show a hot track marker on obj in editor.
* @param {Kekule.ChemObject} obj
*/
hotTrackOnObj: function(obj)
{
this.getEditor().hotTrackOnObj(obj);
},
// zoom functions
/** @private */
zoomEditor: function(zoomLevel, zoomCenterCoord)
{
if (zoomLevel > 0)
this.getEditor().zoomIn(zoomLevel, zoomCenterCoord);
else if (zoomLevel < 0)
this.getEditor().zoomOut(-zoomLevel, zoomCenterCoord);
},
/** @private */
/*
updateCurrBoundInflation: function(evt)
{
*/
/*
var editor = this.getEditor();
var pointerType = evt && evt.pointerType;
var iaConfigs = this.getEditorConfigs().getInteractionConfigs();
var defRatio = iaConfigs.getObjBoundTrackInflationRatio();
var currRatio, ratioValue;
if (pointerType === 'mouse')
currRatio = iaConfigs.getObjBoundTrackInflationRatioMouse();
else if (pointerType === 'pen')
currRatio = iaConfigs.getObjBoundTrackInflationRatioPen();
else if (pointerType === 'touch')
currRatio = iaConfigs.getObjBoundTrackInflationRatioTouch();
currRatio = currRatio || defRatio;
if (currRatio)
{
var bondScreenLength = editor.getDefBondScreenLength();
ratioValue = bondScreenLength * currRatio;
}
var defMinValue = iaConfigs.getObjBoundTrackMinInflation();
var currMinValue;
if (pointerType === 'mouse')
currMinValue = iaConfigs.getObjBoundTrackMinInflationMouse();
else if (pointerType === 'pen')
currMinValue = iaConfigs.getObjBoundTrackMinInflationPen();
else if (pointerType === 'touch')
currMinValue = iaConfigs.getObjBoundTrackMinInflationTouch();
currMinValue = currMinValue || defMinValue;
var actualValue = Math.max(ratioValue || 0, currMinValue);
*/
/*
//this.setCurrBoundInflation(actualValue);
var value = this.getEditor().getInteractionBoundInflation(evt && evt.pointerType);
this.setCurrBoundInflation(value);
//console.log('update bound inflation', pointerType, this.getCurrBoundInflation());
},
*/
/** @private */
_filterBasicObjectsInEditor: function(objs)
{
var editor = this.getEditor();
var rootObj = editor.getChemObj();
var result = [];
for (var i = 0, l = objs.length; i < l; ++i)
{
var obj = objs[i];
if (obj.isChildOf(rootObj))
result.push(obj);
}
return result;
},
/**
* Notify that the manipulation is done and objs have been inserted into or modified in the editor.
* This method should be called by descendants at the end of their manipulation.
* Objs will be automatically selected if autoSelectNewlyInsertedObjects option is true.
* @param {Array} objs
* @private
*/
doneInsertOrModifyBasicObjects: function(objs)
{
if (this.needAutoSelectNewlyInsertedObjects())
{
var filteredObjs = this._filterBasicObjectsInEditor(objs);
this.getEditor().select(filteredObjs);
}
},
/** @private */
needAutoSelectNewlyInsertedObjects: function()
{
var pointerType = this.getActivePointerType();
var ic = this.getEditorConfigs().getInteractionConfigs();
return (ic.getAutoSelectNewlyInsertedObjectsOnTouch() && pointerType === 'touch')
|| ic.getAutoSelectNewlyInsertedObjects();
},
/** @private */
react_pointerdown: function(e)
{
//this.updateCurrBoundInflation(e);
//this.getEditor().setCurrPointerType(e.pointerType);
this.setActivePointerType(e.pointerType);
e.preventDefault();
return true;
},
/** @private */
react_pointermove: function(e)
{
//if (!this.getCurrBoundInflation())
//this.updateCurrBoundInflation(e);
//this.getEditor().setCurrPointerType(e.pointerType);
//console.log(e.getTarget().id);
/*
var coord = this._getEventMouseCoord(e);
var obj = this.getTopmostInteractableObjAtScreenCoord(coord);
*/
var obj = this.getTopmostInteractableObjAtCurrPointerPos(); // read the hovered obj directly from the editor's cached property
if (!this.getManuallyHotTrack())
{
/*
if (obj)
console.log('point to', obj.getClassName(), obj.getId());
*/
if (obj /* && this.canInteractWithObj(obj)*/) // canInteractWithObj check now already done in getTopmostInteractableObjAtScreenCoord
{
this.hotTrackOnObj(obj);
}
else
{
this.hotTrackOnObj(null);
}
//e.preventDefault();
}
e.preventDefault();
return true;
},
/** @private */
react_mousewheel: function(e)
{
if (e.getCtrlKey())
{
var currScreenCoord = this._getEventMouseCoord(e);
//this.getEditor().setZoomCenter(currScreenCoord);
try
{
var delta = e.wheelDeltaY || e.wheelDelta;
if (delta)
delta /= 120;
//console.log('zoom', this.getEditor().getZoomCenter())
this.zoomEditor(delta, currScreenCoord);
}
finally
{
//this.getEditor().setZoomCenter(null);
}
e.preventDefault();
return true;
}
},
/** @private */
_getEventMouseCoord: function(/*$super, */e, clientElem)
{
var elem = clientElem || this.getWidget().getCoreElement(); // by default, based on the client element rather than the widget element
return this.tryApplySuper('_getEventMouseCoord', [e, elem]) /* $super(e, elem) */;
}
});
/**
* Controller for drag and scroll (by mouse, touch...) client element in editor.
* @class
* @augments Kekule.Editor.BaseEditorIaController
*
* @param {Kekule.Editor.BaseEditor} widget Editor of current object being installed to.
*/
Kekule.Editor.ClientDragScrollIaController = Class.create(Kekule.Editor.BaseEditorIaController,
/** @lends Kekule.Editor.ClientDragScrollIaController# */
{
/** @private */
CLASS_NAME: 'Kekule.Editor.ClientDragScrollIaController',
/** @constructs */
initialize: function(/*$super, */widget)
{
this.tryApplySuper('initialize', [widget]) /* $super(widget) */;
this._isExecuting = false;
},
/** @ignore */
canInteractWithObj: function(obj)
{
return false; // do not interact directly with objects in editor
},
/** @ignore */
doTestMouseCursor: function(coord, e)
{
//console.log(this.isExecuting(), coord);
return this.isExecuting()?
['grabbing', '-webkit-grabbing', '-moz-grabbing', 'move']:
['grab', '-webkit-grab', '-moz-grab', 'pointer'];
//return this.isExecuting()? '-webkit-grabbing': '-webkit-grab';
},
/** @private */
isExecuting: function()
{
return this._isExecuting;
},
/** @private */
startScroll: function(screenCoord)
{
this._startCoord = screenCoord;
this._originalScrollPos = this.getEditor().getClientScrollPosition();
this._isExecuting = true;
},
/** @private */
endScroll: function()
{
this._isExecuting = false;
this._startCoord = null;
this._originalScrollPos = null;
},
/** @private */
scrollTo: function(screenCoord)
{
if (this.isExecuting())
{
var startCoord = this._startCoord;
var delta = Kekule.CoordUtils.substract(startCoord, screenCoord);
var newScrollPos = Kekule.CoordUtils.add(this._originalScrollPos, delta);
this.getEditor().scrollClientTo(newScrollPos.y, newScrollPos.x); // note the params of this method are y, x
}
},
/** @private */
react_pointerdown: function(e)
{
this.setActivePointerType(e.pointerType);
if (e.getButton() === Kekule.X.Event.MouseButton.LEFT) // begin scroll
{
if (!this.isExecuting())
{
var coord = {x: e.getScreenX(), y: e.getScreenY()};
this.startScroll(coord);
e.preventDefault();
}
}
else if (e.getButton() === Kekule.X.Event.MouseButton.RIGHT)
{
if (this.isExecuting())
{
this.endScroll();
e.preventDefault();
}
}
},
/** @private */
react_pointerup: function(e)
{
if (e.getButton() === Kekule.X.Event.MouseButton.LEFT)
{
if (this.isExecuting())
{
this.endScroll();
e.preventDefault();
}
}
},
/** @private */
react_pointermove: function(/*$super, */e)
{
this.tryApplySuper('react_pointermove', [e]) /* $super(e) */;
if (this.isExecuting())
{
var coord = {x: e.getScreenX(), y: e.getScreenY()};
this.scrollTo(coord);
e.preventDefault();
}
return true;
}
});
/** @ignore */
Kekule.Editor.IaControllerManager.register(Kekule.Editor.ClientDragScrollIaController, Kekule.Editor.BaseEditor);
/**
* Controller for deleting objects in editor.
* @class
* @augments Kekule.Editor.BaseEditorIaController
*
* @param {Kekule.Editor.BaseEditor} widget Editor of current object being installed to.
*/
Kekule.Editor.BasicEraserIaController = Class.create(Kekule.Editor.BaseEditorIaController,
/** @lends Kekule.Editor.BasicEraserIaController# */
{
/** @private */
CLASS_NAME: 'Kekule.Editor.BasicEraserIaController',
/** @constructs */
initialize: function(/*$super, */widget)
{
this.tryApplySuper('initialize', [widget]) /* $super(widget) */;
this._isExecuting = false;
},
/** @ignore */
canInteractWithObj: function(obj)
{
return !!obj; // everything can be deleted
},
//methods about remove
/** @private */
removeObjs: function(objs)
{
if (objs && objs.length)
{
var editor = this.getEditor();
editor.beginUpdateObject();
try
{
var actualObjs = this.doGetActualRemovedObjs(objs);
this.doRemoveObjs(actualObjs);
}
finally
{
editor.endUpdateObject();
}
}
},
/** @private */
doRemoveObjs: function(objs)
{
// do actual remove job
},
doGetActualRemovedObjs: function(objs)
{
return objs;
},
/**
* Remove selected objects in editor.
*/
removeSelection: function()
{
var editor = this.getEditor();
this.removeObjs(editor.getSelection());
// the selected objects have been removed, so clear the now-stale selection
editor.deselectAll();
},
/**
* Remove object on screen coord.
* @param {Hash} coord
*/
removeOnScreenCoord: function(coord)
{
var obj = this.getEditor().getTopmostBasicObjectAtCoord(coord);
if (obj)
{
this.removeObjs([obj]);
return true;
}
else
return false;
},
/** @private */
startRemove: function()
{
this._isExecuting = true;
},
/** @private */
endRemove: function()
{
this._isExecuting = false;
},
/** @private */
isRemoving: function()
{
return this._isExecuting;
},
/** @ignore */
reactUiEvent: function(/*$super, */e)
{
var result = this.tryApplySuper('reactUiEvent', [e]) /* $super(e) */;
var evType = e.getType();
// prevent default touch action (may change UI) in mobile browsers
if (['touchstart', 'touchend', 'touchcancel', 'touchmove'].indexOf(evType) >= 0)
e.preventDefault();
return result;
},
/** @private */
react_pointerdown: function(e)
{
this.setActivePointerType(e.pointerType);
if (e.getButton() === Kekule.X.Event.MOUSE_BTN_LEFT)
{
this.startRemove();
var coord = this._getEventMouseCoord(e);
this.removeOnScreenCoord(coord);
e.preventDefault();
}
else if (e.getButton() === Kekule.X.Event.MOUSE_BTN_RIGHT)
{
this.endRemove();
e.preventDefault();
}
},
/** @private */
react_pointerup: function(e)
{
if (e.getButton() === Kekule.X.Event.MOUSE_BTN_LEFT)
{
this.endRemove();
e.preventDefault();
}
},
/** @private */
react_pointermove: function(/*$super, */e)
{
this.tryApplySuper('react_pointermove', [e]) /* $super(e) */;
if (this.isRemoving())
{
var coord = this._getEventMouseCoord(e);
this.removeOnScreenCoord(coord);
e.preventDefault();
}
return true;
}
});
/** @ignore */
Kekule.Editor.IaControllerManager.register(Kekule.Editor.BasicEraserIaController, Kekule.Editor.BaseEditor);
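// A hedged sketch of specializing the eraser above: a hypothetical subclass that expands each
// removed object to its standalone ancestor before deletion:
/*
Kekule.Editor.AncestorEraserIaController = Class.create(Kekule.Editor.BasicEraserIaController,
{
  CLASS_NAME: 'Kekule.Editor.AncestorEraserIaController',
  doGetActualRemovedObjs: function(objs)
  {
    var result = [];
    for (var i = 0, l = objs.length; i < l; ++i)
    {
      var obj = objs[i];
      var target = obj.getStandaloneAncestor? obj.getStandaloneAncestor(): obj;
      Kekule.ArrayUtils.pushUnique(result, target);
    }
    return result;
  }
});
Kekule.Editor.IaControllerManager.register(Kekule.Editor.AncestorEraserIaController, Kekule.Editor.BaseEditor);
*/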
/**
* Controller for selecting, moving or rotating objects in editor.
* @class
* @augments Kekule.Editor.BaseEditorIaController
*
* @param {Kekule.Editor.BaseEditor} widget Editor of current object being installed to.
*
* @property {Int} selectMode Set the selectMode property of editor.
* @property {Bool} enableSelect Whether select function is enabled.
* @property {Bool} enableMove Whether move function is enabled.
* //@property {Bool} enableRemove Whether remove function is enabled.
* @property {Bool} enableResize Whether resize of selection is allowed.
* @property {Bool} enableRotate Whether rotate of selection is allowed.
* @property {Bool} enableGestureManipulation Whether rotate and resize by touch gestures are allowed.
*/
Kekule.Editor.BasicManipulationIaController = Class.create(Kekule.Editor.BaseEditorIaController,
/** @lends Kekule.Editor.BasicManipulationIaController# */
{
/** @private */
CLASS_NAME: 'Kekule.Editor.BasicManipulationIaController',
/** @constructs */
initialize: function(/*$super, */widget)
{
this.tryApplySuper('initialize', [widget]) /* $super(widget) */;
this.setState(Kekule.Editor.BasicManipulationIaController.State.NORMAL);
/*
this.setEnableSelect(false);
this.setEnableGestureManipulation(false);
this.setEnableMove(true);
this.setEnableResize(true);
this.setEnableAspectRatioLockedResize(true);
this.setEnableRotate(true);
*/
this._suppressConstrainedResize = false;
this._manipulationStepBuffer = {};
this._suspendedOperations = null;
this.execManipulationStepBind = this.execManipulationStep.bind(this);
},
/** @private */
initProperties: function()
{
this.defineProp('selectMode', {'dataType': DataType.INT, 'serializable': false});
this.defineProp('enableSelect', {'dataType': DataType.BOOL, 'serializable': false});
this.defineProp('enableMove', {'dataType': DataType.BOOL, 'serializable': false});
//this.defineProp('enableRemove', {'dataType': DataType.BOOL, 'serializable': false});
this.defineProp('enableResize', {'dataType': DataType.BOOL, 'serializable': false});
this.defineProp('enableRotate', {'dataType': DataType.BOOL, 'serializable': false});
this.defineProp('enableGestureManipulation', {'dataType': DataType.BOOL, 'serializable': false});
this.defineProp('state', {'dataType': DataType.INT, 'serializable': false});
// the screen coord that starts this manipulation; since startCoord may be changed during rotation, use this
// to get the initial coord of mouse down
this.defineProp('baseCoord', {'dataType': DataType.HASH, 'serializable': false});
this.defineProp('startCoord', {'dataType': DataType.HASH, 'serializable': false/*,
'setter': function(value)
{
console.log('set startCoord', value);
console.log(arguments.callee.caller.caller.caller.toString());
this.setPropStoreFieldValue('startCoord', value);
}*/
});
this.defineProp('endCoord', {'dataType': DataType.HASH, 'serializable': false});
this.defineProp('startBox', {'dataType': DataType.HASH, 'serializable': false});
this.defineProp('endBox', {'dataType': DataType.HASH, 'serializable': false});
this.defineProp('lastRotateAngle', {'dataType': DataType.FLOAT, 'serializable': false}); // private
// private, such as {x: 1, y: 0}; serves as the initial base direction of rotation
this.defineProp('rotateRefCoord', {'dataType': DataType.HASH, 'serializable': false});
this.defineProp('rotateCenter', {'dataType': DataType.HASH, 'serializable': false,
'getter': function()
{
var result = this.getPropStoreFieldValue('rotateCenter');
if (!result)
{
/*
var box = this.getStartBox();
result = box? {'x': (box.x1 + box.x2) / 2, 'y': (box.y1 + box.y2) / 2}: null;
*/
var centerCoord = this._getManipulateObjsCenterCoord();
result = this.getEditor().objCoordToScreen(centerCoord);
this.setPropStoreFieldValue('rotateCenter', result);
//console.log(result, result2);
}
return result;
}
});
this.defineProp('resizeStartingRegion', {'dataType': DataType.INT, 'serializable': false}); // private
this.defineProp('enableAspectRatioLockedResize', {'dataType': DataType.BOOL, 'serializable': false});
this.defineProp('rotateStartingRegion', {'dataType': DataType.INT, 'serializable': false}); // private
this.defineProp('manipulateOriginObjs', {'dataType': DataType.ARRAY, 'serializable': false}); // private, the direct objects the user acts on
this.defineProp('manipulateObjs', {'dataType': DataType.ARRAY, 'serializable': false, // actual manipulated objects
'setter': function(value)
{
this.setPropStoreFieldValue('manipulateObjs', value);
//console.log('set manipulate', value);
if (!value)
this.getEditor().endOperatingObjs();
else
this.getEditor().prepareOperatingObjs(value);
}
});
this.defineProp('manipulateObjInfoMap', {'dataType': DataType.OBJECT, 'serializable': false, 'setter': null,
'getter': function()
{
var result = this.getPropStoreFieldValue('manipulateObjInfoMap');
if (!result)
{
result = new Kekule.MapEx(true);
this.setPropStoreFieldValue('manipulateObjInfoMap', result);
}
return result;
}
});
this.defineProp('manipulateObjCurrInfoMap', {'dataType': DataType.OBJECT, 'serializable': false, 'setter': null,
'getter': function()
{
var result = this.getPropStoreFieldValue('manipulateObjCurrInfoMap');
if (!result)
{
result = new Kekule.MapEx(true);
this.setPropStoreFieldValue('manipulateObjCurrInfoMap', result);
}
return result;
}
});
this.defineProp('manipulationType', {'dataType': DataType.INT, 'serializable': false}); // private
this.defineProp('isManipulatingSelection', {'dataType': DataType.BOOL, 'serializable': false});
this.defineProp('isOffsetManipulating', {'dataType': DataType.BOOL, 'serializable': false});
this.defineProp('manipulationPointerType', {'dataType': DataType.BOOL, 'serializable': false,
'getter': function() { return this.getActivePointerType(); },
'setter': function(value) { this.setActivePointerType(value); }
}); // private, alias of property activePointerType
this.defineProp('activePointerId', {'dataType': DataType.INT, 'serializable': false}); // private, the pointer id currently activated in editor
//this.defineProp('manipulateOperation', {'dataType': 'Kekule.MacroOperation', 'serializable': false}); // store operation of moving
//this.defineProp('activeOperation', {'dataType': 'Kekule.MacroOperation', 'serializable': false}); // store operation that should be add to history
this.defineProp('moveOperations', {'dataType': DataType.ARRAY, 'serializable': false}); // store operations of moving
//this.defineProp('mergeOperations', {'dataType': DataType.ARRAY, 'serializable': false}); // store operations of merging
this.defineProp('moveWrapperOperation', {'dataType': DataType.OBJECT, 'serializable': false}); // private
this.defineProp('objOperationMap', {'dataType': 'Kekule.MapEx', 'serializable': false,
'getter': function()
{
var result = this.getPropStoreFieldValue('objOperationMap');
if (!result)
{
result = new Kekule.MapEx(true);
this.setPropStoreFieldValue('objOperationMap', result);
}
return result;
}
}); // store operation on each object
},
/** @ignore */
initPropValues: function()
{
this.tryApplySuper('initPropValues');
this.setEnableSelect(false); // turn off select for most of IA controllers derived from this class
this.setEnableGestureManipulation(false); // turn off gesture for most of IA controllers derived from this class
/*
this.setEnableGestureManipulation(false);
this.setEnableMove(true);
this.setEnableResize(true);
this.setEnableAspectRatioLockedResize(true);
this.setEnableRotate(true);
*/
var options = Kekule.globalOptions.get('chemWidget.editor') || {};
var oneOf = Kekule.oneOf;
this.setEnableMove(oneOf(options.enableMove, true));
this.setEnableResize(oneOf(options.enableResize, true));
this.setEnableAspectRatioLockedResize(oneOf(options.enableAspectRatioLockedResize, true));
this.setEnableRotate(oneOf(options.enableRotate, true));
},
/** @private */
doFinalize: function(/*$super*/)
{
var map = this.getPropStoreFieldValue('manipulateObjInfoMap');
if (map)
map.clear();
map = this.getPropStoreFieldValue('objOperationMap');
if (map)
map.clear();
this.tryApplySuper('doFinalize') /* $super() */;
},
/* @ignore */
/*
activated: function($super, widget)
{
$super(widget);
//console.log('activated', this.getSelectMode());
// set select mode when be activated
if (this.getEnableSelect())
this.getEditor().setSelectMode(this.getSelectMode());
},
*/
/** @ignore */
hotTrackOnObj: function(/*$super, */obj)
{
// override parent method; if selectMode is ANCESTOR, hot track the whole ancestor object
if (this.getEnableSelect() && this.getSelectMode() === Kekule.Editor.SelectMode.ANCESTOR)
{
var concreteObj = this.getStandaloneAncestor(obj);
return this.tryApplySuper('hotTrackOnObj', [concreteObj]) /* $super(concreteObj) */;
}
else
return this.tryApplySuper('hotTrackOnObj', [obj]) /* $super(obj) */;
},
/** @private */
getStandaloneAncestor: function(obj)
{
return (obj && obj.getStandaloneAncestor) ? obj.getStandaloneAncestor() : obj;
},
/** @private */
isInAncestorSelectMode: function()
{
return this.getEnableSelect() && (this.getSelectMode() === Kekule.Editor.SelectMode.ANCESTOR);
},
/** @private */
isAspectRatioLockedResize: function()
{
return this.getEnableAspectRatioLockedResize() && (!this._suppressConstrainedResize);
},
/**
* Check if screenCoord is near the outside of the selection bound and return which corner is the nearest.
* @param {Hash} screenCoord
* @returns {Variant} If on the rotation region, the nearest corner flag (from {@link Kekule.Editor.BoxRegion}) will be returned,
* otherwise false will be returned.
*/
getCoordOnSelectionRotationRegion: function(screenCoord)
{
var R = Kekule.Editor.BoxRegion;
var editor = this.getEditor();
var region = editor.getCoordRegionInSelectionMarker(screenCoord);
if (region !== R.OUTSIDE)
return false;
var r = editor.getEditorConfigs().getInteractionConfigs().getRotationRegionInflation();
var box = editor.getUiSelectionAreaContainerBox();
if (box && editor.hasSelection())
{
var corners = [R.CORNER_TL, R.CORNER_TR, R.CORNER_BR, R.CORNER_BL];
var points = [
{'x': box.x1, 'y': box.y1},
{'x': box.x2, 'y': box.y1},
{'x': box.x2, 'y': box.y2},
{'x': box.x1, 'y': box.y2}
];
var result = false;
var minDis = r;
for (var i = 0, l = corners.length; i < l; ++i)
{
var corner = corners[i];
var point = points[i];
var dis = Kekule.CoordUtils.getDistance(point, screenCoord);
if (dis <= minDis)
{
result = corner;
minDis = dis;
}
}
return result;
}
else
return false;
},
/**
* Create a coord change operation to add to operation history of editor.
* The operation is a macro one with sub operations on each obj.
* @private
*/
createManipulateOperation: function()
{
return this.doCreateManipulateMoveAndResizeOperation();
},
/** @private */
doCreateManipulateMoveAndResizeOperation: function()
{
//var oper = new Kekule.MacroOperation();
var opers = [];
this.setMoveOperations(opers);
var objs = this.getManipulateObjs();
var map = this.getManipulateObjInfoMap();
var operMap = this.getObjOperationMap();
operMap.clear();
var objsMoveInfo = [];
var totalOperation = new Kekule.ChemObjOperation.MoveAndResizeObjs([], objsMoveInfo, this.getEditor().getCoordMode(), true, this.getEditor());
totalOperation.setDisableIndirectCoord(true);
//console.log('init operations');
for (var i = 0, l = objs.length; i < l; ++i)
{
var obj = objs[i];
var item = map.get(obj);
//var sub = new Kekule.EditorOperation.OpSetObjCoord(this.getEditor(), obj, null, item.objCoord, Kekule.Editor.CoordSys.OBJ);
//var sub = new Kekule.ChemObjOperation.MoveTo(obj, null, this.getEditor().getCoordMode());
var sub = new Kekule.ChemObjOperation.MoveAndResize(obj, null, null, this.getEditor().getCoordMode(), true, this.getEditor()); // use abs coord
sub.setAllowCoordBorrow(this.getEditor().getAllowCoordBorrow());
sub.setOldCoord(item.objCoord);
sub.setOldDimension(item.size);
//oper.add(sub);
//operMap.set(obj, sub);
opers.push(sub);
/*
objsMoveInfo.push({
'obj': obj,
'oldCoord': item.objCoord,
'oldDimension': item.size
});
*/
totalOperation.getChildOperations().push(sub);
this.setMoveWrapperOperation(totalOperation);
}
//this.setManipulateOperation(oper);
//this.setActiveOperation(oper);
//return oper;
//return opers;
return [totalOperation];
},
/* @private */
/*
_ensureObjOperationToMove: function(obj)
{
var map = this.getObjOperationMap();
var oper = map.get(obj);
if (oper && !(oper instanceof Kekule.ChemObjOperation.MoveAndResize))
{
//console.log('_ensureObjOperationToMove reverse');
//oper.reverse();
oper.finalize();
oper = new Kekule.ChemObjOperation.MoveAndResize(obj, null, null, this.getEditor().getCoordMode(), true); // use abs coord
map.set(obj, oper);
}
return oper;
},
*/
/**
* Update new coord info of sub operations.
* @private
*/
updateChildMoveOperation: function(objIndex, obj, newObjCoord)
{
//console.log('update move', newObjCoord);
//var oper = this.getManipulateOperation().getChildAt(objIndex);
//var oper = this._ensureObjOperationToMove(obj);
var oper = this.getMoveOperations()[objIndex];
//oper.setCoord(newObjCoord);
oper.setNewCoord(newObjCoord);
},
/** @private */
updateChildResizeOperation: function(objIndex, obj, newDimension)
{
//var oper = this.getManipulateOperation().getChildAt(objIndex);
//var oper = this._ensureObjOperationToMove(obj);
var oper = this.getMoveOperations()[objIndex];
oper.setNewDimension(newDimension);
},
/** @private */
getAllObjOperations: function(isTheFinalOperationToEditor)
{
//var opers = this.getObjOperationMap().getValues();
var op = this.getMoveOperations();
var opers = op? Kekule.ArrayUtils.clone(op): [];
if (opers.length)
{
var wrapper = this.getMoveWrapperOperation();
wrapper.setChildOperations(opers);
//return opers;
return [wrapper];
}
else
return [];
},
/** @private */
getActiveOperation: function(isTheFinalOperationToEditor)
{
//console.log('get active operation', isTheFinalOperationToEditor);
var opers = this.getAllObjOperations(isTheFinalOperationToEditor);
opers = Kekule.ArrayUtils.toUnique(opers);
if (opers.length <= 0)
return null;
else if (opers.length === 1)
return opers[0];
else
{
var macro = new Kekule.MacroOperation(opers);
return macro;
}
},
/** @private */
reverseActiveOperation: function()
{
var oper = this.getActiveOperation();
return oper.reverse();
},
/* @private */
/*
clearActiveOperation: function()
{
//this.getObjOperationMap().clear();
},
*/
/** @private */
addOperationToEditor: function()
{
var editor = this.getEditor();
if (editor && editor.getEnableOperHistory())
{
//console.log('add oper to editor', this.getClassName(), this.getActiveOperation());
//editor.pushOperation(this.getActiveOperation());
/*
var opers = this.getAllObjOperations();
var macro = new Kekule.MacroOperation(opers);
editor.pushOperation(macro);
*/
var op = this.getActiveOperation(true);
if (op)
editor.pushOperation(op);
}
},
// methods about object move / resize
/** @private */
getCurrAvailableManipulationTypes: function()
{
var T = Kekule.Editor.BasicManipulationIaController.ManipulationType;
var box = this.getEditor().getSelectionContainerBox();
if (!box)
{
return [];
}
else
{
var result = [];
if (this.getEnableMove())
result.push(T.MOVE);
// if box is a single point, can not resize or rotate
if (!Kekule.NumUtils.isFloatEqual(box.x1, box.x2, 1e-10) || !Kekule.NumUtils.isFloatEqual(box.y1, box.y2, 1e-10))
{
if (this.getEnableResize())
result.push(T.RESIZE);
if (this.getEnableRotate())
result.push(T.ROTATE);
if (this.getEnableResize() || this.getEnableRotate())
result.push(T.TRANSFORM);
}
return result;
}
},
/** @private */
getActualManipulatingObjects: function(objs)
{
var result = [];
for (var i = 0, l = objs.length; i < l; ++i)
{
var obj = objs[i];
var actualObjs = obj.getCoordDependentObjects? obj.getCoordDependentObjects(): [obj];
Kekule.ArrayUtils.pushUnique(result, actualObjs);
}
return result;
},
/*
* Prepare to resize resizingObjs.
* Note that resizingObjs may differ from actual resized objects (for instance, resize a bond actually move its connected atoms).
* @param {Hash} startContextCoord Mouse position when starting to move objects. This coord is based on context.
* @param {Array} resizingObjs Objects about to be resized.
* @private
*/
/*
prepareResizing: function(startScreenCoord, startBox, movingObjs)
{
var actualObjs = this.getActualResizingObject(movingObjs);
this.setManipulateObjs(actualObjs);
var map = this.getManipulateObjInfoMap();
map.clear();
var editor = this.getEditor();
// store original objs coords info into map
for (var i = 0, l = actualObjs.length; i < l; ++i)
{
var obj = actualObjs[i];
var info = this.createManipulateObjInfo(obj, startScreenCoord);
map.set(obj, info);
}
this.setStartBox(startBox);
},
*/
/** @private */
doPrepareManipulatingObjects: function(manipulatingObjs, startScreenCoord)
{
var actualObjs = this.getActualManipulatingObjects(manipulatingObjs);
//console.log(manipulatingObjs, actualObjs);
this.setManipulateOriginObjs(manipulatingObjs);
this.setManipulateObjs(actualObjs);
var map = this.getManipulateObjInfoMap();
map.clear();
//this.getManipulateObjCurrInfoMap().clear();
var editor = this.getEditor();
// store original objs coords info into map
for (var i = 0, l = actualObjs.length; i < l; ++i)
{
var obj = actualObjs[i];
var info = this.createManipulateObjInfo(obj, i, startScreenCoord);
map.set(obj, info);
/*
// disable indirect coord during coord move
if (info.enableIndirectCoord)
obj.setEnableIndirectCoord(false);
*/
}
},
/** @private */
doPrepareManipulatingStartingCoords: function(startScreenCoord, startBox, rotateCenter, rotateRefCoord)
{
this.setStartBox(startBox);
this.setRotateCenter(rotateCenter);
this.setRotateRefCoord(rotateRefCoord);
this.setLastRotateAngle(null);
},
/**
* Prepare to manipulate (move/resize/rotate/transform) manipulatingObjs.
* Note that manipulatingObjs may differ from the objects actually changed (for instance, moving a bond actually moves its connected atoms).
* @param {Int} manipulationType
* @param {Array} manipulatingObjs Objects about to be manipulated.
* @param {Hash} startScreenCoord Pointer position when the manipulation starts. This coord is based on screen.
* @param {Hash} startBox
* @param {Hash} rotateCenter
* @param {Hash} rotateRefCoord
* @private
*/
prepareManipulating: function(manipulationType, manipulatingObjs, startScreenCoord, startBox, rotateCenter, rotateRefCoord)
{
this.setManipulationType(manipulationType);
this.doPrepareManipulatingObjects(manipulatingObjs, startScreenCoord);
this.doPrepareManipulatingStartingCoords(startScreenCoord, startBox, rotateCenter, rotateRefCoord);
this.createManipulateOperation();
this._cachedTransformCompareThresholds = null; // clear cache
this._runManipulationStepId = Kekule.window.requestAnimationFrame(this.execManipulationStepBind);
//this.setManuallyHotTrack(true); // manully set hot track point when manipulating
},
/**
* Cancel the manipulation process and restore objects to their original positions.
* @private
*/
cancelManipulate: function()
{
var editor = this.getEditor();
var objs = this.getManipulateObjs();
//editor.beginUpdateObject();
//this.getActiveOperation().reverse();
this.reverseActiveOperation();
this.notifyCoordChangeOfObjects(this.getManipulateObjs());
//editor.endUpdateObject();
//this.setActiveOperation(null);
//this.clearActiveOperation();
//this.setManuallyHotTrack(false);
this.manipulateEnd();
},
/**
* Returns center coord of manipulate objs.
* @private
*/
_getManipulateObjsCenterCoord: function()
{
var objs = this.getManipulateObjs();
if (!objs || !objs.length)
return null;
var coordMode = this.getEditor().getCoordMode();
var allowCoordBorrow = this.getEditor().getAllowCoordBorrow();
var sum = {'x': 0, 'y': 0, 'z': 0};
for (var i = 0, l = objs.length; i < l; ++i)
{
var obj = objs[i];
var objCoord = obj.getAbsBaseCoord? obj.getAbsBaseCoord(coordMode, allowCoordBorrow):
obj.getAbsCoord? obj.getAbsCoord(coordMode, allowCoordBorrow):
obj.getCoordOfMode? obj.getCoordOfMode(coordMode, allowCoordBorrow):
null;
if (objCoord)
sum = Kekule.CoordUtils.add(sum, objCoord);
}
return Kekule.CoordUtils.divide(sum, objs.length);
},
/**
* Called when a phase (frame) of the rotate/resize/move operation ends.
*/
_maniplateObjsFrameEnd: function(objs)
{
// do nothing here
},
/** @private */
_addManipultingObjNewInfo: function(obj, newInfo)
{
var newInfoMap = this.getManipulateObjCurrInfoMap();
var info = newInfoMap.get(obj) || {};
info = Object.extend(info, newInfo);
newInfoMap.set(obj, info);
},
/** @private */
applyManipulatingObjsInfo: function(endScreenCoord)
{
//this._moveResizeOperExecPending = false;
var objs = this.getManipulateObjs();
var newInfoMap = this.getManipulateObjCurrInfoMap();
var indirectCoordObjs = this._getIndirectCoordObjs(objs);
this._setEnableIndirectCoordOfObjs(indirectCoordObjs, false); // important: disable indirect coord first to avoid recalculation (and position errors) while moving
try
{
for (var i = 0, l = objs.length; i < l; ++i)
{
var obj = objs[i];
var newInfo = newInfoMap.get(obj);
this.applySingleManipulatingObjInfo(i, obj, newInfo, endScreenCoord);
}
/*
if (this._moveResizeOperExecPending)
this.getMoveWrapperOperation().execute();
*/
}
finally
{
this._setEnableIndirectCoordOfObjs(indirectCoordObjs, true);
}
},
/** @private */
_getIndirectCoordObjs: function(objs)
{
var result = [];
for (var i = 0, l = objs.length; i < l; ++i)
{
var obj = objs[i];
if (obj.getEnableIndirectCoord && obj.getEnableIndirectCoord())
result.push(obj);
}
return result;
},
/** @private */
_setEnableIndirectCoordOfObjs: function(objs, enabled)
{
if (!objs)
return;
for (var i = 0, l = objs.length; i < l; ++i)
{
objs[i].setEnableIndirectCoord(enabled);
}
},
/** @private */
applySingleManipulatingObjInfo: function(objIndex, obj, newInfo, endScreenCoord)
{
if (newInfo)
{
if (newInfo.screenCoord)
this.doMoveManipulatedObj(objIndex, obj, newInfo.screenCoord, endScreenCoord);
if (newInfo.size)
this.doResizeManipulatedObj(objIndex, obj, newInfo.size);
}
},
/** @private */
_calcRotateAngle: function(endScreenCoord)
{
var C = Kekule.CoordUtils;
var angle;
var angleCalculated = false;
var rotateCenter = this.getRotateCenter();
var startCoord = this.getRotateRefCoord() || this.getStartCoord();
// ensure startCoord is farther from the rotate center than the threshold
var threshold = this.getEditorConfigs().getInteractionConfigs().getRotationLocationPointDistanceThreshold();
if (threshold)
{
var startDistance = C.getDistance(startCoord, rotateCenter);
if (startDistance < threshold)
{
angle = 0; // do not rotate
angleCalculated = true;
// and use endScreen coord as new start coord
this.setStartCoord(endScreenCoord);
return false;
}
var endDistance = C.getDistance(endScreenCoord, rotateCenter);<|fim▁hole|> angleCalculated = true;
return false;
}
}
if (!angleCalculated)
{
var vector = C.substract(endScreenCoord, rotateCenter);
var endAngle = Math.atan2(vector.y, vector.x);
vector = C.substract(startCoord, rotateCenter);
var startAngle = Math.atan2(vector.y, vector.x);
angle = endAngle - startAngle;
}
return {'angle': angle, 'startAngle': startAngle, 'endAngle': endAngle};
},
/** @private */
_calcActualRotateAngle: function(objs, newDeltaAngle, oldAbsAngle, newAbsAngle)
{
return newDeltaAngle;
},
/** @private */
_calcManipulateObjsRotationParams: function(manipulatingObjs, endScreenCoord)
{
if (!this.getEnableRotate())
return false;
var rotateCenter = this.getRotateCenter();
var angleInfo = this._calcRotateAngle(endScreenCoord);
if (!angleInfo) // need not to rotate
return false;
// get actual rotation angle
var angle = this._calcActualRotateAngle(manipulatingObjs, angleInfo.angle, angleInfo.startAngle, angleInfo.endAngle);
var lastAngle = this.getLastRotateAngle();
if (Kekule.ObjUtils.notUnset(lastAngle) && Kekule.NumUtils.isFloatEqual(angle, lastAngle, 0.0175)) // ignore angle change under 1 degree
{
return false; // no angle change, do not rotate
}
//console.log('rotateAngle', angle, lastAngle);
this.setLastRotateAngle(angle);
return {'center': rotateCenter, 'rotateAngle': angle};
},
/* @private */
/*
doRotateManipulatedObjs: function(endScreenCoord, transformParams)
{
var byPassRotate = !this._calcManipulateObjsTransformInfo(this.getManipulateObjs(), transformParams);
if (byPassRotate) // need not to rotate
{
//console.log('bypass rotate');
return;
}
//console.log('rotate');
var objNewInfo = this.getManipulateObjCurrInfoMap();
var editor = this.getEditor();
editor.beginUpdateObject();
try
{
var objs = this.getManipulateObjs();
this.applyManipulatingObjsInfo(endScreenCoord);
this._maniplateObjsFrameEnd(objs);
this.notifyCoordChangeOfObjects(objs);
}
finally
{
editor.endUpdateObject();
this.manipulateStepDone();
}
},
*/
/*
* Rotate manupulatedObjs according to endScreenCoord.
* @private
*/
/*
rotateManipulatedObjs: function(endScreenCoord)
{
var R = Kekule.Editor.BoxRegion;
var C = Kekule.CoordUtils;
//var editor = this.getEditor();
var changedObjs = [];
//console.log('rotate', this.getRotateCenter(), endScreenCoord);
var rotateParams = this._calcManipulateObjsRotationParams(this.getManipulateObjs(), endScreenCoord);
if (!rotateParams)
return;
this.doRotateManipulatedObjs(endScreenCoord, rotateParams);
},
*/
/** @private */
_calcActualResizeScales: function(objs, newScales)
{
return newScales;
},
/** @private */
_calcManipulateObjsResizeParams: function(manipulatingObjs, startingRegion, endScreenCoord)
{
if (!this.getEnableResize())
return false;
var R = Kekule.Editor.BoxRegion;
var C = Kekule.CoordUtils;
var box = this.getStartBox();
var coordDelta = C.substract(endScreenCoord, this.getStartCoord());
var scaleCenter;
var doConstraint, doConstraintOnX, doConstraintOnY;
if (startingRegion === R.EDGE_TOP)
{
coordDelta.x = 0;
scaleCenter = {'x': (box.x1 + box.x2) / 2, 'y': box.y2};
}
else if (startingRegion === R.EDGE_BOTTOM)
{
coordDelta.x = 0;
scaleCenter = {'x': (box.x1 + box.x2) / 2, 'y': box.y1};
}
else if (startingRegion === R.EDGE_LEFT)
{
coordDelta.y = 0;
scaleCenter = {'x': box.x2, 'y': (box.y1 + box.y2) / 2};
}
else if (startingRegion === R.EDGE_RIGHT)
{
coordDelta.y = 0;
scaleCenter = {'x': box.x1, 'y': (box.y1 + box.y2) / 2};
}
else // resize from corner
{
if (this.isAspectRatioLockedResize())
{
doConstraint = true;
/*
var widthHeightRatio = (box.x2 - box.x1) / (box.y2 - box.y1);
var currRatio = coordDelta.x / coordDelta.y;
if (Math.abs(currRatio) > widthHeightRatio)
//coordDelta.x = coordDelta.y * widthHeightRatio * (Math.sign(currRatio) || 1);
doConstraintOnY = true;
else
//coordDelta.y = coordDelta.x / widthHeightRatio * (Math.sign(currRatio) || 1);
doConstraintOnX = true;
*/
}
scaleCenter = (startingRegion === R.CORNER_TL)? {'x': box.x2, 'y': box.y2}:
(startingRegion === R.CORNER_TR)? {'x': box.x1, 'y': box.y2}:
(startingRegion === R.CORNER_BL)? {'x': box.x2, 'y': box.y1}:
{'x': box.x1, 'y': box.y1};
}
var reversedX = (startingRegion === R.CORNER_TL) || (startingRegion === R.CORNER_BL) || (startingRegion === R.EDGE_LEFT);
var reversedY = (startingRegion === R.CORNER_TL) || (startingRegion === R.CORNER_TR) || (startingRegion === R.EDGE_TOP);
// calc transform matrix
var scaleX, scaleY;
if (Kekule.NumUtils.isFloatEqual(box.x1, box.x2, 1e-10)) // box has no x size, can not scale on x
scaleX = 1;
else
scaleX = 1 + coordDelta.x / (box.x2 - box.x1) * (reversedX? -1: 1);
if (Kekule.NumUtils.isFloatEqual(box.y1, box.y2, 1e-10)) // box has no y size, can not scale on y
scaleY = 1;
else
scaleY = 1 + coordDelta.y / (box.y2 - box.y1) * (reversedY? -1: 1);
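// aspect-locked resize: apply the larger absolute scale to both axes, keeping each axis' own sign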
if (doConstraint)
{
var absX = Math.abs(scaleX), absY = Math.abs(scaleY);
if (absX >= absY)
scaleY = (Math.sign(scaleY) || 1) * absX; // avoid sign = 0
else
scaleX = (Math.sign(scaleX) || 1) * absY;
}
//console.log('before actual scale', coordDelta, {'scaleX': scaleX, 'scaleY': scaleY});
var actualScales = this._calcActualResizeScales(manipulatingObjs, {'scaleX': scaleX, 'scaleY': scaleY});
var transformParams = {'center': scaleCenter, 'scaleX': actualScales.scaleX, 'scaleY': actualScales.scaleY};
//console.log(this.isAspectRatioLockedResize(), scaleX, scaleY);
//console.log('startBox', box);
//console.log('transformParams', transformParams);
return transformParams;
},
/* @private */
/*
_calcManipulateObjsResizeInfo: function(manipulatingObjs, startingRegion, endScreenCoord)
{
var R = Kekule.Editor.BoxRegion;
var C = Kekule.CoordUtils;
var transformOps = this._calcManipulateObjsResizeParams(manipulatingObjs, startingRegion, endScreenCoord);
//console.log(scaleX, scaleY);
this._calcManipulateObjsTransformInfo(manipulatingObjs, transformOps);
return true;
},
*/
/** @private */
_calcManipulateObjsTransformInfo: function(manipulatingObjs, transformParams)
{
var C = Kekule.CoordUtils;
// since we transform screen coord, it will always be in 2D mode
// and now the editor only supports 2D
var is3D = false; // this.getEditor().getCoordMode() === Kekule.CoordMode.COORD3D;
var transformMatrix = is3D? C.calcTransform3DMatrix(transformParams): C.calcTransform2DMatrix(transformParams);
var scaleX = transformParams.scaleX || transformParams.scale;
var scaleY = transformParams.scaleY || transformParams.scale;
var isMovingOneStickNode = this._isManipulatingSingleStickedObj(manipulatingObjs);
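// sticked objects follow their stick target, so their coords are left unchanged here unless a single sticked node is manipulated directly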
for (var i = 0, l = manipulatingObjs.length; i < l; ++i)
{
var obj = manipulatingObjs[i];
var info = this.getManipulateObjInfoMap().get(obj);
var newInfo = {};
if (!info.hasNoCoord) // this object has coord property and can be rotated
{
var oldCoord = info.screenCoord;
if (!info.stickTarget || isMovingOneStickNode)
{
var newCoord = C.transform2DByMatrix(oldCoord, transformMatrix);
newInfo.screenCoord = newCoord;
}
else
newInfo.screenCoord = oldCoord;
//this._addManipultingObjNewInfo(obj, {'screenCoord': newCoord});
}
// TODO: may need change dimension also
if (info.size && (scaleX || scaleY))
{
var newSize = {'x': info.size.x * Math.abs(scaleX || 1), 'y': info.size.y * Math.abs(scaleY || 1)};
newInfo.size = newSize;
}
this._addManipultingObjNewInfo(obj, newInfo);
}
return true;
},
/**
* Whether an object is sticking to another one.
* @private
*/
_isStickedObj: function(obj)
{
return obj && obj.getCoordStickTarget && obj.getCoordStickTarget();
},
/** @private */
_isManipulatingSingleStickedObj: function(manipulatingObjs)
{
var result = false;
if (manipulatingObjs.length === 1)
{
var oneObj = manipulatingObjs[0];
var info = this.getManipulateObjInfoMap().get(oneObj);
result = !!info.stickTarget;
}
return result;
},
/*
* Resize manupulatedObjs according to endScreenCoord.
* @private
*/
/*
doResizeManipulatedObjs: function(endScreenCoord)
{
var editor = this.getEditor();
var objs = this.getManipulateObjs();
//var changedObjs = [];
this._calcManipulateObjsResizeInfo(objs, this.getResizeStartingRegion(), endScreenCoord);
editor.beginUpdateObject();
var newInfoMap = this.getManipulateObjCurrInfoMap();
try
{
this.applyManipulatingObjsInfo(endScreenCoord);
this._maniplateObjsFrameEnd(objs);
this.notifyCoordChangeOfObjects(objs);
}
finally
{
editor.endUpdateObject();
this.manipulateStepDone();
}
},
*/
/**
* Transform manipulatedObjs according to manipulateType (rotate/resize) and endScreenCoord.
* @private
*/
doTransformManipulatedObjs: function(manipulateType, endScreenCoord, explicitTransformParams)
{
var T = Kekule.Editor.BasicManipulationIaController.ManipulationType;
var editor = this.getEditor();
var objs = this.getManipulateObjs();
//var changedObjs = [];
var transformParams = explicitTransformParams;
if (!transformParams)
{
if (manipulateType === T.RESIZE)
transformParams = this._calcManipulateObjsResizeParams(objs, this.getResizeStartingRegion(), endScreenCoord);
else if (manipulateType === T.ROTATE)
transformParams = this._calcManipulateObjsRotationParams(objs, endScreenCoord);
}
if (this._lastTransformParams && this._isSameTransformParams(this._lastTransformParams, transformParams, null, editor.getCoordMode())) // not a significant change, do not transform
{
//console.log('bypass transform');
return;
}
//console.log('do transform', transformParams);
var doConcreteTransform = transformParams && this._calcManipulateObjsTransformInfo(objs, transformParams);
if (!doConcreteTransform)
return;
this._lastTransformParams = transformParams;
editor.beginUpdateObject();
var newInfoMap = this.getManipulateObjCurrInfoMap();
try
{
this.applyManipulatingObjsInfo(endScreenCoord);
this._maniplateObjsFrameEnd(objs);
this.notifyCoordChangeOfObjects(objs);
}
finally
{
editor.endUpdateObject();
this.manipulateStepDone();
}
},
/** @private */
_isSameTransformParams: function(p1, p2, thresholds, coordMode)
{
if (!thresholds)
{
thresholds = this._cachedTransformCompareThresholds;
}
if (!thresholds)
{
thresholds = this._getTransformCompareThresholds(); // 0.1
this._cachedTransformCompareThresholds = thresholds; // cache the threshold to reduce calculation
}
/*
if (coordMode === Kekule.CoordMode.COORD2D)
return CU.isSameTransform2DOptions(p1, p2, {'translate': threshold, 'scale': threshold, 'rotate': threshold});
else
return CU.isSameTransform3DOptions(p1, p2, {'translate': threshold, 'scale': threshold, 'rotate': threshold});
*/
if (coordMode === Kekule.CoordMode.COORD2D)
return CU.isSameTransform2DOptions(p1, p2, {'translate': thresholds.translate, 'scale': thresholds.scale, 'rotate': thresholds.rotate});
else
return CU.isSameTransform3DOptions(p1, p2, {'translate': thresholds.translate, 'scale': thresholds.scale, 'rotate': thresholds.rotate});
},
/** @private */
_getTransformCompareThresholds: function(coordMode)
{
return {
translate: 0.1,
scale: 0.1,
rotate: 0.1
}
},
/* @private */
_calcActualMovedScreenCoord: function(obj, info, newScreenCoord)
{
return newScreenCoord;
},
/** @private */
_calcManipulateObjsMoveInfo: function(manipulatingObjs, endScreenCoord)
{
var C = Kekule.CoordUtils;
var newInfoMap = this.getManipulateObjCurrInfoMap();
var editor = this.getEditor();
var isMovingOneStickNode = this._isManipulatingSingleStickedObj(manipulatingObjs);
var isDirectManipulateSingleObj = this.isDirectManipulating() && (manipulatingObjs.length === 1);
var manipulatingObjHasSize = isDirectManipulateSingleObj?
(manipulatingObjs[0] && manipulatingObjs[0].getSizeOfMode && manipulatingObjs[0].getSizeOfMode(editor.getCoordMode(), editor.getAllowCoordBorrow())):
true;
var followPointerCoord = isDirectManipulateSingleObj && !manipulatingObjHasSize // when the object has size, it can not follow the pointer coord
&& this.getEditorConfigs().getInteractionConfigs().getFollowPointerCoordOnDirectManipulatingSingleObj();
if (followPointerCoord)
{
var startCoord = this.getStartCoord();
var moveDistance = C.getDistance(endScreenCoord, startCoord);
if (moveDistance <= this.getEditorConfigs().getInteractionConfigs().getFollowPointerCoordOnDirectManipulatingSingleObjDistanceThreshold())
{
followPointerCoord = false;
}
}
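// move each object by the screen-coord offset recorded at manipulation start (or snap the single object to the pointer when following is enabled)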
for (var i = 0, l = manipulatingObjs.length; i < l; ++i)
{
var obj = manipulatingObjs[i];
var info = this.getManipulateObjInfoMap().get(obj);
if (info.hasNoCoord) // this object has no coord property and can not be moved
continue;
if (info.stickTarget && !isMovingOneStickNode)
continue;
var newScreenCoord;
if (followPointerCoord)
newScreenCoord = endScreenCoord;
else
newScreenCoord = C.add(endScreenCoord, info.screenCoordOffset);
newScreenCoord = this._calcActualMovedScreenCoord(obj, info, newScreenCoord);
this._addManipultingObjNewInfo(obj, {'screenCoord': newScreenCoord});
}
},
/**
* Move objects in manipulateObjs array to new positions. New coords are determined by endScreenCoord
* and each object's offset.
* @private
*/
moveManipulatedObjs: function(endScreenCoord)
{
var C = Kekule.CoordUtils;
var editor = this.getEditor();
var objs = this.getManipulateObjs();
var changedObjs = [];
this._calcManipulateObjsMoveInfo(objs, endScreenCoord);
editor.beginUpdateObject();
var newInfoMap = this.getManipulateObjCurrInfoMap();
try
{
this.applyManipulatingObjsInfo(endScreenCoord);
this._maniplateObjsFrameEnd(objs);
// notify
this.notifyCoordChangeOfObjects(objs);
}
finally
{
editor.endUpdateObject();
this.manipulateStepDone();
}
},
/**
* Move a single object to newScreenCoord. moverScreenCoord is the actual pointer coord.
* Note that this method is not called only by the move operation; rotate and resize also change
* object coords, so they call it as well.
* @private
*/
doMoveManipulatedObj: function(objIndex, obj, newScreenCoord, moverScreenCoord)
{
var editor = this.getEditor();
this.updateChildMoveOperation(objIndex, obj, editor.screenCoordToObj(newScreenCoord));
editor.setObjectScreenCoord(obj, newScreenCoord);
//this._moveResizeOperExecPending = true;
},
/**
* Resize a single object to newSize.
* @private
*/
doResizeManipulatedObj: function(objIndex, obj, newSize)
{
this.updateChildResizeOperation(objIndex, obj, newSize);
if (obj.setSizeOfMode)
obj.setSizeOfMode(newSize, this.getEditor().getCoordMode());
//this._moveResizeOperExecPending = true;
},
/*
* Moving complete, do the wrap up job.
* @private
*/
/*
endMoving: function()
{
this.stopManipulate();
},
*/
/**
* Returns whether the controller is in direct manipulating state.
*/
isDirectManipulating: function()
{
return (this.getState() === Kekule.Editor.BasicManipulationIaController.State.MANIPULATING)
&& (!this.getIsManipulatingSelection());
},
/**
* Click on an object (or objects) and manipulate it directly.
* @private
*/
startDirectManipulate: function(manipulateType, objOrObjs, startCoord, startBox, rotateCenter, rotateRefCoord)
{
this.manipulateBegin();
return this.doStartDirectManipulate(manipulateType, objOrObjs, startCoord, startBox, rotateCenter, rotateRefCoord);
},
/** @private */
doStartDirectManipulate: function(manipulateType, objOrObjs, startCoord, startBox, rotateCenter, rotateRefCoord)
{
var objs = Kekule.ArrayUtils.toArray(objOrObjs);
this.setState(Kekule.Editor.BasicManipulationIaController.State.MANIPULATING);
this.setBaseCoord(startCoord);
this.setStartCoord(startCoord);
this.setRotateRefCoord(rotateRefCoord);
this.setIsManipulatingSelection(false);
//console.log('call prepareManipulating', startCoord, manipulateType, objOrObjs);
this.prepareManipulating(manipulateType || Kekule.Editor.BasicManipulationIaController.ManipulationType.MOVE, objs, startCoord, startBox, rotateCenter, rotateRefCoord);
},
/**
* Called when a manipulation is applied and the changes have been reflected in the editor (redraw done).
* Descendants may override this method.
* @private
*/
manipulateStepDone: function()
{
// do nothing here
},
/** @private */
doManipulateObjectsEnd: function(manipulatingObjs)
{
var map = this.getManipulateObjInfoMap();
for (var i = manipulatingObjs.length - 1; i >= 0; --i)
{
this.doManipulateObjectEnd(manipulatingObjs[i], map.get(manipulatingObjs[i]));
}
},
/** @private */
doManipulateObjectEnd: function(manipulateObj, objInfo)
{
/*
if (objInfo.enableIndirectCoord && manipulateObj.setEnableIndirectCoord)
manipulateObj.setEnableIndirectCoord(true);
*/
},
/**
* Called when a manipulation is beginning (usually with point down event).
* Descendants may override this method.
* @private
*/
manipulateBegin: function()
{
this.notifyEditorBeginManipulateObjects();
},
/**
* Called when a manipulation is ended (stopped or cancelled).
* Descendants may override this method.
* @private
*/
manipulateEnd: function()
{
if (this._runManipulationStepId)
{
Kekule.window.cancelAnimationFrame(this._runManipulationStepId);
this._runManipulationStepId = null;
}
this.doManipulateObjectsEnd(this.getManipulateObjs());
this._lastTransformParams = null;
this.setIsOffsetManipulating(false);
this.setManipulateObjs(null);
this.getManipulateObjInfoMap().clear();
this.getObjOperationMap().clear();
this.notifyEditorEndManipulateObjects();
},
/**
* Called before method stopManipulate.
* Descendants may do some round-off work here.
* @private
*/
manipulateBeforeStopping: function()
{
// do nothing here
},
/**
* Stop manipulate of objects.
* @private
*/
stopManipulate: function()
{
this.manipulateEnd();
},
/** @private */
refreshManipulateObjs: function()
{
this.setManipulateObjs(this.getManipulateObjs());
},
/** @private */
createManipulateObjInfo: function(obj, objIndex, startScreenCoord)
{
var editor = this.getEditor();
var info = {
//'obj': obj,
'objCoord': editor.getObjCoord(obj), // abs base coord
//'objSelfCoord': obj.getCoordOfMode? obj.getCoordOfMode(editor.getCoordMode()): null,
'screenCoord': editor.getObjectScreenCoord(obj),
'size': editor.getObjSize(obj),
'enableIndirectCoord': !!(obj.getEnableIndirectCoord && obj.getEnableIndirectCoord())
};
info.hasNoCoord = !info.objCoord;
if (!info.hasNoCoord && startScreenCoord)
info.screenCoordOffset = Kekule.CoordUtils.substract(info.screenCoord, startScreenCoord);
if (obj.getCoordStickTarget) // whether obj is a sticking object
{
info.stickTarget = obj.getCoordStickTarget();
}
return info;
},
/** @private */
notifyCoordChangeOfObjects: function(objs)
{
var changedDetails = [];
var editor = this.getEditor();
var coordPropName = this.getEditor().getCoordMode() === Kekule.CoordMode.COORD3D? 'coord3D': 'coord2D';
for (var i = 0, l = objs.length; i < l; ++i)
{
var obj = objs[i];
Kekule.ArrayUtils.pushUnique(changedDetails, {'obj': obj, 'propNames': [coordPropName]});
var relatedObjs = obj.getCoordDeterminateObjects? obj.getCoordDeterminateObjects(): [obj];
for (var j = 0, k = relatedObjs.length; j < k; ++j)
Kekule.ArrayUtils.pushUnique(changedDetails, {'obj': relatedObjs[j], 'propNames': [coordPropName]});
}
// notify
editor.objectsChanged(changedDetails);
},
/** @private */
canInteractWithObj: function(obj)
{
return (this.getState() === Kekule.Editor.BasicManipulationIaController.State.NORMAL) && obj;
},
/** @ignore */
doTestMouseCursor: function(coord, e)
{
if (!this.getEditor().isRenderable()) // if chem object not painted, do not need to test
return '';
var result = '';
// since client element is not the same to widget element, coord need to be recalculated
var c = this._getEventMouseCoord(e, this.getEditor().getEditClientElem());
if (this.getState() === Kekule.Editor.BasicManipulationIaController.State.NORMAL)
{
var R = Kekule.Editor.BoxRegion;
var region = this.getEditor().getCoordRegionInSelectionMarker(c);
if (this.getEnableSelect()) // show move/rotate/resize marker in select ia controller only
{
var T = Kekule.Editor.BasicManipulationIaController.ManipulationType;
var availManipulationTypes = this.getCurrAvailableManipulationTypes();
//if (this.getEnableMove())
if (availManipulationTypes.indexOf(T.MOVE) >= 0)
{
result = (region === R.INSIDE)? 'move': '';
}
//if (!result && this.getEnableResize())
if (!result && (availManipulationTypes.indexOf(T.RESIZE) >= 0))
{
var result =
(region === R.CORNER_TL)? 'nwse-resize':
(region === R.CORNER_TR)? 'nesw-resize':
(region === R.CORNER_BL)? 'nesw-resize':
(region === R.CORNER_BR)? 'nwse-resize':
(region === R.EDGE_TOP) || (region === R.EDGE_BOTTOM)? 'ns-resize':
(region === R.EDGE_LEFT) || (region === R.EDGE_RIGHT)? 'ew-resize':
'';
}
if (!result)
{
//if (this.getEnableRotate())
if (availManipulationTypes.indexOf(T.ROTATE) >= 0)
{
var region = this.getCoordOnSelectionRotationRegion(c);
if (!!region)
{
var SN = Kekule.Widget.StyleResourceNames;
result = (region === R.CORNER_TL)? SN.CURSOR_ROTATE_NW:
(region === R.CORNER_TR)? SN.CURSOR_ROTATE_NE:
(region === R.CORNER_BL)? SN.CURSOR_ROTATE_SW:
(region === R.CORNER_BR)? SN.CURSOR_ROTATE_SE:
SN.CURSOR_ROTATE;
//console.log('rotate cursor', result);
}
}
}
}
}
return result;
},
/**
* Set operations in suspended state.
* @param {Func} immediateOper
* @param {Func} delayedOper
* @param {Int} delay In ms.
* @private
*/
setSuspendedOperations: function(immediateOper, delayedOper, delay)
{
if (this._suspendedOperations)
this.haltSuspendedOperations(); // halt old
var self = this;
this._suspendedOperations = {
'immediate': immediateOper,
'delayed': delayedOper,
'delayExecId': setTimeout(this.execSuspendedDelayOperation.bind(this), delay)
};
return this._suspendedOperations;
},
/**
* Execute the immediate operation in suspended operations, cancelling the delayed one.
* @private
*/
execSuspendedImmediateOperation: function()
{
if (this._suspendedOperations)
{
//console.log('exec immediate');
clearTimeout(this._suspendedOperations.delayExecId);
var oper = this._suspendedOperations.immediate;
this._suspendedOperations = null; // clear old
return oper.apply(this);
}
},
/**
* Execute the delayed operation in suspended operations, cancelling the immediate one.
* @private
*/
execSuspendedDelayOperation: function()
{
if (this._suspendedOperations)
{
//console.log('exec delayed');
clearTimeout(this._suspendedOperations.delayExecId);
var oper = this._suspendedOperations.delayed;
this._suspendedOperations = null; // clear old
return oper.apply(this);
}
},
/**
* Halt all suspended operations.
* @private
*/
haltSuspendedOperations: function()
{
if (this._suspendedOperations)
{
clearTimeout(this._suspendedOperations.delayExecId);
this._suspendedOperations = null; // clear old
}
},
/** @private */
_startNewSelecting: function(startCoord, shifted)
{
if (this.getEnableSelect())
{
this.getEditor().startSelecting(startCoord, shifted || this.getEditor().getIsToggleSelectOn());
this.setState(Kekule.Editor.BasicManipulationIaController.State.SELECTING);
}
},
/** @private */
_startOffSelectionManipulation: function(currCoord)
{
//console.log('off selection!');
this.setIsOffsetManipulating(true);
this.startManipulation(currCoord, null, Kekule.Editor.BasicManipulationIaController.ManipulationType.MOVE);
this.getEditor().pulseSelectionAreaMarker(); // pulse selection to draw the user's attention
},
/**
* Begin a manipulation.
* Descendants may override this method.
* @param {Hash} currCoord Current coord of pointer (mouse or touch)
* @param {Object} e Pointer (mouse or touch) event parameter.
* @param {Int} explicitManipulationType Explicitly specified manipulation type (optional).
*/
startManipulation: function(currCoord, e, explicitManipulationType)
{
var S = Kekule.Editor.BasicManipulationIaController.State;
var T = Kekule.Editor.BasicManipulationIaController.ManipulationType;
var availManipulationTypes = this.getCurrAvailableManipulationTypes();
var evokedByTouch = e && e.pointerType === 'touch'; // edge resize/rotate will be disabled in touch
if (e)
{
this.setManipulationPointerType(e && e.pointerType);
}
this.manipulateBegin();
this.setBaseCoord(currCoord);
this.setStartCoord(currCoord);
this._lastTransformParams = null;
var coordRegion = currCoord && this.getEditor().getCoordRegionInSelectionMarker(currCoord);
var R = Kekule.Editor.BoxRegion;
var rotateRegion = currCoord && this.getCoordOnSelectionRotationRegion(currCoord);
// test manipulate type
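// priority: explicit gesture transform, then resize (pointer on selection edge/corner), then move (pointer inside selection), then rotate (pointer in rotation region)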
/*
var isTransform = (this.getEnableResize() || this.getEnableRotate())
&& (explicitManipulationType === T.TRANSFORM); // gesture transform
*/
var isTransform = (availManipulationTypes.indexOf(T.TRANSFORM) >= 0)
&& (explicitManipulationType === T.TRANSFORM); // gesture transform
//console.log('check isTransform', isTransform, explicitManipulationType, availManipulationTypes);
if (!isTransform)
{
var isResize = !evokedByTouch && (availManipulationTypes.indexOf(T.RESIZE) >= 0) //&& this.getEnableResize()
&& ((explicitManipulationType === T.RESIZE) || ((coordRegion !== R.INSIDE) && (coordRegion !== R.OUTSIDE)));
var isMove = !isResize && (availManipulationTypes.indexOf(T.MOVE) >= 0) // this.getEnableMove()
&& ((explicitManipulationType === T.MOVE) || (coordRegion !== R.OUTSIDE));
var isRotate = !evokedByTouch && !isResize && !isMove && (availManipulationTypes.indexOf(T.ROTATE) >= 0)//this.getEnableRotate()
&& ((explicitManipulationType === T.ROTATE) || !!rotateRegion);
}
else // transform
{
//console.log('set transform types', availManipulationTypes);
this._availTransformTypes = availManipulationTypes; // stores the available transform types
}
if (!isTransform && !isResize && !isRotate) // when pointer not at resize or rotate position, check if it is directly on an object to evoke direct manipulation
{
// check if mouse just on an object, if so, direct manipulation mode
var hoveredObj = this.getEditor().getTopmostBasicObjectAtCoord(currCoord, this.getCurrBoundInflation());
if (hoveredObj && !evokedByTouch) // mouse down directly on a object
{
//hoveredObj = hoveredObj.getNearestSelectableObject();
if (this.isInAncestorSelectMode())
hoveredObj = this.getStandaloneAncestor(hoveredObj);
hoveredObj = hoveredObj.getNearestMovableObject();
if (this.getEnableMove())
{
this.doStartDirectManipulate(null, hoveredObj, currCoord); // call doStartDirectManipulate rather than startDirectManipulate, avoid calling doStartDirectManipulate twice
return;
}
}
}
// check if already has selection and mouse in selection rect first
//if (this.getEditor().isCoordInSelectionMarkerBound(coord))
if (isTransform)
{
this.setState(S.MANIPULATING);
this.setIsManipulatingSelection(true);
this.setResizeStartingRegion(coordRegion);
this.setRotateStartingRegion(rotateRegion);
this.prepareManipulating(T.TRANSFORM, this.getEditor().getSelection(), currCoord, this.getEditor().getSelectionContainerBox());
}
else if (isResize)
{
this.setState(S.MANIPULATING);
this.setIsManipulatingSelection(true);
this.setResizeStartingRegion(/*this.getEditor().getCoordRegionInSelectionMarker(coord)*/coordRegion);
//console.log('box', this.getEditor().getUiSelectionAreaContainerBox());
this.prepareManipulating(T.RESIZE, this.getEditor().getSelection(), currCoord, this.getEditor().getSelectionContainerBox());
//console.log('Resize');
}
else if (isMove)
{
//if (this.getEnableMove())
{
this.setState(S.MANIPULATING);
this.setIsManipulatingSelection(true);
this.prepareManipulating(T.MOVE, this.getEditor().getSelection(), currCoord);
}
}
else if (isRotate)
{
this.setState(S.MANIPULATING);
this.setIsManipulatingSelection(true);
this.setRotateStartingRegion(rotateRegion);
this.prepareManipulating(T.ROTATE, this.getEditor().getSelection(), currCoord, this.getEditor().getSelectionContainerBox());
}
else
{
/*
var obj = this.getEditor().getTopmostBasicObjectAtCoord(currCoord, this.getCurrBoundInflation());
if (obj) // mouse down directly on a object
{
obj = obj.getNearestSelectableObject();
if (this.isInAncestorSelectMode())
obj = this.getStandaloneAncestor(obj);
// only mouse down and moved will cause manupulating
if (this.getEnableMove())
this.startDirectManipulate(null, obj, currCoord);
}
*/
if (hoveredObj) // point on an object, direct move
{
if (this.getEnableMove())
this.startDirectManipulate(null, hoveredObj, currCoord);
}
else // pointer down on empty region, deselect old selection and prepare for new selecting
{
if (this.getEnableMove() && this.getEnableSelect()
&& this.getEditorConfigs().getInteractionConfigs().getEnableOffSelectionManipulation()
&& this.getEditor().hasSelection() && this.getEditor().isSelectionVisible())
{
//console.log('enter suspend');
this.setState(S.SUSPENDING);
// need to wait a while to determine the actual operation
var delay = this.getEditorConfigs().getInteractionConfigs().getOffSelectionManipulationActivatingTimeThreshold();
var shifted = e && e.getShiftKey();
this.setSuspendedOperations(
this._startNewSelecting.bind(this, currCoord, shifted),
this._startOffSelectionManipulation.bind(this, currCoord),
delay
);
//this._startOffSelectionManipulation(currCoord);
}
else if (this.getEnableSelect())
{
var shifted = e && e.getShiftKey();
/*
//this.getEditor().startSelectingBoxDrag(currCoord, shifted);
//this.getEditor().setSelectMode(this.getSelectMode());
this.getEditor().startSelecting(currCoord, shifted);
this.setState(S.SELECTING);
*/
this._startNewSelecting(currCoord, shifted);
}
}
}
},
/**
* Do manipulation based on mouse/touch move step.
* //@param {Hash} currCoord Current coord of pointer (mouse or touch)
* //@param {Object} e Pointer (mouse or touch) event parameter.
*/
execManipulationStep: function(/*currCoord, e*/timeStamp)
{
if (this.getState() !== Kekule.Editor.BasicManipulationIaController.State.MANIPULATING)
return false;
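// driven by requestAnimationFrame: pointer events only refill _manipulationStepBuffer, so at most one manipulation step is executed per frame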
var currCoord = this._manipulationStepBuffer.coord;
var e = this._manipulationStepBuffer.event;
var explicitTransformParams = this._manipulationStepBuffer.explicitTransformParams;
//console.log('step', this.getState(), this._manipulationStepBuffer.explicitTransformParams);
if (explicitTransformParams) // has transform params explicitly in gesture transform
{
this.doExecManipulationStepWithExplicitTransformParams(explicitTransformParams, this._manipulationStepBuffer);
}
else if (currCoord && e)
{
//console.log('do actual manipulate');
this.doExecManipulationStep(currCoord, e, this._manipulationStepBuffer);
// empty buffer, indicating that the event has been handled
}
this._manipulationStepBuffer.coord = null;
this._manipulationStepBuffer.event = null;
this._manipulationStepBuffer.explicitTransformParams = null;
/*
if (this._lastTimeStamp)
console.log('elpase', timeStamp - this._lastTimeStamp);
this._lastTimeStamp = timeStamp;
*/
this._runManipulationStepId = Kekule.window.requestAnimationFrame(this.execManipulationStepBind);
},
/**
* Do actual manipulation based on mouse/touch move step.
* Descendants may override this method.
* @param {Hash} currCoord Current coord of pointer (mouse or touch)
* @param {Object} e Pointer (mouse or touch) event parameter.
*/
doExecManipulationStep: function(currCoord, e, manipulationStepBuffer)
{
var T = Kekule.Editor.BasicManipulationIaController.ManipulationType;
var manipulateType = this.getManipulationType();
var editor = this.getEditor();
editor.beginUpdateObject();
try
{
this._isBusy = true;
if (manipulateType === T.MOVE)
{
this.moveManipulatedObjs(currCoord);
}
else if (manipulateType === T.RESIZE)
{
//this.doResizeManipulatedObjs(currCoord);
this.doTransformManipulatedObjs(manipulateType, currCoord);
}
else if (manipulateType === T.ROTATE)
{
//this.rotateManipulatedObjs(currCoord);
this.doTransformManipulatedObjs(manipulateType, currCoord);
}
}
finally
{
editor.endUpdateObject();
this._isBusy = false;
}
},
/**
* Do actual manipulation with explicitly provided transform params (e.g. from a gesture event).
* Descendants may override this method.
* @param {Hash} transformParams Explicit transform params.
* @param {Object} manipulationStepBuffer
*/
doExecManipulationStepWithExplicitTransformParams: function(transformParams, manipulationStepBuffer)
{
var T = Kekule.Editor.BasicManipulationIaController.ManipulationType;
var manipulateType = this.getManipulationType();
if (manipulateType === T.TRANSFORM)
{
var editor = this.getEditor();
editor.beginUpdateObject();
try
{
this._isBusy = true;
this.doTransformManipulatedObjs(manipulateType, null, transformParams);
}
finally
{
editor.endUpdateObject();
this._isBusy = false;
}
}
},
/**
* Refill the manipulationStepBuffer.
* Descendants may override this method.
* @param {Object} buffer The manipulation step buffer.
* @param {Hash} value Values to merge into the buffer.
* @private
*/
updateManipulationStepBuffer: function(buffer, value)
{
Object.extend(buffer, value);
/*
buffer.coord = coord;
buffer.event = e;
*/
},
// event handle methods
/** @ignore */
react_pointermove: function(/*$super, */e)
{
this.tryApplySuper('react_pointermove', [e]) /* $super(e) */;
if (this._isBusy)
{
return true;
}
if (Kekule.ObjUtils.notUnset(this.getActivePointerId()) && e.pointerId !== this.getActivePointerId())
{
//console.log('hhh', e.pointerId, this.getActivePointerId());
return true;
}
var S = Kekule.Editor.BasicManipulationIaController.State;
var T = Kekule.Editor.BasicManipulationIaController.ManipulationType;
var state = this.getState();
var coord = this._getEventMouseCoord(e);
var distanceFromLast;
if (state === S.NORMAL || state === S.SUSPENDING)
{
if (this._lastMouseMoveCoord)
{
var dis = Kekule.CoordUtils.getDistance(coord, this._lastMouseMoveCoord);
distanceFromLast = dis;
if (dis < 4) // less than 4 px, too tiny to react
{
return true;
}
}
}
this._lastMouseMoveCoord = coord;
/*
if (state !== S.NORMAL)
this.getEditor().hideHotTrack();
if (state === S.NORMAL)
{
// in normal state, if mouse moved to boundary of a object, it may be highlighted
this.getEditor().hotTrackOnCoord(coord);
}
else
*/
if (state === S.SUSPENDING)
{
var disThreshold = this.getEditorConfigs().getInteractionConfigs().getUnmovePointerDistanceThreshold() || 0;
if (Kekule.ObjUtils.notUnset(distanceFromLast) && (distanceFromLast > disThreshold))
this.execSuspendedImmediateOperation();
}
if (state === S.SELECTING)
{
if (this.getEnableSelect())
{
//this.getEditor().dragSelectingBoxToCoord(coord);
this.getEditor().addSelectingAnchorCoord(coord);
}
e.preventDefault();
}
else if (state === S.MANIPULATING) // move or resize objects
{
//console.log('mouse move', coord);
this.updateManipulationStepBuffer(this._manipulationStepBuffer, {'coord': coord, 'event': e});
//this.execManipulationStep(coord, e);
e.preventDefault();
}
return true;
},
/** @private */
react_pointerdown: function(/*$super, */e)
{
this.tryApplySuper('react_pointerdown', [e]) /* $super(e) */;
//console.log('pointerdown', e);
this.setActivePointerId(e.pointerId);
var S = Kekule.Editor.BasicManipulationIaController.State;
//var T = Kekule.Editor.BasicManipulationIaController.ManipulationType;
if (e.getButton() === Kekule.X.Event.MouseButton.LEFT)
{
this._lastMouseMoveCoord = null;
var coord = this._getEventMouseCoord(e);
if ((this.getState() === S.NORMAL)/* && (this.getEditor().getMouseLBtnDown()) */)
{
//var evokedByTouch = e && e.pointerType === 'touch';
var self = this;
var beginNormalManipulation = function(){
if (self.getState() === S.NORMAL || self.getState() === S.SUSPENDING)
{
self.startManipulation(coord, e);
e.preventDefault();
}
};
this.setState(S.SUSPENDING);
// wait for a while for the possible gesture operations
this.setSuspendedOperations(beginNormalManipulation, beginNormalManipulation, 50);
}
}
else if (e.getButton() === Kekule.X.Event.MouseButton.RIGHT)
{
//if (this.getEnableMove())
{
if (this.getState() === S.MANIPULATING) // when click right button on manipulating, just cancel it.
{
this.cancelManipulate();
this.setState(S.NORMAL);
e.stopPropagation();
e.preventDefault();
}
else if (this.getState() === S.SUSPENDING)
this.haltSuspendedOperations();
}
}
return true;
},
/** @private */
react_pointerup: function(e)
{
if (e.getButton() === Kekule.X.Event.MouseButton.LEFT)
{
var coord = this._getEventMouseCoord(e);
this.setEndCoord(coord);
var startCoord = this.getStartCoord();
var endCoord = coord;
var shifted = e.getShiftKey();
var S = Kekule.Editor.BasicManipulationIaController.State;
if (this.getState() === S.SUSPENDING) // done suspended first, then finish the operation
this.execSuspendedImmediateOperation();
var state = this.getState();
if (state === S.SELECTING) // mouse up, end selecting
{
//this.getEditor().endSelectingBoxDrag(coord, shifted);
this.getEditor().endSelecting(coord, shifted || this.getEditor().getIsToggleSelectOn());
this.setState(S.NORMAL);
e.preventDefault();
var editor = this.getEditor();
editor.endManipulateObject();
}
else if (state === S.MANIPULATING)
{
//var dis = Kekule.CoordUtils.getDistance(startCoord, endCoord);
//if (dis <= this.getEditorConfigs().getInteractionConfigs().getUnmovePointerDistanceThreshold())
if (startCoord && endCoord && Kekule.CoordUtils.isEqual(startCoord, endCoord)) // pointer down and up at the same point: not a manipulation, just select an object
{
if (this.getEnableSelect())
this.getEditor().selectOnCoord(startCoord, shifted || this.getEditor().getIsToggleSelectOn());
}
else // move objects to new pos
{
this.manipulateBeforeStopping();
/*
if (this.getEnableMove())
{
//this.moveManipulatedObjs(coord);
//this.endMoving();
// add operation to editor's historys
this.addOperationToEditor();
}
*/
this.addOperationToEditor();
}
this.stopManipulate();
this.setState(S.NORMAL);
e.preventDefault();
}
}
return true;
},
/** @private */
react_mousewheel: function(/*$super, */e)
{
if (e.getCtrlKey())
{
var state = this.getState();
if (state === Kekule.Editor.BasicManipulationIaController.State.NORMAL)
{
// disallow mouse zoom during manipulation
return this.tryApplySuper('react_mousewheel', [e]) /* $super(e) */;
}
e.preventDefault();
}
},
/* @private */
/*
react_keyup: function(e)
{
var keyCode = e.getKeyCode();
switch (keyCode)
{
case 46: // delete
{
if (this.getEnableRemove())
this.removeSelection();
}
}
}
*/
//////////////////// Hammer Gesture event handlers ///////////////////////////
/** @private */
_isGestureManipulationEnabled: function()
{
return this.getEditorConfigs().getInteractionConfigs().getEnableGestureManipulation();
},
/** @private */
_isGestureZoomOnEditorEnabled: function()
{
return this.getEditorConfigs().getInteractionConfigs().getEnableGestureZoomOnEditor();
},
/** @private */
_isInGestureManipulation: function()
{
return !!this._initialGestureTransformParams;
},
/** @private */
_isGestureZoomOnEditor: function()
{
return !!this._initialGestureZoomLevel;
},
/**
* Starts a gesture transform.
* @param {Object} event
* @private
*/
beginGestureTransform: function(event)
{
if (this.getEditor().hasSelection())
{
this._initialGestureZoomLevel = null;
if (this._isGestureManipulationEnabled())
{
this.haltSuspendedOperations(); // halt possible touch hold manipulations
// stores initial gesture transform params
this._initialGestureTransformParams = {
'angle': (event.rotation * Math.PI / 180) || 0
};
//console.log('begin gesture manipulation', this.getState(), this.getManipulationType());
// start a brand new one
if (this.getState() !== Kekule.Editor.BasicManipulationIaController.State.MANIPULATING)
{
this.startManipulation(null, null, Kekule.Editor.BasicManipulationIaController.ManipulationType.TRANSFORM);
}
else
{
if (this.getManipulationType() !== Kekule.Editor.BasicManipulationIaController.ManipulationType.TRANSFORM)
{
// the gesture event may be evoked after pointerdown event,
// and in pointerdown, startManipulation may already have been called without transform.
// So here we force a new manipulation with transform on.
//this.setManipulationType(Kekule.Editor.BasicManipulationIaController.ManipulationType.TRANSFORM);
this.startManipulation(null, null, Kekule.Editor.BasicManipulationIaController.ManipulationType.TRANSFORM);
}
}
}
else
this._initialGestureTransformParams = null;
}
else if (this._isGestureZoomOnEditorEnabled()) // zoom on editor
{
this.getEditor().cancelSelecting(); // force cancel any pending selecting
this.setState(Kekule.Editor.BasicManipulationIaController.State.NORMAL);
this._initialGestureZoomLevel = this.getEditor().getZoom();
}
},
/**
* Ends a gesture transform.
* @private
*/
endGestureTransform: function()
{
if (this.getState() === Kekule.Editor.BasicManipulationIaController.State.MANIPULATING) // stop prev manipulation first
{
if (this._isInGestureManipulation())
{
this.manipulateBeforeStopping();
this.addOperationToEditor();
this.stopManipulate();
this.setState(Kekule.Editor.BasicManipulationIaController.State.NORMAL);
this._initialGestureTransformParams = null;
}
}
if (this._isGestureZoomOnEditor())
{
this._initialGestureZoomLevel = null;
}
},
/**
* Do a new transform step according to received event.
* @param {Object} e Gesture event received.
* @private
*/
doGestureTransformStep: function(e)
{
var T = Kekule.Editor.BasicManipulationIaController.ManipulationType;
if ((this.getState() === Kekule.Editor.BasicManipulationIaController.State.MANIPULATING)
&& (this.getManipulationType() === T.TRANSFORM)
&& (this._isInGestureManipulation()))
{
var availTransformTypes = this._availTransformTypes || [];
// get transform params from event directly
var center = this.getRotateCenter(); // use the center of current editor selection
var resizeScales, rotateAngle;
if (availTransformTypes.indexOf(T.RESIZE) >= 0)
{
var scale = e.scale;
resizeScales = this._calcActualResizeScales(this.getManipulateObjs(), {'scaleX': scale, 'scaleY': scale});
}
else
resizeScales = {'scaleX': 1, 'scaleY': 1};
if (availTransformTypes.indexOf(T.ROTATE) >= 0)
{
var absAngle = e.rotation * Math.PI / 180;
var rotateAngle = absAngle - this._initialGestureTransformParams.angle;
// get actual rotation angle
rotateAngle = this._calcActualRotateAngle(this.getManipulateObjs(), rotateAngle, this._initialGestureTransformParams.angle, absAngle);
}
else
{
rotateAngle = 0;
}
//console.log('here', resizeScales.scaleX, resizeScales.scaleY, rotateAngle, availTransformTypes);
this.updateManipulationStepBuffer(this._manipulationStepBuffer, {
'explicitTransformParams': {
'center': center,
'scaleX': resizeScales.scaleX, 'scaleY': resizeScales.scaleY,
'rotateAngle': rotateAngle
//'rotateDegree': e.rotation,
//'event': e
}
});
e.preventDefault();
}
else if (this._isGestureZoomOnEditor())
{
var editor = this.getEditor();
var scale = e.scale;
var initZoom = this._initialGestureZoomLevel;
editor.zoomTo(initZoom * scale, null, e.center);
}
},
/** @ignore */
react_rotatestart: function(e)
{
if (this.getEnableGestureManipulation())
this.beginGestureTransform(e);
},
/** @ignore */
react_rotate: function(e)
{
if (this.getEnableGestureManipulation())
this.doGestureTransformStep(e);
},
/** @ignore */
react_rotateend: function(e)
{
if (this.getEnableGestureManipulation())
this.endGestureTransform();
},
/** @ignore */
react_rotatecancel: function(e)
{
if (this.getEnableGestureManipulation())
this.endGestureTransform();
},
/** @ignore */
react_pinchstart: function(e)
{
if (this.getEnableGestureManipulation())
this.beginGestureTransform(e);
},
/** @ignore */
react_pinchmove: function(e)
{
if (this.getEnableGestureManipulation())
this.doGestureTransformStep(e);
},
/** @ignore */
react_pinchend: function(e)
{
if (this.getEnableGestureManipulation())
this.endGestureTransform();
},
/** @ignore */
react_pinchcancel: function(e)
{
if (this.getEnableGestureManipulation())
this.endGestureTransform();
}
});
/**
* Enumeration of state of a {@link Kekule.Editor.BasicManipulationIaController}.
* @class
*/
Kekule.Editor.BasicManipulationIaController.State = {
/** Normal state. */
NORMAL: 0,
/** Is selecting objects. */
SELECTING: 1,
/** Is manipulating objects (e.g. changing object position). */
MANIPULATING: 2,
/**
* The pointer is down, but we need to wait to determine whether a gesture event will follow.
*/
WAITING: 10,
/**
* The pointer has just been put down; if it is moved immediately, the selecting state is entered.
* But if it is held still for a while, the state may turn to manipulating, moving the currently selected objects.
*/
SUSPENDING: 11
};
/**
* Enumeration of manipulation types of a {@link Kekule.Editor.BasicManipulationIaController}.
* @class
*/
Kekule.Editor.BasicManipulationIaController.ManipulationType = {
MOVE: 0,
ROTATE: 1,
RESIZE: 2,
TRANSFORM: 4 // scale and rotate simultaneously by touch
};
/** @ignore */
Kekule.Editor.IaControllerManager.register(Kekule.Editor.BasicManipulationIaController, Kekule.Editor.BaseEditor);
})();<|fim▁end|> | if (endDistance < threshold)
{
angle = 0; // do not rotate |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use std::fmt;
use std::iter;
use std::f64;
use adivon::bag::Bag;
use adivon::stack::Stack;
use adivon::queue::Queue;
use adivon::priority_queue::IndexMinPQ;
/// Weighted directed edge
#[derive(Clone, Copy)]
pub struct DirectedEdge {
v: usize,
w: usize,
weight: f64
}
impl DirectedEdge {
pub fn new(v: usize, w: usize, weight: f64) -> DirectedEdge {
assert!(!weight.is_nan(), "weight is NaN");
DirectedEdge {
v: v,
w: w,
weight: weight
}
}
#[inline]
pub fn from(&self) -> usize {
self.v
}
#[inline]
pub fn to(&self) -> usize {
self.w
}
#[inline]
pub fn weight(&self) -> f64 {
self.weight
}
}
impl fmt::Debug for DirectedEdge {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{} -> {} {:5.2}", self.v, self.w, self.weight)
}
}
#[test]
fn test_directed_edge() {
let e = DirectedEdge::new(12, 24, 3.14);
assert_eq!(format!("{:?}", e), "12 -> 24 3.14");
}
/// Edge-weighted digraph, implemented using adjacency lists
#[derive(Clone)]
pub struct EdgeWeightedDigraph {
v: usize,
e: usize,
adj: Vec<Bag<DirectedEdge>>
}
impl EdgeWeightedDigraph {
pub fn new(v: usize) -> EdgeWeightedDigraph {
EdgeWeightedDigraph {
v: v,
e: 0,
adj: iter::repeat(Bag::new()).take(v).collect()
}
}
pub fn v(&self) -> usize {
self.v
}
pub fn e(&self) -> usize {
self.e
}
#[inline]
fn validate_vertex(&self, v: usize) {
assert!(v < self.v, "vertex must be between 0 and V");
}
pub fn add_edge(&mut self, e: DirectedEdge) {
let v = e.from();
let w = e.to();
self.validate_vertex(v);
self.validate_vertex(w);
self.adj[v].add(e);
self.e += 1;
}
pub fn adj(&self, v: usize) -> ::std::vec::IntoIter<DirectedEdge> {
self.validate_vertex(v);
self.adj[v].iter().map(|e| e.clone()).collect::<Vec<DirectedEdge>>().into_iter()
}
pub fn outdegree(&self, v: usize) -> usize {
self.validate_vertex(v);
self.adj[v].len()
}
pub fn edges(&self) -> ::std::vec::IntoIter<DirectedEdge> {
self.adj.iter()
.flat_map(|adj| {
adj.iter().map(|e| e.clone()).collect::<Vec<DirectedEdge>>().into_iter()
})
.collect::<Vec<DirectedEdge>>()
.into_iter()
}
pub fn to_dot(&self) -> String {
let mut dot = String::new();
dot.push_str("digraph G {\n");
for i in 0 .. self.v {
dot.push_str(&format!(" {};\n", i));
}
for e in self.edges() {
let v = e.from();
let w = e.to();
dot.push_str(&format!(" {} -> {} [ label=\"{}\" ];\n",
v, w, e.weight))
}
dot.push_str("}\n");
dot
}
}
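// A minimal usage sketch added for illustration (not part of the original source);
// it only exercises the basic digraph accessors defined above.
#[test]
fn test_edge_weighted_digraph_basics() {
    let mut g = EdgeWeightedDigraph::new(3);
    g.add_edge(DirectedEdge::new(0, 1, 0.5));
    g.add_edge(DirectedEdge::new(1, 2, 0.25));
    assert_eq!(g.v(), 3);
    assert_eq!(g.e(), 2);
    assert_eq!(g.outdegree(0), 1);
    assert_eq!(g.adj(0).next().unwrap().to(), 1);
    assert_eq!(g.edges().count(), 2);
}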
// Single-source shortest paths API
#[allow(dead_code)]
pub struct DijkstraSP<'a> {
graph: &'a EdgeWeightedDigraph,
dist_to: Vec<f64>,
edge_to: Vec<Option<DirectedEdge>>,
pq: IndexMinPQ<f64>,
s: usize
}
impl<'a> DijkstraSP<'a> {
fn new<'b>(graph: &'b EdgeWeightedDigraph, s: usize) -> DijkstraSP<'b> {
let n = graph.v();
for e in graph.edges() {
if e.weight() < 0.0 {
panic!("edge has negative weight in DijkstraSP");
}
}
let dist_to = iter::repeat(f64::INFINITY).take(n).collect();
let edge_to = iter::repeat(None).take(n).collect();
let pq = IndexMinPQ::with_capacity(n);
let mut sp = DijkstraSP {
graph: graph,
s: s,
dist_to: dist_to,
edge_to: edge_to,
pq: pq
};
// algorithm: repeatedly remove the closest vertex from the PQ and relax its outgoing edges
sp.dist_to[s] = 0.0;
sp.pq.insert(s, 0.0);
while !sp.pq.is_empty() {
let v = sp.pq.del_min().unwrap();
for e in graph.adj(v) {
sp.relax(e);
}
}
sp
}
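// relax edge e: if the path to e.to() through e.from() is shorter than the best known one, update dist_to/edge_to and the PQ entry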
fn relax(&mut self, e: DirectedEdge) {
let v = e.from();
let w = e.to();
if self.dist_to[w] > self.dist_to[v] + e.weight() {
self.dist_to[w] = self.dist_to[v] + e.weight();
self.edge_to[w] = Some(e);
if self.pq.contains(w) {
self.pq.decrease_key(w, self.dist_to[w]);
} else {
self.pq.insert(w, self.dist_to[w]);
}
}
}
// length of shortest path from s to v
pub fn dist_to(&self, v: usize) -> f64 {
self.dist_to[v]
}
pub fn has_path_to(&self, v: usize) -> bool {
self.dist_to[v] < f64::INFINITY
}
// shortest path from s to v
pub fn path_to(&self, v: usize) -> ::std::vec::IntoIter<DirectedEdge> {
if !self.has_path_to(v) {
vec!().into_iter()
} else {
let mut path = Stack::new();
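// walk edge_to back from v toward the source; pushing onto a stack reverses the edges into path order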
let mut e = self.edge_to[v];
while e.is_some() {
path.push(e.unwrap());
e = self.edge_to[e.unwrap().from()]
}
path.into_iter().collect::<Vec<DirectedEdge>>().into_iter()
}
}
#[cfg(test)]
fn check(&self) -> bool {
let s = self.s;
for e in self.graph.edges() {
if e.weight() < 0.0 {
return false;
}
}
if self.dist_to[s] != 0.0 || self.edge_to[s].is_some() {
return false;
}
for v in 0 .. self.graph.v() {
if v == s { continue }
if self.edge_to[v].is_none() && self.dist_to[v] != f64::INFINITY {
// dist_to[] edge_to[] inconsistent
return false;
}
}
for v in 0 .. self.graph.v() {
for e in self.graph.adj(v) {
let w = e.to();
if self.dist_to[v] + e.weight() < self.dist_to[w] {
// edge not relaxed
return false;
}
}
}
for w in 0 .. self.graph.v() {
if self.edge_to[w].is_none() { continue }
let e = self.edge_to[w].unwrap();
let v = e.from();
if w != e.to() {
return false;
}
if self.dist_to[v] + e.weight() != self.dist_to[w] {
// edge on shortest path not tight
return false;
}
}
true
}
}
/// Compute preorder and postorder for a digraph or edge-weighted digraph
pub struct DepthFirstOrder<'a> {
graph: &'a EdgeWeightedDigraph,
pre: Vec<usize>,
post: Vec<usize>,
preorder: Queue<usize>,
postorder: Queue<usize>,
marked: Vec<bool>,
pre_counter: usize,
post_counter: usize
}
impl<'a> DepthFirstOrder<'a> {
fn new<'b>(graph: &'b EdgeWeightedDigraph) -> DepthFirstOrder<'b> {
let n = graph.v();
let mut ret = DepthFirstOrder {
graph: graph,
pre: iter::repeat(0).take(n).collect(),
post: iter::repeat(0).take(n).collect(),
preorder: Queue::new(),
postorder: Queue::new(),
marked: iter::repeat(false).take(n).collect(),
pre_counter: 0,
post_counter: 0
};
ret.init();
ret
}
fn init(&mut self) {
for v in 0 .. self.graph.v() {
if !self.marked[v] {
self.dfs(v)
}
}
}
fn dfs(&mut self, v: usize) {
self.marked[v] = true;
self.pre[v] = self.pre_counter;
self.pre_counter += 1;
self.preorder.enqueue(v);
for e in self.graph.adj(v) {
let w = e.to();
if !self.marked[w] {
self.dfs(w);
}
}
self.postorder.enqueue(v);
self.post[v] = self.post_counter;
self.post_counter += 1;
}
// preorder number of vertex v
pub fn preorder(&self, v: usize) -> usize {
self.pre[v]
}
// postorder number of vertex v
pub fn postorder(&self, v: usize) -> usize {
self.post[v]
}
pub fn pre(&self) -> ::std::vec::IntoIter<usize> {
self.preorder.clone().into_iter().collect::<Vec<usize>>().into_iter()
}
pub fn post(&self) -> ::std::vec::IntoIter<usize> {
self.postorder.clone().into_iter().collect::<Vec<usize>>().into_iter()
}
pub fn reverse_post(&self) -> ::std::vec::IntoIter<usize> {
let mut reverse = Stack::new();
for v in self.postorder.iter() {
reverse.push(*v);
}
reverse.into_iter().collect::<Vec<usize>>().into_iter()
}
#[cfg(test)]<|fim▁hole|> let mut r = 0;
for v in self.post() {
if self.postorder(v) != r {
// post(v) and post() inconsistent
return false;
}
r += 1;
}
r = 0;
for v in self.pre() {
if self.preorder(v) != r {
// preorder(v) and pre() inconsistent
return false;
}
r += 1;
}
return true;
}
}
// Finds a directed cycle in an edge-weighted digraph
pub struct EdgeWeightedDirectedCycle<'a> {
graph: &'a EdgeWeightedDigraph,
marked: Vec<bool>,
edge_to: Vec<Option<DirectedEdge>>,
on_stack: Vec<bool>,
// directed cycle (or empty)
cycle: Option<Stack<DirectedEdge>>
}
impl<'a> EdgeWeightedDirectedCycle<'a> {
fn new<'b>(graph: &'b EdgeWeightedDigraph) -> EdgeWeightedDirectedCycle<'b> {
let n = graph.v();
let mut ret = EdgeWeightedDirectedCycle {
graph: graph,
marked: iter::repeat(false).take(n).collect(),
edge_to: iter::repeat(None).take(n).collect(),
on_stack: iter::repeat(false).take(n).collect(),
cycle: None
};
ret.init();
ret
}
fn init(&mut self) {
for v in 0 .. self.graph.v() {
if !self.marked[v] {
self.dfs(v)
}
}
}
fn dfs(&mut self, v: usize) {
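        // standard DFS cycle detection: on_stack marks vertices on the current recursion path;
        // reaching an on-stack vertex w closes a cycle, reconstructed by walking edge_to back to w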
self.on_stack[v] = true;
self.marked[v] = true;
for e in self.graph.adj(v) {
let w = e.to();
if self.cycle.is_some() {
return;
} else if !self.marked[w] {
self.edge_to[w] = Some(e);
self.dfs(w);
} else if self.on_stack[w] {
self.cycle = Some(Stack::new());
                // shadow `e` locally and walk edge_to back until the cycle closes at w
let mut e = e.clone();
while e.from() != w {
self.cycle.as_mut().map(|s| s.push(e));
e = self.edge_to[e.from()].unwrap();
}
self.cycle.as_mut().map(|s| s.push(e));
}
}
self.on_stack[v] = false;
}
pub fn has_cycle(&self) -> bool {
self.cycle.is_some()
}
pub fn edges(&self) -> ::std::vec::IntoIter<DirectedEdge> {
self.cycle.iter().flat_map(|e| e.clone()).collect::<Vec<DirectedEdge>>().into_iter()
}
#[cfg(test)]
fn check(&self) -> bool {
if self.has_cycle() {
let first = self.edges().next().unwrap();
let last = self.edges().last().unwrap();
if first.from() == last.to() {
return true;
} else {
return false;
}
}
return true;
}
}
/// Compute topological ordering of a DAG or edge-weighted DAG
pub enum Topological {
NonDAG,
Order(Vec<usize>)
}
impl Topological {
fn new(graph: &EdgeWeightedDigraph) -> Topological {
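        // a topological order exists iff the digraph has no directed cycle;
        // when it exists, the reverse DFS postorder is such an order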
if graph.cycle().has_cycle() {
Topological::NonDAG
} else {
Topological::Order(graph.depth_first_order().reverse_post().collect())
}
}
pub fn order(&self) -> ::std::vec::IntoIter<usize> {
match self {
&Topological::Order(ref order) => {
order.clone().into_iter()
},
&Topological::NonDAG => {
vec![].into_iter()
}
}
}
pub fn has_order(&self) -> bool {
match self {
&Topological::NonDAG => false,
&Topological::Order(_) => true
}
}
}
/// Computes shortest paths in an edge-weighted acyclic digraph
pub struct AcyclicSP<'a> {
graph: &'a EdgeWeightedDigraph,
dist_to: Vec<f64>,
edge_to: Vec<Option<DirectedEdge>>
}
impl<'a> AcyclicSP<'a> {
fn new<'b>(graph: &'b EdgeWeightedDigraph, s: usize) -> AcyclicSP<'b> {
let n = graph.v();
let dist_to: Vec<f64> = iter::repeat(f64::INFINITY).take(n).collect();
let edge_to = iter::repeat(None).take(n).collect();
let mut ret = AcyclicSP {
graph: graph,
dist_to: dist_to,
edge_to: edge_to
};
ret.dist_to[s] = 0.0;
let topological = ret.graph.topological();
if !topological.has_order() {
panic!("digraph is not acyclic");
}
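        // relaxing vertices in topological order computes shortest paths in an
        // edge-weighted DAG in a single pass; negative edge weights are allowed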
for v in topological.order() {
for e in ret.graph.adj(v) {
ret.relax(e);
}
}
ret
}
fn relax(&mut self, e: DirectedEdge) {
let v = e.from();
let w = e.to();
if self.dist_to[w] > self.dist_to[v] + e.weight() {
self.dist_to[w] = self.dist_to[v] + e.weight();
self.edge_to[w] = Some(e);
}
}
pub fn dist_to(&self, v: usize) -> f64 {
self.dist_to[v]
}
pub fn has_path_to(&self, v: usize) -> bool {
self.dist_to[v] < f64::INFINITY
}
pub fn path_to(&self, v: usize) -> ::std::vec::IntoIter<DirectedEdge> {
if !self.has_path_to(v) {
vec![].into_iter()
} else {
let mut path = Stack::new();
let mut e = self.edge_to[v];
while e.is_some() {
path.push(e.unwrap());
e = self.edge_to[e.unwrap().from()];
}
path.into_iter().collect::<Vec<DirectedEdge>>().into_iter()
}
}
}
/// Bellman-Ford shortest path algorithm. Computes the shortest path tree in
/// edge-weighted digraph G from vertex s, or finds a negative cost cycle
/// reachable from s.
pub struct BellmanFordSP<'a> {
graph: &'a EdgeWeightedDigraph,
dist_to: Vec<f64>,
edge_to: Vec<Option<DirectedEdge>>,
on_queue: Vec<bool>,
queue: Queue<usize>,
cost: usize,
cycle: Option<Vec<DirectedEdge>>
}
impl<'a> BellmanFordSP<'a> {
fn new<'b>(graph: &'b EdgeWeightedDigraph, s: usize) -> BellmanFordSP<'b> {
let n = graph.v();
let dist_to = iter::repeat(f64::INFINITY).take(n).collect();
let edge_to = iter::repeat(None).take(n).collect();
let on_queue = iter::repeat(false).take(n).collect();
let mut ret = BellmanFordSP {
graph: graph,
dist_to: dist_to,
edge_to: edge_to,
on_queue: on_queue,
queue: Queue::new(),
cost: 0,
cycle: None
};
ret.dist_to[s] = 0.0;
// Bellman-Ford algorithm
ret.queue.enqueue(s);
ret.on_queue[s] = true;
while !ret.queue.is_empty() && !ret.has_negative_cycle() {
let v = ret.queue.dequeue().unwrap();
ret.on_queue[v] = false;
ret.relax(v);
}
ret
}
fn relax(&mut self, v: usize) {
for e in self.graph.adj(v) {
let w = e.to();
if self.dist_to[w] > self.dist_to[v] + e.weight() {
self.dist_to[w] = self.dist_to[v] + e.weight();
self.edge_to[w] = Some(e);
if !self.on_queue[w] {
self.queue.enqueue(w);
self.on_queue[w] = true;
}
}
            // emulate `cost++ % V == 0`: every V-th edge relaxation, check for a negative cycle
self.cost += 1;
if (self.cost - 1) % self.graph.v() == 0 {
self.find_negative_cycle();
if self.has_negative_cycle() {
return;
}
}
}
}
pub fn has_negative_cycle(&self) -> bool {
self.cycle.is_some()
}
pub fn negative_cycle(&self) -> ::std::vec::IntoIter<DirectedEdge> {
self.cycle.iter().flat_map(|e| e.clone()).collect::<Vec<DirectedEdge>>().into_iter()
}
fn find_negative_cycle(&mut self) {
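        // build a digraph from the current edge_to[] candidate edges; any directed
        // cycle in it corresponds to a negative-cost cycle reachable from the source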
let n = self.graph.v();
let mut spt = EdgeWeightedDigraph::new(n);
for v in 0 .. n {
if self.edge_to[v].is_some() {
spt.add_edge(self.edge_to[v].unwrap());
}
}
let finder = spt.cycle();
if finder.has_cycle() {
self.cycle = Some(finder.edges().collect());
} else {
self.cycle = None;
}
}
pub fn dist_to(&self, v: usize) -> f64 {
if self.has_negative_cycle() {
panic!("negative cost cycle exists")
} else {
self.dist_to[v]
}
}
pub fn has_path_to(&self, v: usize) -> bool {
self.dist_to[v] < f64::INFINITY
}
pub fn path_to(&self, v: usize) -> ::std::vec::IntoIter<DirectedEdge> {
if self.has_negative_cycle() {
panic!("negative cost cycle exists")
} else if !self.has_path_to(v) {
vec![].into_iter()
} else {
let mut path = Stack::new();
let mut e = self.edge_to[v];
while e.is_some() {
path.push(e.unwrap());
e = self.edge_to[e.unwrap().from()];
}
path.into_iter().collect::<Vec<DirectedEdge>>().into_iter()
}
}
}
impl EdgeWeightedDigraph {
/// Compute preorder and postorder for a digraph or edge-weighted digraph.
pub fn depth_first_order<'a>(&'a self) -> DepthFirstOrder<'a> {
DepthFirstOrder::new(self)
}
/// Dijkstra's algorithm. Computes the shortest path tree.
pub fn dijkstra_sp<'a>(&'a self, s: usize) -> DijkstraSP<'a> {
DijkstraSP::new(self, s)
}
/// Finds a directed cycle in an edge-weighted digraph.
pub fn cycle<'a>(&'a self) -> EdgeWeightedDirectedCycle<'a> {
EdgeWeightedDirectedCycle::new(self)
}
/// Compute topological ordering of a DAG or edge-weighted DAG.
pub fn topological(&self) -> Topological {
Topological::new(self)
}
/// Computes shortest paths in an edge-weighted acyclic digraph.
pub fn acyclic_sp<'a>(&'a self, s: usize) -> AcyclicSP<'a> {
AcyclicSP::new(self, s)
}
/// Bellman-Ford shortest path algorithm.
pub fn bellman_ford_sp<'a>(&'a self, s: usize) -> BellmanFordSP<'a> {
BellmanFordSP::new(self, s)
}
}
#[test]
fn test_dijkstra_shortest_path() {
let mut g = EdgeWeightedDigraph::new(6);
g.add_edge(DirectedEdge::new(0, 1, 7.0));
g.add_edge(DirectedEdge::new(1, 2, 10.0));
g.add_edge(DirectedEdge::new(0, 2, 9.0));
g.add_edge(DirectedEdge::new(0, 5, 14.0));
g.add_edge(DirectedEdge::new(1, 3, 15.0));
g.add_edge(DirectedEdge::new(2, 5, 2.0));
g.add_edge(DirectedEdge::new(2, 3, 11.0));
g.add_edge(DirectedEdge::new(4, 5, 9.0));
g.add_edge(DirectedEdge::new(3, 4, 6.0));
    // this self-loop (2 -> 2) makes the graph a non-DAG
g.add_edge(DirectedEdge::new(2, 2, 1.0));
assert_eq!(20.0, g.dijkstra_sp(0).dist_to(3));
assert_eq!(26.0, g.dijkstra_sp(0).path_to(4).map(|e| e.weight()).sum());
assert!(g.dijkstra_sp(0).check());
}
#[test]
fn test_cyclic_edge_weighted_directed_graph() {
let mut g = EdgeWeightedDigraph::new(4);
g.add_edge(DirectedEdge::new(0, 1, 0.5));
g.add_edge(DirectedEdge::new(0, 2, 0.5));
g.add_edge(DirectedEdge::new(1, 2, 0.5));
g.add_edge(DirectedEdge::new(2, 3, 0.5));
g.add_edge(DirectedEdge::new(3, 1, 0.5));
let cycle = g.cycle();
assert!(cycle.has_cycle());
assert_eq!(3, cycle.edges().count());
assert!(cycle.check());
}
#[test]
fn test_acyclic_shortest_path() {
let mut g = EdgeWeightedDigraph::new(6);
g.add_edge(DirectedEdge::new(0, 1, 7.0));
g.add_edge(DirectedEdge::new(1, 2, 10.0));
g.add_edge(DirectedEdge::new(0, 2, 9.0));
g.add_edge(DirectedEdge::new(0, 5, 14.0));
g.add_edge(DirectedEdge::new(1, 3, 15.0));
g.add_edge(DirectedEdge::new(2, 5, 2.0));
g.add_edge(DirectedEdge::new(2, 3, 11.0));
g.add_edge(DirectedEdge::new(4, 5, 9.0));
g.add_edge(DirectedEdge::new(3, 4, 6.0));
assert!(g.depth_first_order().check());
assert_eq!(20.0, g.acyclic_sp(0).dist_to(3));
assert_eq!(26.0, g.acyclic_sp(0).path_to(4).map(|e| e.weight()).sum());
}
#[test]
fn test_negative_weight_shortest_path() {
let mut g = EdgeWeightedDigraph::new(6);
g.add_edge(DirectedEdge::new(0, 1, 7.0));
g.add_edge(DirectedEdge::new(1, 2, 10.0));
g.add_edge(DirectedEdge::new(0, 2, 9.0));
g.add_edge(DirectedEdge::new(0, 5, 14.0));
g.add_edge(DirectedEdge::new(1, 3, 15.0));
g.add_edge(DirectedEdge::new(2, 5, 2.0));
g.add_edge(DirectedEdge::new(2, 3, 11.0));
g.add_edge(DirectedEdge::new(4, 5, 9.0));
g.add_edge(DirectedEdge::new(3, 4, 6.0));
assert_eq!(20.0, g.bellman_ford_sp(0).dist_to(3));
assert_eq!(26.0, g.bellman_ford_sp(0).path_to(4).map(|e| e.weight()).sum());
g.add_edge(DirectedEdge::new(0, 3, -5.0));
assert_eq!(1.0, g.bellman_ford_sp(0).dist_to(4));
assert_eq!(2, g.bellman_ford_sp(0).path_to(4).count());
assert_eq!(1.0, g.bellman_ford_sp(0).path_to(4).map(|e| e.weight()).sum());
}<|fim▁end|> | fn check(&self) -> bool { |
<|file_name|>CodeGeneratorInspector.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Copyright (c) 2012 Intel Corporation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os.path
import sys
import string
import optparse
import re
try:
import json
except ImportError:
import simplejson as json
import CodeGeneratorInspectorStrings
DOMAIN_DEFINE_NAME_MAP = {
"Database": "SQL_DATABASE",
"Debugger": "JAVASCRIPT_DEBUGGER",
"DOMDebugger": "JAVASCRIPT_DEBUGGER",
"FileSystem": "FILE_SYSTEM",
"IndexedDB": "INDEXED_DATABASE",
"Profiler": "JAVASCRIPT_DEBUGGER",
"Worker": "WORKERS",
}
# Manually-filled map of type name replacements.
TYPE_NAME_FIX_MAP = {
"RGBA": "Rgba", # RGBA is reported to be conflicting with a define name in Windows CE.
"": "Empty",
}
TYPES_WITH_RUNTIME_CAST_SET = frozenset(["Runtime.RemoteObject", "Runtime.PropertyDescriptor", "Runtime.InternalPropertyDescriptor",
"Debugger.FunctionDetails", "Debugger.CallFrame",
"Canvas.TraceLog", "Canvas.ResourceInfo", "Canvas.ResourceState",
# This should be a temporary hack. TimelineEvent should be created via generated C++ API.
"Timeline.TimelineEvent"])
TYPES_WITH_OPEN_FIELD_LIST_SET = frozenset(["Timeline.TimelineEvent",
# InspectorStyleSheet not only creates this property but wants to read it and modify it.
"CSS.CSSProperty",
# InspectorResourceAgent needs to update mime-type.
"Network.Response"])
EXACTLY_INT_SUPPORTED = False
cmdline_parser = optparse.OptionParser()
cmdline_parser.add_option("--output_h_dir")
cmdline_parser.add_option("--output_cpp_dir")
cmdline_parser.add_option("--output_js_dir")
cmdline_parser.add_option("--write_always", action="store_true")
cmdline_parser.add_option("--no_verification", action="store_true")
try:
arg_options, arg_values = cmdline_parser.parse_args()
if (len(arg_values) != 1):
raise Exception("Exactly one plain argument expected (found %s)" % len(arg_values))
input_json_filename = arg_values[0]
output_header_dirname = arg_options.output_h_dir
output_cpp_dirname = arg_options.output_cpp_dir
output_js_dirname = arg_options.output_js_dir
write_always = arg_options.write_always
verification = not arg_options.no_verification
if not output_header_dirname:
raise Exception("Output .h directory must be specified")
if not output_cpp_dirname:
raise Exception("Output .cpp directory must be specified")
if not output_js_dirname:
raise Exception("Output .js directory must be specified")
except Exception:
# Work with python 2 and 3 http://docs.python.org/py3k/howto/pyporting.html
exc = sys.exc_info()[1]
sys.stderr.write("Failed to parse command-line arguments: %s\n\n" % exc)
sys.stderr.write("Usage: <script> Inspector.json --output_h_dir <output_header_dir> --output_cpp_dir <output_cpp_dir> --output_js_dir <output_js_dir> [--write_always] [--no_verification]\n")
exit(1)
def dash_to_camelcase(word):
return ''.join(x.capitalize() or '-' for x in word.split('-'))
def fix_camel_case(name):
refined = re.sub(r'-(\w)', lambda pat: pat.group(1).upper(), name)
refined = to_title_case(refined)
return re.sub(r'(?i)HTML|XML|WML|API|GC|XHR|DOM|CSS', lambda pat: pat.group(0).upper(), refined)
def to_title_case(name):
return name[:1].upper() + name[1:]
class Capitalizer:
@staticmethod
def lower_camel_case_to_upper(str):
if len(str) > 0 and str[0].islower():
str = str[0].upper() + str[1:]
return str
@staticmethod
def upper_camel_case_to_lower(str):
pos = 0
while pos < len(str) and str[pos].isupper():
pos += 1
if pos == 0:
return str
if pos == 1:
return str[0].lower() + str[1:]
if pos < len(str):
pos -= 1
possible_abbreviation = str[0:pos]
if possible_abbreviation not in Capitalizer.ABBREVIATION:
raise Exception("Unknown abbreviation %s" % possible_abbreviation)
str = possible_abbreviation.lower() + str[pos:]
return str
@staticmethod
def camel_case_to_capitalized_with_underscores(str):
if len(str) == 0:
return str
output = Capitalizer.split_camel_case_(str)
return "_".join(output).upper()
@staticmethod
def split_camel_case_(str):
output = []
pos_being = 0
pos = 1
has_oneletter = False
while pos < len(str):
if str[pos].isupper():
output.append(str[pos_being:pos].upper())
if pos - pos_being == 1:
has_oneletter = True
pos_being = pos
pos += 1
output.append(str[pos_being:])
if has_oneletter:
array_pos = 0
while array_pos < len(output) - 1:
if len(output[array_pos]) == 1:
array_pos_end = array_pos + 1
while array_pos_end < len(output) and len(output[array_pos_end]) == 1:
array_pos_end += 1
if array_pos_end - array_pos > 1:
possible_abbreviation = "".join(output[array_pos:array_pos_end])
if possible_abbreviation.upper() in Capitalizer.ABBREVIATION:
output[array_pos:array_pos_end] = [possible_abbreviation]
else:
array_pos = array_pos_end - 1
array_pos += 1
return output
ABBREVIATION = frozenset(["XHR", "DOM", "CSS"])
VALIDATOR_IFDEF_NAME = "!ASSERT_DISABLED"
class DomainNameFixes:
@classmethod
def get_fixed_data(cls, domain_name):
field_name_res = Capitalizer.upper_camel_case_to_lower(domain_name) + "Agent"
class Res(object):
skip_js_bind = domain_name in cls.skip_js_bind_domains
agent_field_name = field_name_res
@staticmethod
def get_guard():
if domain_name in DOMAIN_DEFINE_NAME_MAP:
define_name = DOMAIN_DEFINE_NAME_MAP[domain_name]
class Guard:
@staticmethod
def generate_open(output):
output.append("#if ENABLE(%s)\n" % define_name)
@staticmethod
def generate_close(output):
output.append("#endif // ENABLE(%s)\n" % define_name)
return Guard
return Res
skip_js_bind_domains = set(["DOMDebugger"])
class RawTypes(object):
@staticmethod
def get(json_type):
if json_type == "boolean":
return RawTypes.Bool
elif json_type == "string":
return RawTypes.String
elif json_type == "array":
return RawTypes.Array
elif json_type == "object":
return RawTypes.Object
elif json_type == "integer":
return RawTypes.Int
elif json_type == "number":
return RawTypes.Number
elif json_type == "any":
return RawTypes.Any
else:
raise Exception("Unknown type: %s" % json_type)
    # For output parameters, all values are passed by pointer except RefPtr-based types.
class OutputPassModel:
class ByPointer:
@staticmethod
def get_argument_prefix():
return "&"
@staticmethod
def get_parameter_type_suffix():
return "*"
class ByReference:
@staticmethod
def get_argument_prefix():
return ""
@staticmethod
def get_parameter_type_suffix():
return "&"
class BaseType(object):
need_internal_runtime_cast_ = False
@classmethod
def request_raw_internal_runtime_cast(cls):
if not cls.need_internal_runtime_cast_:
cls.need_internal_runtime_cast_ = True
@classmethod
def get_raw_validator_call_text(cls):
return "RuntimeCastHelper::assertType<InspectorValue::Type%s>" % cls.get_validate_method_params().template_type
class String(BaseType):
@staticmethod
def get_getter_name():
return "String"
get_setter_name = get_getter_name
@staticmethod
def get_c_initializer():
return "\"\""
@staticmethod
def get_js_bind_type():
return "string"
@staticmethod
def get_validate_method_params():
class ValidateMethodParams:
template_type = "String"
return ValidateMethodParams
@staticmethod
def get_output_pass_model():
return RawTypes.OutputPassModel.ByPointer
@staticmethod
def is_heavy_value():
return True
@staticmethod
def get_array_item_raw_c_type_text():
return "String"
@staticmethod
def get_raw_type_model():
return TypeModel.String
class Int(BaseType):
@staticmethod
def get_getter_name():
return "Int"
@staticmethod
def get_setter_name():
return "Number"
@staticmethod
def get_c_initializer():
return "0"
@staticmethod
def get_js_bind_type():
return "number"
@classmethod
def get_raw_validator_call_text(cls):
return "RuntimeCastHelper::assertInt"
@staticmethod
def get_output_pass_model():
return RawTypes.OutputPassModel.ByPointer
@staticmethod
def is_heavy_value():
return False
@staticmethod
def get_array_item_raw_c_type_text():
return "int"
@staticmethod
def get_raw_type_model():
return TypeModel.Int
class Number(BaseType):
@staticmethod
def get_getter_name():
return "Double"
@staticmethod
def get_setter_name():
return "Number"
@staticmethod
def get_c_initializer():
return "0"
@staticmethod
def get_js_bind_type():
return "number"
@staticmethod
def get_validate_method_params():
class ValidateMethodParams:
template_type = "Number"
return ValidateMethodParams
@staticmethod
def get_output_pass_model():
return RawTypes.OutputPassModel.ByPointer
@staticmethod
def is_heavy_value():
return False
@staticmethod
def get_array_item_raw_c_type_text():
return "double"
@staticmethod
def get_raw_type_model():
return TypeModel.Number
class Bool(BaseType):
@staticmethod
def get_getter_name():
return "Boolean"
get_setter_name = get_getter_name
@staticmethod
def get_c_initializer():
return "false"
@staticmethod
def get_js_bind_type():
return "boolean"
@staticmethod
def get_validate_method_params():
class ValidateMethodParams:
template_type = "Boolean"
return ValidateMethodParams
@staticmethod
def get_output_pass_model():
return RawTypes.OutputPassModel.ByPointer
@staticmethod
def is_heavy_value():
return False
@staticmethod
def get_array_item_raw_c_type_text():
return "bool"
@staticmethod
def get_raw_type_model():
return TypeModel.Bool
class Object(BaseType):
@staticmethod
def get_getter_name():
return "Object"
@staticmethod
def get_setter_name():
return "Value"
@staticmethod
def get_c_initializer():
return "InspectorObject::create()"
@staticmethod
def get_js_bind_type():
return "object"
@staticmethod
def get_output_argument_prefix():
return ""
@staticmethod
def get_validate_method_params():
class ValidateMethodParams:
template_type = "Object"
return ValidateMethodParams
@staticmethod
def get_output_pass_model():
return RawTypes.OutputPassModel.ByReference
@staticmethod
def is_heavy_value():
return True
@staticmethod
def get_array_item_raw_c_type_text():
return "InspectorObject"
@staticmethod
def get_raw_type_model():
return TypeModel.Object
class Any(BaseType):
@staticmethod
def get_getter_name():
return "Value"
get_setter_name = get_getter_name
@staticmethod
def get_c_initializer():
raise Exception("Unsupported")
@staticmethod
def get_js_bind_type():
raise Exception("Unsupported")
@staticmethod
def get_raw_validator_call_text():
return "RuntimeCastHelper::assertAny"
@staticmethod
def get_output_pass_model():
return RawTypes.OutputPassModel.ByReference
@staticmethod
def is_heavy_value():
return True
@staticmethod
def get_array_item_raw_c_type_text():
return "InspectorValue"
@staticmethod
def get_raw_type_model():
return TypeModel.Any
class Array(BaseType):
@staticmethod
def get_getter_name():
return "Array"
@staticmethod
def get_setter_name():
return "Value"
@staticmethod
def get_c_initializer():
return "InspectorArray::create()"
@staticmethod
def get_js_bind_type():
return "object"
@staticmethod
def get_output_argument_prefix():
return ""
@staticmethod
def get_validate_method_params():
class ValidateMethodParams:
template_type = "Array"
return ValidateMethodParams
@staticmethod
def get_output_pass_model():
return RawTypes.OutputPassModel.ByReference
@staticmethod
def is_heavy_value():
return True
@staticmethod
def get_array_item_raw_c_type_text():
return "InspectorArray"
@staticmethod
def get_raw_type_model():
return TypeModel.Array
def replace_right_shift(input_str):
return input_str.replace(">>", "> >")
class CommandReturnPassModel:
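    # Describes how a command result is passed back from an agent method:
    # ByReference (RefPtr-based values written through a reference), ByPointer
    # (plain value types), or OptOutput (optional results wrapped in
    # TypeBuilder::OptOutput so the callee can report whether a value was set).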
class ByReference:
def __init__(self, var_type, set_condition):
self.var_type = var_type
self.set_condition = set_condition
def get_return_var_type(self):
return self.var_type
@staticmethod
def get_output_argument_prefix():
return ""
@staticmethod
def get_output_to_raw_expression():
return "%s"
def get_output_parameter_type(self):
return self.var_type + "&"
def get_set_return_condition(self):
return self.set_condition
class ByPointer:
def __init__(self, var_type):
self.var_type = var_type
def get_return_var_type(self):
return self.var_type
@staticmethod
def get_output_argument_prefix():
return "&"
@staticmethod
def get_output_to_raw_expression():
return "%s"
def get_output_parameter_type(self):
return self.var_type + "*"
@staticmethod
def get_set_return_condition():
return None
class OptOutput:
def __init__(self, var_type):
self.var_type = var_type
def get_return_var_type(self):
return "TypeBuilder::OptOutput<%s>" % self.var_type
@staticmethod
def get_output_argument_prefix():
return "&"
@staticmethod
def get_output_to_raw_expression():
return "%s.getValue()"
def get_output_parameter_type(self):
return "TypeBuilder::OptOutput<%s>*" % self.var_type
@staticmethod
def get_set_return_condition():
return "%s.isAssigned()"
class TypeModel:
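    # Maps protocol types to the C++ types used for input parameters, event
    # setters and command return values, including their optional variants.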
class RefPtrBased(object):
def __init__(self, class_name):
self.class_name = class_name
self.optional = False
def get_optional(self):
result = TypeModel.RefPtrBased(self.class_name)
result.optional = True
return result
def get_command_return_pass_model(self):
if self.optional:
set_condition = "%s"
else:
set_condition = None
return CommandReturnPassModel.ByReference(replace_right_shift("RefPtr<%s>" % self.class_name), set_condition)
def get_input_param_type_text(self):
return replace_right_shift("PassRefPtr<%s>" % self.class_name)
@staticmethod
def get_event_setter_expression_pattern():
return "%s"
class Enum(object):
def __init__(self, base_type_name):
self.type_name = base_type_name + "::Enum"
def get_optional(base_self):
class EnumOptional:
@classmethod
def get_optional(cls):
return cls
@staticmethod
def get_command_return_pass_model():
return CommandReturnPassModel.OptOutput(base_self.type_name)
@staticmethod
def get_input_param_type_text():
return base_self.type_name + "*"
@staticmethod
def get_event_setter_expression_pattern():
raise Exception("TODO")
return EnumOptional
def get_command_return_pass_model(self):
return CommandReturnPassModel.ByPointer(self.type_name)
def get_input_param_type_text(self):
return self.type_name
@staticmethod
def get_event_setter_expression_pattern():
return "%s"
class ValueType(object):
def __init__(self, type_name, is_heavy):
self.type_name = type_name
self.is_heavy = is_heavy
def get_optional(self):
return self.ValueOptional(self)
def get_command_return_pass_model(self):
return CommandReturnPassModel.ByPointer(self.type_name)
def get_input_param_type_text(self):
if self.is_heavy:
return "const %s&" % self.type_name
else:
return self.type_name
def get_opt_output_type_(self):
return self.type_name
@staticmethod
def get_event_setter_expression_pattern():
return "%s"
class ValueOptional:
def __init__(self, base):
self.base = base
def get_optional(self):
return self
def get_command_return_pass_model(self):
return CommandReturnPassModel.OptOutput(self.base.get_opt_output_type_())
def get_input_param_type_text(self):
return "const %s* const" % self.base.type_name
@staticmethod
def get_event_setter_expression_pattern():
return "*%s"
class ExactlyInt(ValueType):
def __init__(self):
TypeModel.ValueType.__init__(self, "int", False)
def get_input_param_type_text(self):
return "TypeBuilder::ExactlyInt"
def get_opt_output_type_(self):
return "TypeBuilder::ExactlyInt"
@classmethod
def init_class(cls):
cls.Bool = cls.ValueType("bool", False)
if EXACTLY_INT_SUPPORTED:
cls.Int = cls.ExactlyInt()
else:
cls.Int = cls.ValueType("int", False)
cls.Number = cls.ValueType("double", False)
        cls.String = cls.ValueType("String", True)
cls.Object = cls.RefPtrBased("InspectorObject")
cls.Array = cls.RefPtrBased("InspectorArray")
cls.Any = cls.RefPtrBased("InspectorValue")
TypeModel.init_class()
# Collection of InspectorObject class methods that are likely to be overloaded in the generated class.
# We must explicitly import all overloaded methods or they won't be available to the user.
INSPECTOR_OBJECT_SETTER_NAMES = frozenset(["setValue", "setBoolean", "setNumber", "setString", "setObject", "setArray"])
def fix_type_name(json_name):
if json_name in TYPE_NAME_FIX_MAP:
fixed = TYPE_NAME_FIX_MAP[json_name]
class Result(object):
class_name = fixed
@staticmethod
def output_comment(writer):
writer.newline("// Type originally was named '%s'.\n" % json_name)
else:
class Result(object):
class_name = json_name
@staticmethod
def output_comment(writer):
pass
return Result
class Writer:
def __init__(self, output, indent):
self.output = output
self.indent = indent
def newline(self, str):
if (self.indent):
self.output.append(self.indent)
self.output.append(str)
def append(self, str):
self.output.append(str)
def newline_multiline(self, str):
parts = str.split('\n')
self.newline(parts[0])
for p in parts[1:]:
self.output.append('\n')
if p:
self.newline(p)
def append_multiline(self, str):
parts = str.split('\n')
self.append(parts[0])
for p in parts[1:]:
self.output.append('\n')
if p:
self.newline(p)
def get_indent(self):
return self.indent
def get_indented(self, additional_indent):
return Writer(self.output, self.indent + additional_indent)
def insert_writer(self, additional_indent):
new_output = []
self.output.append(new_output)
return Writer(new_output, self.indent + additional_indent)
class EnumConstants:
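    # Interns enum string constants: each distinct string gets a stable index
    # into a single shared table emitted by get_enum_constant_code().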
map_ = {}
constants_ = []
@classmethod
def add_constant(cls, value):
if value in cls.map_:
return cls.map_[value]
else:
pos = len(cls.map_)
cls.map_[value] = pos
cls.constants_.append(value)
return pos
@classmethod
def get_enum_constant_code(cls):
output = []
for item in cls.constants_:
output.append(" \"" + item + "\"")
return ",\n".join(output) + "\n"
# Typebuilder code is generated in several passes: first typedefs, then other classes.
# Manual pass management is needed because we cannot have forward declarations for typedefs.
class TypeBuilderPass:
TYPEDEF = "typedef"
MAIN = "main"
class TypeBindings:
@staticmethod
def create_named_type_declaration(json_typable, context_domain_name, type_data):
json_type = type_data.get_json_type()
class Helper:
is_ad_hoc = False
full_name_prefix_for_use = "TypeBuilder::" + context_domain_name + "::"
full_name_prefix_for_impl = "TypeBuilder::" + context_domain_name + "::"
@staticmethod
def write_doc(writer):
if "description" in json_type:
writer.newline("/* ")
writer.append(json_type["description"])
writer.append(" */\n")
@staticmethod
def add_to_forward_listener(forward_listener):
forward_listener.add_type_data(type_data)
fixed_type_name = fix_type_name(json_type["id"])
return TypeBindings.create_type_declaration_(json_typable, context_domain_name, fixed_type_name, Helper)
@staticmethod
def create_ad_hoc_type_declaration(json_typable, context_domain_name, ad_hoc_type_context):
class Helper:
is_ad_hoc = True
full_name_prefix_for_use = ad_hoc_type_context.container_relative_name_prefix
full_name_prefix_for_impl = ad_hoc_type_context.container_full_name_prefix
@staticmethod
def write_doc(writer):
pass
@staticmethod
def add_to_forward_listener(forward_listener):
pass
fixed_type_name = ad_hoc_type_context.get_type_name_fix()
return TypeBindings.create_type_declaration_(json_typable, context_domain_name, fixed_type_name, Helper)
@staticmethod
def create_type_declaration_(json_typable, context_domain_name, fixed_type_name, helper):
if json_typable["type"] == "string":
if "enum" in json_typable:
class EnumBinding:
need_user_runtime_cast_ = False
need_internal_runtime_cast_ = False
@classmethod
def resolve_inner(cls, resolve_context):
pass
@classmethod
def request_user_runtime_cast(cls, request):
if request:
cls.need_user_runtime_cast_ = True
request.acknowledge()
@classmethod
def request_internal_runtime_cast(cls):
cls.need_internal_runtime_cast_ = True
@classmethod
def get_code_generator(enum_binding_cls):
#FIXME: generate ad-hoc enums too once we figure out how to better implement them in C++.
comment_out = helper.is_ad_hoc
class CodeGenerator:
@staticmethod
def generate_type_builder(writer, generate_context):
enum = json_typable["enum"]
helper.write_doc(writer)
enum_name = fixed_type_name.class_name
fixed_type_name.output_comment(writer)
writer.newline("struct ")
writer.append(enum_name)
writer.append(" {\n")
writer.newline(" enum Enum {\n")
for enum_item in enum:
enum_pos = EnumConstants.add_constant(enum_item)
item_c_name = fix_camel_case(enum_item)
if item_c_name in TYPE_NAME_FIX_MAP:
item_c_name = TYPE_NAME_FIX_MAP[item_c_name]
writer.newline(" ")
writer.append(item_c_name)
writer.append(" = ")
writer.append("%s" % enum_pos)
writer.append(",\n")
writer.newline(" };\n")
if enum_binding_cls.need_user_runtime_cast_:
raise Exception("Not yet implemented")
if enum_binding_cls.need_internal_runtime_cast_:
writer.append("#if %s\n" % VALIDATOR_IFDEF_NAME)
writer.newline(" static void assertCorrectValue(InspectorValue* value);\n")
writer.append("#endif // %s\n" % VALIDATOR_IFDEF_NAME)
validator_writer = generate_context.validator_writer
domain_fixes = DomainNameFixes.get_fixed_data(context_domain_name)
domain_guard = domain_fixes.get_guard()
if domain_guard:
domain_guard.generate_open(validator_writer)
validator_writer.newline("void %s%s::assertCorrectValue(InspectorValue* value)\n" % (helper.full_name_prefix_for_impl, enum_name))
validator_writer.newline("{\n")
validator_writer.newline(" WTF::String s;\n")
validator_writer.newline(" bool cast_res = value->asString(&s);\n")
validator_writer.newline(" ASSERT(cast_res);\n")
if len(enum) > 0:
condition_list = []
for enum_item in enum:
enum_pos = EnumConstants.add_constant(enum_item)
condition_list.append("s == \"%s\"" % enum_item)
validator_writer.newline(" ASSERT(%s);\n" % " || ".join(condition_list))
validator_writer.newline("}\n")
if domain_guard:
domain_guard.generate_close(validator_writer)
validator_writer.newline("\n\n")
writer.newline("}; // struct ")
writer.append(enum_name)
writer.append("\n\n")
@staticmethod
def register_use(forward_listener):
pass
@staticmethod
def get_generate_pass_id():
return TypeBuilderPass.MAIN
return CodeGenerator
@classmethod
def get_validator_call_text(cls):
return helper.full_name_prefix_for_use + fixed_type_name.class_name + "::assertCorrectValue"
@classmethod
def get_array_item_c_type_text(cls):
return helper.full_name_prefix_for_use + fixed_type_name.class_name + "::Enum"
@staticmethod
def get_setter_value_expression_pattern():
return "TypeBuilder::getEnumConstantValue(%s)"
@staticmethod
def reduce_to_raw_type():
return RawTypes.String
@staticmethod
def get_type_model():
return TypeModel.Enum(helper.full_name_prefix_for_use + fixed_type_name.class_name)
return EnumBinding
else:
if helper.is_ad_hoc:
class PlainString:
@classmethod
def resolve_inner(cls, resolve_context):
pass
@staticmethod
def request_user_runtime_cast(request):
raise Exception("Unsupported")
@staticmethod
def request_internal_runtime_cast():
pass
@staticmethod
def get_code_generator():
return None
@classmethod
def get_validator_call_text(cls):
return RawTypes.String.get_raw_validator_call_text()
@staticmethod
def reduce_to_raw_type():
return RawTypes.String
@staticmethod
def get_type_model():
return TypeModel.String
@staticmethod
def get_setter_value_expression_pattern():
return None
@classmethod
def get_array_item_c_type_text(cls):
return cls.reduce_to_raw_type().get_array_item_raw_c_type_text()
return PlainString
else:
class TypedefString:
@classmethod
def resolve_inner(cls, resolve_context):
pass
@staticmethod
def request_user_runtime_cast(request):
raise Exception("Unsupported")
@staticmethod
def request_internal_runtime_cast():
RawTypes.String.request_raw_internal_runtime_cast()
@staticmethod
def get_code_generator():
class CodeGenerator:
@staticmethod
def generate_type_builder(writer, generate_context):
helper.write_doc(writer)
fixed_type_name.output_comment(writer)
writer.newline("typedef String ")
writer.append(fixed_type_name.class_name)
writer.append(";\n\n")
@staticmethod
def register_use(forward_listener):
pass
@staticmethod
def get_generate_pass_id():
return TypeBuilderPass.TYPEDEF
return CodeGenerator
@classmethod
def get_validator_call_text(cls):
return RawTypes.String.get_raw_validator_call_text()
@staticmethod
def reduce_to_raw_type():
return RawTypes.String
@staticmethod
def get_type_model():
return TypeModel.ValueType("%s%s" % (helper.full_name_prefix_for_use, fixed_type_name.class_name), True)
@staticmethod
def get_setter_value_expression_pattern():
return None
@classmethod
def get_array_item_c_type_text(cls):
return "const %s%s&" % (helper.full_name_prefix_for_use, fixed_type_name.class_name)
return TypedefString
elif json_typable["type"] == "object":
if "properties" in json_typable:
class ClassBinding:
resolve_data_ = None
need_user_runtime_cast_ = False
need_internal_runtime_cast_ = False
@classmethod
def resolve_inner(cls, resolve_context):
if cls.resolve_data_:
return
properties = json_typable["properties"]
main = []
optional = []
ad_hoc_type_list = []
for prop in properties:
prop_name = prop["name"]
ad_hoc_type_context = cls.AdHocTypeContextImpl(prop_name, fixed_type_name.class_name, resolve_context, ad_hoc_type_list, helper.full_name_prefix_for_impl)
binding = resolve_param_type(prop, context_domain_name, ad_hoc_type_context)
code_generator = binding.get_code_generator()
if code_generator:
code_generator.register_use(resolve_context.forward_listener)
class PropertyData:
param_type_binding = binding
p = prop
if prop.get("optional"):
optional.append(PropertyData)
else:
main.append(PropertyData)
class ResolveData:
main_properties = main
optional_properties = optional
ad_hoc_types = ad_hoc_type_list
cls.resolve_data_ = ResolveData
for ad_hoc in ad_hoc_type_list:
ad_hoc.resolve_inner(resolve_context)
@classmethod
def request_user_runtime_cast(cls, request):
if not request:
return
cls.need_user_runtime_cast_ = True
request.acknowledge()
cls.request_internal_runtime_cast()
@classmethod
def request_internal_runtime_cast(cls):
if cls.need_internal_runtime_cast_:
return
cls.need_internal_runtime_cast_ = True
for p in cls.resolve_data_.main_properties:
p.param_type_binding.request_internal_runtime_cast()
for p in cls.resolve_data_.optional_properties:
p.param_type_binding.request_internal_runtime_cast()
@classmethod
def get_code_generator(class_binding_cls):
class CodeGenerator:
@classmethod
def generate_type_builder(cls, writer, generate_context):
resolve_data = class_binding_cls.resolve_data_
helper.write_doc(writer)
class_name = fixed_type_name.class_name
is_open_type = (context_domain_name + "." + class_name) in TYPES_WITH_OPEN_FIELD_LIST_SET
fixed_type_name.output_comment(writer)
writer.newline("class ")
writer.append(class_name)
writer.append(" : public ")
if is_open_type:
writer.append("InspectorObject")
else:
writer.append("InspectorObjectBase")
writer.append(" {\n")
writer.newline("public:\n")
ad_hoc_type_writer = writer.insert_writer(" ")
for ad_hoc_type in resolve_data.ad_hoc_types:
code_generator = ad_hoc_type.get_code_generator()
if code_generator:
code_generator.generate_type_builder(ad_hoc_type_writer, generate_context)
writer.newline_multiline(
""" enum {
NoFieldsSet = 0,
""")
state_enum_items = []
if len(resolve_data.main_properties) > 0:
pos = 0
for prop_data in resolve_data.main_properties:
item_name = Capitalizer.lower_camel_case_to_upper(prop_data.p["name"]) + "Set"
state_enum_items.append(item_name)
writer.newline(" %s = 1 << %s,\n" % (item_name, pos))
pos += 1
all_fields_set_value = "(" + (" | ".join(state_enum_items)) + ")"
else:
all_fields_set_value = "0"
writer.newline_multiline(CodeGeneratorInspectorStrings.class_binding_builder_part_1
% (all_fields_set_value, class_name, class_name))
pos = 0
for prop_data in resolve_data.main_properties:
prop_name = prop_data.p["name"]
param_type_binding = prop_data.param_type_binding
param_raw_type = param_type_binding.reduce_to_raw_type()
writer.newline_multiline(CodeGeneratorInspectorStrings.class_binding_builder_part_2
% (state_enum_items[pos],
Capitalizer.lower_camel_case_to_upper(prop_name),
param_type_binding.get_type_model().get_input_param_type_text(),
state_enum_items[pos], prop_name,
param_raw_type.get_setter_name(), prop_name,
format_setter_value_expression(param_type_binding, "value"),
state_enum_items[pos]))
pos += 1
writer.newline_multiline(CodeGeneratorInspectorStrings.class_binding_builder_part_3
% (class_name, class_name, class_name, class_name, class_name))
writer.newline(" /*\n")
writer.newline(" * Synthetic constructor:\n")
writer.newline(" * RefPtr<%s> result = %s::create()" % (class_name, class_name))
for prop_data in resolve_data.main_properties:
writer.append_multiline("\n * .set%s(...)" % Capitalizer.lower_camel_case_to_upper(prop_data.p["name"]))
writer.append_multiline(";\n */\n")
writer.newline_multiline(CodeGeneratorInspectorStrings.class_binding_builder_part_4)
writer.newline(" typedef TypeBuilder::StructItemTraits ItemTraits;\n")
for prop_data in resolve_data.optional_properties:
prop_name = prop_data.p["name"]
param_type_binding = prop_data.param_type_binding
setter_name = "set%s" % Capitalizer.lower_camel_case_to_upper(prop_name)
writer.append_multiline("\n void %s" % setter_name)
writer.append("(%s value)\n" % param_type_binding.get_type_model().get_input_param_type_text())
writer.newline(" {\n")
writer.newline(" this->set%s(\"%s\", %s);\n"
% (param_type_binding.reduce_to_raw_type().get_setter_name(), prop_data.p["name"],
format_setter_value_expression(param_type_binding, "value")))
writer.newline(" }\n")
if setter_name in INSPECTOR_OBJECT_SETTER_NAMES:
writer.newline(" using InspectorObjectBase::%s;\n\n" % setter_name)
if class_binding_cls.need_user_runtime_cast_:
writer.newline(" static PassRefPtr<%s> runtimeCast(PassRefPtr<InspectorValue> value)\n" % class_name)
writer.newline(" {\n")
writer.newline(" RefPtr<InspectorObject> object;\n")
writer.newline(" bool castRes = value->asObject(&object);\n")
writer.newline(" ASSERT_UNUSED(castRes, castRes);\n")
writer.append("#if %s\n" % VALIDATOR_IFDEF_NAME)
writer.newline(" assertCorrectValue(object.get());\n")
writer.append("#endif // %s\n" % VALIDATOR_IFDEF_NAME)
writer.newline(" COMPILE_ASSERT(sizeof(%s) == sizeof(InspectorObjectBase), type_cast_problem);\n" % class_name)
writer.newline(" return static_cast<%s*>(static_cast<InspectorObjectBase*>(object.get()));\n" % class_name)
writer.newline(" }\n")
writer.append("\n")
if class_binding_cls.need_internal_runtime_cast_:
writer.append("#if %s\n" % VALIDATOR_IFDEF_NAME)
writer.newline(" static void assertCorrectValue(InspectorValue* value);\n")
writer.append("#endif // %s\n" % VALIDATOR_IFDEF_NAME)
closed_field_set = (context_domain_name + "." + class_name) not in TYPES_WITH_OPEN_FIELD_LIST_SET
validator_writer = generate_context.validator_writer
domain_fixes = DomainNameFixes.get_fixed_data(context_domain_name)
domain_guard = domain_fixes.get_guard()
if domain_guard:
domain_guard.generate_open(validator_writer)
validator_writer.newline("void %s%s::assertCorrectValue(InspectorValue* value)\n" % (helper.full_name_prefix_for_impl, class_name))
validator_writer.newline("{\n")
validator_writer.newline(" RefPtr<InspectorObject> object;\n")
validator_writer.newline(" bool castRes = value->asObject(&object);\n")
validator_writer.newline(" ASSERT_UNUSED(castRes, castRes);\n")
for prop_data in resolve_data.main_properties:
validator_writer.newline(" {\n")
it_name = "%sPos" % prop_data.p["name"]
validator_writer.newline(" InspectorObject::iterator %s;\n" % it_name)
validator_writer.newline(" %s = object->find(\"%s\");\n" % (it_name, prop_data.p["name"]))
validator_writer.newline(" ASSERT(%s != object->end());\n" % it_name)
validator_writer.newline(" %s(%s->value.get());\n" % (prop_data.param_type_binding.get_validator_call_text(), it_name))
validator_writer.newline(" }\n")
if closed_field_set:
validator_writer.newline(" int foundPropertiesCount = %s;\n" % len(resolve_data.main_properties))
for prop_data in resolve_data.optional_properties:
validator_writer.newline(" {\n")
it_name = "%sPos" % prop_data.p["name"]
validator_writer.newline(" InspectorObject::iterator %s;\n" % it_name)
validator_writer.newline(" %s = object->find(\"%s\");\n" % (it_name, prop_data.p["name"]))
validator_writer.newline(" if (%s != object->end()) {\n" % it_name)
validator_writer.newline(" %s(%s->value.get());\n" % (prop_data.param_type_binding.get_validator_call_text(), it_name))
if closed_field_set:
validator_writer.newline(" ++foundPropertiesCount;\n")
validator_writer.newline(" }\n")
validator_writer.newline(" }\n")
if closed_field_set:
validator_writer.newline(" if (foundPropertiesCount != object->size()) {\n")
validator_writer.newline(" FATAL(\"Unexpected properties in object: %s\\n\", object->toJSONString().ascii().data());\n")
validator_writer.newline(" }\n")
validator_writer.newline("}\n")
if domain_guard:
domain_guard.generate_close(validator_writer)
validator_writer.newline("\n\n")
if is_open_type:
cpp_writer = generate_context.cpp_writer
writer.append("\n")
writer.newline(" // Property names for type generated as open.\n")
for prop_data in resolve_data.main_properties + resolve_data.optional_properties:
prop_name = prop_data.p["name"]
prop_field_name = Capitalizer.lower_camel_case_to_upper(prop_name)
writer.newline(" static const char* %s;\n" % (prop_field_name))
cpp_writer.newline("const char* %s%s::%s = \"%s\";\n" % (helper.full_name_prefix_for_impl, class_name, prop_field_name, prop_name))
writer.newline("};\n\n")
@staticmethod
def generate_forward_declaration(writer):
class_name = fixed_type_name.class_name
writer.newline("class ")
writer.append(class_name)
writer.append(";\n")
@staticmethod
def register_use(forward_listener):
helper.add_to_forward_listener(forward_listener)
@staticmethod
def get_generate_pass_id():
return TypeBuilderPass.MAIN
return CodeGenerator
@staticmethod
def get_validator_call_text():
return helper.full_name_prefix_for_use + fixed_type_name.class_name + "::assertCorrectValue"
@classmethod
def get_array_item_c_type_text(cls):
return helper.full_name_prefix_for_use + fixed_type_name.class_name
@staticmethod
def get_setter_value_expression_pattern():
return None
@staticmethod
def reduce_to_raw_type():
return RawTypes.Object
@staticmethod
def get_type_model():
return TypeModel.RefPtrBased(helper.full_name_prefix_for_use + fixed_type_name.class_name)
class AdHocTypeContextImpl:
def __init__(self, property_name, class_name, resolve_context, ad_hoc_type_list, parent_full_name_prefix):
self.property_name = property_name
self.class_name = class_name
self.resolve_context = resolve_context
self.ad_hoc_type_list = ad_hoc_type_list
self.container_full_name_prefix = parent_full_name_prefix + class_name + "::"
self.container_relative_name_prefix = ""
def get_type_name_fix(self):
class NameFix:
class_name = Capitalizer.lower_camel_case_to_upper(self.property_name)
@staticmethod
def output_comment(writer):
writer.newline("// Named after property name '%s' while generating %s.\n" % (self.property_name, self.class_name))
return NameFix
def add_type(self, binding):
self.ad_hoc_type_list.append(binding)
return ClassBinding
else:
class PlainObjectBinding:
@classmethod
def resolve_inner(cls, resolve_context):
pass
@staticmethod
def request_user_runtime_cast(request):
pass
@staticmethod
def request_internal_runtime_cast():
RawTypes.Object.request_raw_internal_runtime_cast()
@staticmethod
def get_code_generator():
pass
@staticmethod
def get_validator_call_text():
return "RuntimeCastHelper::assertType<InspectorValue::TypeObject>"
@classmethod
def get_array_item_c_type_text(cls):
return cls.reduce_to_raw_type().get_array_item_raw_c_type_text()
@staticmethod
def get_setter_value_expression_pattern():
return None
@staticmethod
def reduce_to_raw_type():
return RawTypes.Object
@staticmethod
def get_type_model():
return TypeModel.Object
return PlainObjectBinding
elif json_typable["type"] == "array":
if "items" in json_typable:
ad_hoc_types = []
class AdHocTypeContext:
container_full_name_prefix = "<not yet defined>"
container_relative_name_prefix = ""
@staticmethod
def get_type_name_fix():
return fixed_type_name
@staticmethod
def add_type(binding):
ad_hoc_types.append(binding)
item_binding = resolve_param_type(json_typable["items"], context_domain_name, AdHocTypeContext)
class ArrayBinding:
resolve_data_ = None
need_internal_runtime_cast_ = False
@classmethod
def resolve_inner(cls, resolve_context):
if cls.resolve_data_:
return
class ResolveData:
item_type_binding = item_binding
ad_hoc_type_list = ad_hoc_types
cls.resolve_data_ = ResolveData
for t in ad_hoc_types:
t.resolve_inner(resolve_context)
@classmethod
def request_user_runtime_cast(cls, request):
raise Exception("Not implemented yet")
@classmethod
def request_internal_runtime_cast(cls):
if cls.need_internal_runtime_cast_:
return
cls.need_internal_runtime_cast_ = True
cls.resolve_data_.item_type_binding.request_internal_runtime_cast()
@classmethod
def get_code_generator(array_binding_cls):
class CodeGenerator:
@staticmethod
def generate_type_builder(writer, generate_context):
ad_hoc_type_writer = writer
resolve_data = array_binding_cls.resolve_data_
for ad_hoc_type in resolve_data.ad_hoc_type_list:
code_generator = ad_hoc_type.get_code_generator()
if code_generator:
code_generator.generate_type_builder(ad_hoc_type_writer, generate_context)
@staticmethod
def generate_forward_declaration(writer):
pass
@staticmethod
def register_use(forward_listener):
item_code_generator = item_binding.get_code_generator()
if item_code_generator:
item_code_generator.register_use(forward_listener)
@staticmethod
def get_generate_pass_id():
return TypeBuilderPass.MAIN
return CodeGenerator
@classmethod
def get_validator_call_text(cls):
return cls.get_array_item_c_type_text() + "::assertCorrectValue"
@classmethod
def get_array_item_c_type_text(cls):
return replace_right_shift("TypeBuilder::Array<%s>" % cls.resolve_data_.item_type_binding.get_array_item_c_type_text())
@staticmethod
def get_setter_value_expression_pattern():
return None
@staticmethod
def reduce_to_raw_type():
return RawTypes.Array
@classmethod
def get_type_model(cls):
return TypeModel.RefPtrBased(cls.get_array_item_c_type_text())
return ArrayBinding
else:
# Fall-through to raw type.
pass
raw_type = RawTypes.get(json_typable["type"])
return RawTypeBinding(raw_type)
class RawTypeBinding:
def __init__(self, raw_type):
self.raw_type_ = raw_type
def resolve_inner(self, resolve_context):
pass
def request_user_runtime_cast(self, request):
raise Exception("Unsupported")
def request_internal_runtime_cast(self):
self.raw_type_.request_raw_internal_runtime_cast()
def get_code_generator(self):
return None
def get_validator_call_text(self):
return self.raw_type_.get_raw_validator_call_text()
def get_array_item_c_type_text(self):
return self.raw_type_.get_array_item_raw_c_type_text()
def get_setter_value_expression_pattern(self):
return None
def reduce_to_raw_type(self):
return self.raw_type_
def get_type_model(self):
return self.raw_type_.get_raw_type_model()
class TypeData(object):
def __init__(self, json_type, json_domain, domain_data):
self.json_type_ = json_type
self.json_domain_ = json_domain
self.domain_data_ = domain_data
if "type" not in json_type:
raise Exception("Unknown type")
json_type_name = json_type["type"]
raw_type = RawTypes.get(json_type_name)
self.raw_type_ = raw_type
self.binding_being_resolved_ = False
self.binding_ = None
def get_raw_type(self):
return self.raw_type_
def get_binding(self):
if not self.binding_:
if self.binding_being_resolved_:
                raise Exception("Type %s is already being resolved" % self.json_type_["type"])
# Resolve only lazily, because resolving one named type may require resolving some other named type.
self.binding_being_resolved_ = True
try:
self.binding_ = TypeBindings.create_named_type_declaration(self.json_type_, self.json_domain_["domain"], self)
finally:
self.binding_being_resolved_ = False
return self.binding_
def get_json_type(self):
return self.json_type_
def get_name(self):
return self.json_type_["id"]
def get_domain_name(self):
return self.json_domain_["domain"]
class DomainData:
def __init__(self, json_domain):
self.json_domain = json_domain
self.types_ = []
def add_type(self, type_data):
self.types_.append(type_data)
def name(self):
return self.json_domain["domain"]
def types(self):
return self.types_
class TypeMap:
def __init__(self, api):
self.map_ = {}
self.domains_ = []
for json_domain in api["domains"]:
domain_name = json_domain["domain"]
domain_map = {}
self.map_[domain_name] = domain_map
domain_data = DomainData(json_domain)
self.domains_.append(domain_data)
if "types" in json_domain:
for json_type in json_domain["types"]:
type_name = json_type["id"]
type_data = TypeData(json_type, json_domain, domain_data)
domain_map[type_name] = type_data
domain_data.add_type(type_data)
def domains(self):
return self.domains_
def get(self, domain_name, type_name):
return self.map_[domain_name][type_name]
def resolve_param_type(json_parameter, scope_domain_name, ad_hoc_type_context):
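    # A parameter either references a named type via "$ref" or declares an
    # ad hoc type inline via "type"; inline declarations are registered with
    # their container through ad_hoc_type_context.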
if "$ref" in json_parameter:
json_ref = json_parameter["$ref"]
type_data = get_ref_data(json_ref, scope_domain_name)
return type_data.get_binding()
elif "type" in json_parameter:
result = TypeBindings.create_ad_hoc_type_declaration(json_parameter, scope_domain_name, ad_hoc_type_context)
ad_hoc_type_context.add_type(result)
return result
else:
raise Exception("Unknown type")
def resolve_param_raw_type(json_parameter, scope_domain_name):
if "$ref" in json_parameter:
json_ref = json_parameter["$ref"]
type_data = get_ref_data(json_ref, scope_domain_name)
return type_data.get_raw_type()
elif "type" in json_parameter:
json_type = json_parameter["type"]
return RawTypes.get(json_type)
else:
raise Exception("Unknown type")
def get_ref_data(json_ref, scope_domain_name):
dot_pos = json_ref.find(".")
if dot_pos == -1:
domain_name = scope_domain_name
type_name = json_ref
else:
domain_name = json_ref[:dot_pos]
type_name = json_ref[dot_pos + 1:]
return type_map.get(domain_name, type_name)
input_file = open(input_json_filename, "r")
json_string = input_file.read()
json_api = json.loads(json_string)
class Templates:
def get_this_script_path_(absolute_path):
absolute_path = os.path.abspath(absolute_path)
components = []
def fill_recursive(path_part, depth):
if depth <= 0 or path_part == '/':
return
fill_recursive(os.path.dirname(path_part), depth - 1)
components.append(os.path.basename(path_part))
# Typical path is /Source/WebCore/inspector/CodeGeneratorInspector.py
# Let's take 4 components from the real path then.
fill_recursive(absolute_path, 4)
return "/".join(components)
file_header_ = ("// File is generated by %s\n\n" % get_this_script_path_(sys.argv[0]) +
"""// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
""")
frontend_domain_class = string.Template(CodeGeneratorInspectorStrings.frontend_domain_class)
backend_method = string.Template(CodeGeneratorInspectorStrings.backend_method)
frontend_method = string.Template(CodeGeneratorInspectorStrings.frontend_method)
callback_method = string.Template(CodeGeneratorInspectorStrings.callback_method)
frontend_h = string.Template(file_header_ + CodeGeneratorInspectorStrings.frontend_h)
backend_h = string.Template(file_header_ + CodeGeneratorInspectorStrings.backend_h)
backend_cpp = string.Template(file_header_ + CodeGeneratorInspectorStrings.backend_cpp)
frontend_cpp = string.Template(file_header_ + CodeGeneratorInspectorStrings.frontend_cpp)
typebuilder_h = string.Template(file_header_ + CodeGeneratorInspectorStrings.typebuilder_h)
typebuilder_cpp = string.Template(file_header_ + CodeGeneratorInspectorStrings.typebuilder_cpp)
backend_js = string.Template(file_header_ + CodeGeneratorInspectorStrings.backend_js)
param_container_access_code = CodeGeneratorInspectorStrings.param_container_access_code
type_map = TypeMap(json_api)
class NeedRuntimeCastRequest:
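    # Tracks whether a requested runtimeCast method was actually generated for a
    # type listed in TYPES_WITH_RUNTIME_CAST_SET; resolve_all_types raises if a
    # request is never acknowledged.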
def __init__(self):
self.ack_ = None
def acknowledge(self):
self.ack_ = True
def is_acknowledged(self):
return self.ack_
def resolve_all_types():
runtime_cast_generate_requests = {}
for type_name in TYPES_WITH_RUNTIME_CAST_SET:
runtime_cast_generate_requests[type_name] = NeedRuntimeCastRequest()
class ForwardListener:
type_data_set = set()
already_declared_set = set()
@classmethod
def add_type_data(cls, type_data):
if type_data not in cls.already_declared_set:
cls.type_data_set.add(type_data)
class ResolveContext:
forward_listener = ForwardListener
for domain_data in type_map.domains():
for type_data in domain_data.types():
# Do not generate forwards for this type any longer.
ForwardListener.already_declared_set.add(type_data)
binding = type_data.get_binding()
binding.resolve_inner(ResolveContext)
for domain_data in type_map.domains():
for type_data in domain_data.types():
full_type_name = "%s.%s" % (type_data.get_domain_name(), type_data.get_name())
request = runtime_cast_generate_requests.pop(full_type_name, None)
binding = type_data.get_binding()
if request:
binding.request_user_runtime_cast(request)
if request and not request.is_acknowledged():
raise Exception("Failed to generate runtimeCast in " + full_type_name)
if verification:
for full_type_name in runtime_cast_generate_requests:
raise Exception("Failed to generate runtimeCast. Type " + full_type_name + " not found")
return ForwardListener
global_forward_listener = resolve_all_types()
def get_annotated_type_text(raw_type, annotated_type):
if annotated_type != raw_type:
return "/*%s*/ %s" % (annotated_type, raw_type)
else:
return raw_type
def format_setter_value_expression(param_type_binding, value_ref):
pattern = param_type_binding.get_setter_value_expression_pattern()
if pattern:
return pattern % value_ref
else:
return value_ref
class Generator:
frontend_class_field_lines = []
frontend_domain_class_lines = []
method_name_enum_list = []
backend_method_declaration_list = []
backend_method_implementation_list = []
backend_method_name_declaration_list = []
method_handler_list = []
frontend_method_list = []
backend_js_domain_initializer_list = []
backend_virtual_setters_list = []
backend_agent_interface_list = []
backend_setters_list = []
backend_constructor_init_list = []
backend_field_list = []
frontend_constructor_init_list = []
type_builder_fragments = []
type_builder_forwards = []
validator_impl_list = []
type_builder_impl_list = []
@staticmethod
def go():
Generator.process_types(type_map)
first_cycle_guardable_list_list = [
Generator.backend_method_declaration_list,
Generator.backend_method_implementation_list,
Generator.backend_method_name_declaration_list,
Generator.backend_agent_interface_list,
Generator.frontend_class_field_lines,
Generator.frontend_constructor_init_list,
Generator.frontend_domain_class_lines,
Generator.frontend_method_list,
Generator.method_handler_list,
Generator.method_name_enum_list,
Generator.backend_constructor_init_list,
Generator.backend_virtual_setters_list,
Generator.backend_setters_list,
Generator.backend_field_list]
for json_domain in json_api["domains"]:
domain_name = json_domain["domain"]
domain_name_lower = domain_name.lower()
domain_fixes = DomainNameFixes.get_fixed_data(domain_name)
domain_guard = domain_fixes.get_guard()
if domain_guard:
for l in first_cycle_guardable_list_list:
domain_guard.generate_open(l)
agent_field_name = domain_fixes.agent_field_name
frontend_method_declaration_lines = []
Generator.backend_js_domain_initializer_list.append("// %s.\n" % domain_name)
if not domain_fixes.skip_js_bind:
Generator.backend_js_domain_initializer_list.append("InspectorBackend.register%sDispatcher = InspectorBackend.registerDomainDispatcher.bind(InspectorBackend, \"%s\");\n" % (domain_name, domain_name))
if "types" in json_domain:
for json_type in json_domain["types"]:
if "type" in json_type and json_type["type"] == "string" and "enum" in json_type:
enum_name = "%s.%s" % (domain_name, json_type["id"])
Generator.process_enum(json_type, enum_name)
elif json_type["type"] == "object":
if "properties" in json_type:
for json_property in json_type["properties"]:
if "type" in json_property and json_property["type"] == "string" and "enum" in json_property:
enum_name = "%s.%s%s" % (domain_name, json_type["id"], to_title_case(json_property["name"]))
Generator.process_enum(json_property, enum_name)
if "events" in json_domain:
for json_event in json_domain["events"]:
Generator.process_event(json_event, domain_name, frontend_method_declaration_lines)
Generator.frontend_class_field_lines.append(" %s m_%s;\n" % (domain_name, domain_name_lower))
if Generator.frontend_constructor_init_list:
Generator.frontend_constructor_init_list.append(" , ")
Generator.frontend_constructor_init_list.append("m_%s(inspectorFrontendChannel)\n" % domain_name_lower)
Generator.frontend_domain_class_lines.append(Templates.frontend_domain_class.substitute(None,
domainClassName=domain_name,
domainFieldName=domain_name_lower,
frontendDomainMethodDeclarations="".join(flatten_list(frontend_method_declaration_lines))))
agent_interface_name = Capitalizer.lower_camel_case_to_upper(domain_name) + "CommandHandler"
Generator.backend_agent_interface_list.append(" class %s {\n" % agent_interface_name)
Generator.backend_agent_interface_list.append(" public:\n")
if "commands" in json_domain:
for json_command in json_domain["commands"]:
Generator.process_command(json_command, domain_name, agent_field_name, agent_interface_name)
Generator.backend_agent_interface_list.append("\n protected:\n")
Generator.backend_agent_interface_list.append(" virtual ~%s() { }\n" % agent_interface_name)
Generator.backend_agent_interface_list.append(" };\n\n")
Generator.backend_constructor_init_list.append(" , m_%s(0)" % agent_field_name)
Generator.backend_virtual_setters_list.append(" virtual void registerAgent(%s* %s) = 0;" % (agent_interface_name, agent_field_name))
Generator.backend_setters_list.append(" virtual void registerAgent(%s* %s) { ASSERT(!m_%s); m_%s = %s; }" % (agent_interface_name, agent_field_name, agent_field_name, agent_field_name, agent_field_name))
Generator.backend_field_list.append(" %s* m_%s;" % (agent_interface_name, agent_field_name))
if domain_guard:
for l in reversed(first_cycle_guardable_list_list):
domain_guard.generate_close(l)
Generator.backend_js_domain_initializer_list.append("\n")
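    # Registers a protocol string enum with the JS backend via InspectorBackend.registerEnum.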
@staticmethod
def process_enum(json_enum, enum_name):
enum_members = []
for member in json_enum["enum"]:
enum_members.append("%s: \"%s\"" % (fix_camel_case(member), member))
Generator.backend_js_domain_initializer_list.append("InspectorBackend.registerEnum(\"%s\", {%s});\n" % (
enum_name, ", ".join(enum_members)))
@staticmethod
def process_event(json_event, domain_name, frontend_method_declaration_lines):
event_name = json_event["name"]
ad_hoc_type_output = []
frontend_method_declaration_lines.append(ad_hoc_type_output)
ad_hoc_type_writer = Writer(ad_hoc_type_output, " ")
decl_parameter_list = []
json_parameters = json_event.get("parameters")
Generator.generate_send_method(json_parameters, event_name, domain_name, ad_hoc_type_writer,
decl_parameter_list,
Generator.EventMethodStructTemplate,
Generator.frontend_method_list, Templates.frontend_method, {"eventName": event_name})
backend_js_event_param_list = []
if json_parameters:
for parameter in json_parameters:
parameter_name = parameter["name"]
backend_js_event_param_list.append("\"%s\"" % parameter_name)
frontend_method_declaration_lines.append(
" void %s(%s);\n" % (event_name, ", ".join(decl_parameter_list)))
Generator.backend_js_domain_initializer_list.append("InspectorBackend.registerEvent(\"%s.%s\", [%s]);\n" % (
domain_name, event_name, ", ".join(backend_js_event_param_list)))
class EventMethodStructTemplate:
@staticmethod
def append_prolog(line_list):
line_list.append(" RefPtr<InspectorObject> paramsObject = InspectorObject::create();\n")
@staticmethod
def append_epilog(line_list):
line_list.append(" jsonMessage->setObject(\"params\", paramsObject);\n")
container_name = "paramsObject"
@staticmethod
def process_command(json_command, domain_name, agent_field_name, agent_interface_name):
json_command_name = json_command["name"]
cmd_enum_name = "k%s_%sCmd" % (domain_name, json_command["name"])
Generator.method_name_enum_list.append(" %s," % cmd_enum_name)
Generator.method_handler_list.append(" &InspectorBackendDispatcherImpl::%s_%s," % (domain_name, json_command_name))
Generator.backend_method_declaration_list.append(" void %s_%s(long callId, InspectorObject* requestMessageObject);" % (domain_name, json_command_name))
ad_hoc_type_output = []
Generator.backend_agent_interface_list.append(ad_hoc_type_output)
ad_hoc_type_writer = Writer(ad_hoc_type_output, " ")
Generator.backend_agent_interface_list.append(" virtual void %s(ErrorString*" % json_command_name)
method_in_code = ""
method_out_code = ""
agent_call_param_list = []
response_cook_list = []
request_message_param = ""
js_parameters_text = ""
if "parameters" in json_command:
json_params = json_command["parameters"]
method_in_code += Templates.param_container_access_code
request_message_param = " requestMessageObject"
js_param_list = []
for json_parameter in json_params:
json_param_name = json_parameter["name"]
param_raw_type = resolve_param_raw_type(json_parameter, domain_name)
getter_name = param_raw_type.get_getter_name()
optional = json_parameter.get("optional")
non_optional_type_model = param_raw_type.get_raw_type_model()
if optional:
type_model = non_optional_type_model.get_optional()
else:
type_model = non_optional_type_model
if optional:
code = (" bool %s_valueFound = false;\n"
" %s in_%s = get%s(paramsContainerPtr, \"%s\", &%s_valueFound, protocolErrorsPtr);\n" %
(json_param_name, non_optional_type_model.get_command_return_pass_model().get_return_var_type(), json_param_name, getter_name, json_param_name, json_param_name))
param = ", %s_valueFound ? &in_%s : 0" % (json_param_name, json_param_name)
# FIXME: pass optional refptr-values as PassRefPtr
formal_param_type_pattern = "const %s*"<|fim▁hole|> (non_optional_type_model.get_command_return_pass_model().get_return_var_type(), json_param_name, getter_name, json_param_name))
param = ", in_%s" % json_param_name
# FIXME: pass not-optional refptr-values as NonNullPassRefPtr
if param_raw_type.is_heavy_value():
formal_param_type_pattern = "const %s&"
else:
formal_param_type_pattern = "%s"
method_in_code += code
agent_call_param_list.append(param)
Generator.backend_agent_interface_list.append(", %s in_%s" % (formal_param_type_pattern % non_optional_type_model.get_command_return_pass_model().get_return_var_type(), json_param_name))
js_bind_type = param_raw_type.get_js_bind_type()
js_param_text = "{\"name\": \"%s\", \"type\": \"%s\", \"optional\": %s}" % (
json_param_name,
js_bind_type,
("true" if ("optional" in json_parameter and json_parameter["optional"]) else "false"))
js_param_list.append(js_param_text)
js_parameters_text = ", ".join(js_param_list)
response_cook_text = ""
if json_command.get("async") == True:
callback_name = Capitalizer.lower_camel_case_to_upper(json_command_name) + "Callback"
callback_output = []
callback_writer = Writer(callback_output, ad_hoc_type_writer.get_indent())
decl_parameter_list = []
Generator.generate_send_method(json_command.get("returns"), json_command_name, domain_name, ad_hoc_type_writer,
decl_parameter_list,
Generator.CallbackMethodStructTemplate,
Generator.backend_method_implementation_list, Templates.callback_method,
{"callbackName": callback_name, "agentName": agent_interface_name})
callback_writer.newline("class " + callback_name + " : public CallbackBase {\n")
callback_writer.newline("public:\n")
callback_writer.newline(" " + callback_name + "(PassRefPtr<InspectorBackendDispatcherImpl>, int id);\n")
callback_writer.newline(" void sendSuccess(" + ", ".join(decl_parameter_list) + ");\n")
callback_writer.newline("};\n")
ad_hoc_type_output.append(callback_output)
method_out_code += " RefPtr<" + agent_interface_name + "::" + callback_name + "> callback = adoptRef(new " + agent_interface_name + "::" + callback_name + "(this, callId));\n"
agent_call_param_list.append(", callback")
response_cook_text += " if (!error.length()) \n"
response_cook_text += " return;\n"
response_cook_text += " callback->disable();\n"
Generator.backend_agent_interface_list.append(", PassRefPtr<%s> callback" % callback_name)
else:
if "returns" in json_command:
method_out_code += "\n"
for json_return in json_command["returns"]:
json_return_name = json_return["name"]
optional = bool(json_return.get("optional"))
return_type_binding = Generator.resolve_type_and_generate_ad_hoc(json_return, json_command_name, domain_name, ad_hoc_type_writer, agent_interface_name + "::")
raw_type = return_type_binding.reduce_to_raw_type()
setter_type = raw_type.get_setter_name()
initializer = raw_type.get_c_initializer()
type_model = return_type_binding.get_type_model()
if optional:
type_model = type_model.get_optional()
code = " %s out_%s;\n" % (type_model.get_command_return_pass_model().get_return_var_type(), json_return_name)
param = ", %sout_%s" % (type_model.get_command_return_pass_model().get_output_argument_prefix(), json_return_name)
var_name = "out_%s" % json_return_name
setter_argument = type_model.get_command_return_pass_model().get_output_to_raw_expression() % var_name
if return_type_binding.get_setter_value_expression_pattern():
setter_argument = return_type_binding.get_setter_value_expression_pattern() % setter_argument
cook = " result->set%s(\"%s\", %s);\n" % (setter_type, json_return_name,
setter_argument)
set_condition_pattern = type_model.get_command_return_pass_model().get_set_return_condition()
if set_condition_pattern:
cook = (" if (%s)\n " % (set_condition_pattern % var_name)) + cook
annotated_type = type_model.get_command_return_pass_model().get_output_parameter_type()
param_name = "out_%s" % json_return_name
if optional:
param_name = "opt_" + param_name
Generator.backend_agent_interface_list.append(", %s %s" % (annotated_type, param_name))
response_cook_list.append(cook)
method_out_code += code
agent_call_param_list.append(param)
response_cook_text = "".join(response_cook_list)
if len(response_cook_text) != 0:
response_cook_text = " if (!error.length()) {\n" + response_cook_text + " }"
backend_js_reply_param_list = []
if "returns" in json_command:
for json_return in json_command["returns"]:
json_return_name = json_return["name"]
backend_js_reply_param_list.append("\"%s\"" % json_return_name)
js_reply_list = "[%s]" % ", ".join(backend_js_reply_param_list)
Generator.backend_method_implementation_list.append(Templates.backend_method.substitute(None,
domainName=domain_name, methodName=json_command_name,
agentField="m_" + agent_field_name,
methodInCode=method_in_code,
methodOutCode=method_out_code,
agentCallParams="".join(agent_call_param_list),
requestMessageObject=request_message_param,
responseCook=response_cook_text,
commandNameIndex=cmd_enum_name))
Generator.backend_method_name_declaration_list.append(" \"%s.%s\"," % (domain_name, json_command_name))
Generator.backend_js_domain_initializer_list.append("InspectorBackend.registerCommand(\"%s.%s\", [%s], %s);\n" % (domain_name, json_command_name, js_parameters_text, js_reply_list))
Generator.backend_agent_interface_list.append(") = 0;\n")
class CallbackMethodStructTemplate:
@staticmethod
def append_prolog(line_list):
pass
@staticmethod
def append_epilog(line_list):
pass
container_name = "jsonMessage"
# Generates common code for event sending and callback response data sending.
@staticmethod
def generate_send_method(parameters, event_name, domain_name, ad_hoc_type_writer, decl_parameter_list,
method_struct_template,
generator_method_list, method_template, template_params):
method_line_list = []
if parameters:
method_struct_template.append_prolog(method_line_list)
for json_parameter in parameters:
parameter_name = json_parameter["name"]
param_type_binding = Generator.resolve_type_and_generate_ad_hoc(json_parameter, event_name, domain_name, ad_hoc_type_writer, "")
raw_type = param_type_binding.reduce_to_raw_type()
raw_type_binding = RawTypeBinding(raw_type)
optional = bool(json_parameter.get("optional"))
setter_type = raw_type.get_setter_name()
type_model = param_type_binding.get_type_model()
raw_type_model = raw_type_binding.get_type_model()
if optional:
type_model = type_model.get_optional()
raw_type_model = raw_type_model.get_optional()
annotated_type = type_model.get_input_param_type_text()
mode_type_binding = param_type_binding
decl_parameter_list.append("%s %s" % (annotated_type, parameter_name))
setter_argument = raw_type_model.get_event_setter_expression_pattern() % parameter_name
if mode_type_binding.get_setter_value_expression_pattern():
setter_argument = mode_type_binding.get_setter_value_expression_pattern() % setter_argument
setter_code = " %s->set%s(\"%s\", %s);\n" % (method_struct_template.container_name, setter_type, parameter_name, setter_argument)
if optional:
setter_code = (" if (%s)\n " % parameter_name) + setter_code
method_line_list.append(setter_code)
method_struct_template.append_epilog(method_line_list)
generator_method_list.append(method_template.substitute(None,
domainName=domain_name,
parameters=", ".join(decl_parameter_list),
code="".join(method_line_list), **template_params))
@staticmethod
def resolve_type_and_generate_ad_hoc(json_param, method_name, domain_name, ad_hoc_type_writer, container_relative_name_prefix_param):
param_name = json_param["name"]
ad_hoc_type_list = []
class AdHocTypeContext:
container_full_name_prefix = "<not yet defined>"
container_relative_name_prefix = container_relative_name_prefix_param
@staticmethod
def get_type_name_fix():
class NameFix:
class_name = Capitalizer.lower_camel_case_to_upper(param_name)
@staticmethod
def output_comment(writer):
writer.newline("// Named after parameter '%s' while generating command/event %s.\n" % (param_name, method_name))
return NameFix
@staticmethod
def add_type(binding):
ad_hoc_type_list.append(binding)
type_binding = resolve_param_type(json_param, domain_name, AdHocTypeContext)
class InterfaceForwardListener:
@staticmethod
def add_type_data(type_data):
pass
class InterfaceResolveContext:
forward_listener = InterfaceForwardListener
for type in ad_hoc_type_list:
type.resolve_inner(InterfaceResolveContext)
class InterfaceGenerateContext:
validator_writer = "not supported in InterfaceGenerateContext"
cpp_writer = validator_writer
for type in ad_hoc_type_list:
generator = type.get_code_generator()
if generator:
generator.generate_type_builder(ad_hoc_type_writer, InterfaceGenerateContext)
return type_binding
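    # Emits type builder code, forward declarations and typedefs for all domains in the type map.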
@staticmethod
def process_types(type_map):
output = Generator.type_builder_fragments
class GenerateContext:
validator_writer = Writer(Generator.validator_impl_list, "")
cpp_writer = Writer(Generator.type_builder_impl_list, "")
def generate_all_domains_code(out, type_data_callback):
writer = Writer(out, "")
for domain_data in type_map.domains():
domain_fixes = DomainNameFixes.get_fixed_data(domain_data.name())
domain_guard = domain_fixes.get_guard()
namespace_declared = []
def namespace_lazy_generator():
if not namespace_declared:
if domain_guard:
domain_guard.generate_open(out)
writer.newline("namespace ")
writer.append(domain_data.name())
writer.append(" {\n")
# What is a better way to change value from outer scope?
namespace_declared.append(True)
return writer
for type_data in domain_data.types():
type_data_callback(type_data, namespace_lazy_generator)
if namespace_declared:
writer.append("} // ")
writer.append(domain_data.name())
writer.append("\n\n")
if domain_guard:
domain_guard.generate_close(out)
def create_type_builder_caller(generate_pass_id):
def call_type_builder(type_data, writer_getter):
code_generator = type_data.get_binding().get_code_generator()
if code_generator and generate_pass_id == code_generator.get_generate_pass_id():
writer = writer_getter()
code_generator.generate_type_builder(writer, GenerateContext)
return call_type_builder
generate_all_domains_code(output, create_type_builder_caller(TypeBuilderPass.MAIN))
Generator.type_builder_forwards.append("// Forward declarations.\n")
def generate_forward_callback(type_data, writer_getter):
if type_data in global_forward_listener.type_data_set:
binding = type_data.get_binding()
binding.get_code_generator().generate_forward_declaration(writer_getter())
generate_all_domains_code(Generator.type_builder_forwards, generate_forward_callback)
Generator.type_builder_forwards.append("// End of forward declarations.\n\n")
Generator.type_builder_forwards.append("// Typedefs.\n")
generate_all_domains_code(Generator.type_builder_forwards, create_type_builder_caller(TypeBuilderPass.TYPEDEF))
Generator.type_builder_forwards.append("// End of typedefs.\n\n")
def flatten_list(input):
res = []
def fill_recursive(l):
for item in l:
if isinstance(item, list):
fill_recursive(item)
else:
res.append(item)
fill_recursive(input)
return res
# A writer that only updates the file if its content actually changed, to better support incremental builds.
class SmartOutput:
def __init__(self, file_name):
self.file_name_ = file_name
self.output_ = ""
def write(self, text):
self.output_ += text
def close(self):
text_changed = True
self.output_ = self.output_.rstrip() + "\n"
try:
read_file = open(self.file_name_, "r")
old_text = read_file.read()
read_file.close()
text_changed = old_text != self.output_
except:
# Ignore, just overwrite by default
pass
if text_changed or write_always:
out_file = open(self.file_name_, "w")
out_file.write(self.output_)
out_file.close()
Generator.go()
backend_h_file = SmartOutput(output_header_dirname + "/InspectorBackendDispatcher.h")
backend_cpp_file = SmartOutput(output_cpp_dirname + "/InspectorBackendDispatcher.cpp")
frontend_h_file = SmartOutput(output_header_dirname + "/InspectorFrontend.h")
frontend_cpp_file = SmartOutput(output_cpp_dirname + "/InspectorFrontend.cpp")
typebuilder_h_file = SmartOutput(output_header_dirname + "/InspectorTypeBuilder.h")
typebuilder_cpp_file = SmartOutput(output_cpp_dirname + "/InspectorTypeBuilder.cpp")
backend_js_file = SmartOutput(output_js_dirname + "/InspectorBackendCommands.js")
backend_h_file.write(Templates.backend_h.substitute(None,
virtualSetters="\n".join(Generator.backend_virtual_setters_list),
agentInterfaces="".join(flatten_list(Generator.backend_agent_interface_list)),
methodNamesEnumContent="\n".join(Generator.method_name_enum_list)))
backend_cpp_file.write(Templates.backend_cpp.substitute(None,
constructorInit="\n".join(Generator.backend_constructor_init_list),
setters="\n".join(Generator.backend_setters_list),
fieldDeclarations="\n".join(Generator.backend_field_list),
methodNameDeclarations="\n".join(Generator.backend_method_name_declaration_list),
methods="\n".join(Generator.backend_method_implementation_list),
methodDeclarations="\n".join(Generator.backend_method_declaration_list),
messageHandlers="\n".join(Generator.method_handler_list)))
frontend_h_file.write(Templates.frontend_h.substitute(None,
fieldDeclarations="".join(Generator.frontend_class_field_lines),
domainClassList="".join(Generator.frontend_domain_class_lines)))
frontend_cpp_file.write(Templates.frontend_cpp.substitute(None,
constructorInit="".join(Generator.frontend_constructor_init_list),
methods="\n".join(Generator.frontend_method_list)))
typebuilder_h_file.write(Templates.typebuilder_h.substitute(None,
typeBuilders="".join(flatten_list(Generator.type_builder_fragments)),
forwards="".join(Generator.type_builder_forwards),
validatorIfdefName=VALIDATOR_IFDEF_NAME))
typebuilder_cpp_file.write(Templates.typebuilder_cpp.substitute(None,
enumConstantValues=EnumConstants.get_enum_constant_code(),
implCode="".join(flatten_list(Generator.type_builder_impl_list)),
validatorCode="".join(flatten_list(Generator.validator_impl_list)),
validatorIfdefName=VALIDATOR_IFDEF_NAME))
backend_js_file.write(Templates.backend_js.substitute(None,
domainInitializers="".join(Generator.backend_js_domain_initializer_list)))
backend_h_file.close()
backend_cpp_file.close()
frontend_h_file.close()
frontend_cpp_file.close()
typebuilder_h_file.close()
typebuilder_cpp_file.close()
backend_js_file.close()<|fim▁end|> | else:
code = (" %s in_%s = get%s(paramsContainerPtr, \"%s\", 0, protocolErrorsPtr);\n" % |
<|file_name|>test_conf.py<|end_file_name|><|fim▁begin|>import sys
import os
from django.test import TestCase, override_settings, Client
from django.conf import settings<|fim▁hole|>from ..conf import (DatabaseUndefined, validate_database,
InaccessibleSettings, _load_py_file, load_py_settings,
load_colab_apps, load_widgets_settings)
from mock import patch
test_files_dir = "./colab/utils/tests"
class TestConf(TestCase):
@override_settings(DEBUG=False, DATABASES={
'default': {
'NAME': settings.DEFAULT_DATABASE,
},
})
def test_database_undefined(self):
with self.assertRaises(DatabaseUndefined):
validate_database(settings.DATABASES, settings.DEFAULT_DATABASE,
settings.DEBUG)
def test_load_py_file_with_io_error(self):
self.assertRaises(InaccessibleSettings,
_load_py_file, 'settings_test', '/etc/colab/')
def test_load_py_file_with_syntax_error(self):
with file('/tmp/settings_with_syntax_error.py', 'w') as temp_settings:
temp_settings.write('(')
self.assertRaises(InaccessibleSettings,
_load_py_file, 'settings_with_syntax_error', '/tmp')
def test_load_py_file(self):
py_settings = _load_py_file('colab_settings', test_files_dir)
self.assertIn('SOCIAL_NETWORK_ENABLED', py_settings)
self.assertTrue(py_settings['SOCIAL_NETWORK_ENABLED'])
self.assertIn('EMAIL_PORT', py_settings)
self.assertEquals(py_settings['EMAIL_PORT'], 25)
@patch('os.getenv', return_value='/path/fake/settings.py')
def test_load_py_settings_with_inaccessible_settings(self, mock):
self.assertRaises(InaccessibleSettings, load_py_settings)
def test_load_py_settings_without_settings_d(self):
COLAB_SETTINGS_DIR = ''
if 'COLAB_SETTINGS_DIR' in os.environ:
COLAB_SETTINGS_DIR = os.environ['COLAB_SETTINGS_DIR']
del os.environ['COLAB_SETTINGS_DIR']
py_settings = load_py_settings('/path/fake/settings.d/test.py')
self.assertIn('SOCIAL_NETWORK_ENABLED', py_settings)
self.assertTrue(py_settings['SOCIAL_NETWORK_ENABLED'])
self.assertIn('EMAIL_PORT', py_settings)
self.assertEquals(py_settings['EMAIL_PORT'], 25)
if COLAB_SETTINGS_DIR:
os.environ['COLAB_SETTINGS_DIR'] = COLAB_SETTINGS_DIR
@patch('os.listdir', return_value=[test_files_dir + '/settings.d/test.py',
'non_python_file'])
@patch('colab.utils.conf._load_py_file',
side_effect=[{'SOCIAL_NETWORK_ENABLED': True, 'EMAIL_PORT': 25},
{'TEST': 'test'}])
def test_load_py_settings_with_settings_d(self, mock_py, mock_listdir):
py_settings = load_py_settings(test_files_dir + '/settings.d/')
self.assertIn('SOCIAL_NETWORK_ENABLED', py_settings)
self.assertTrue(py_settings['SOCIAL_NETWORK_ENABLED'])
self.assertIn('EMAIL_PORT', py_settings)
self.assertEquals(py_settings['EMAIL_PORT'], 25)
self.assertIn('TEST', py_settings)
self.assertEquals(py_settings['TEST'], 'test')
@patch('os.getenv', return_value='/path/fake/plugins.d/')
def test_load_colab_apps_without_plugins_d_directory(self, mock):
colab_apps = load_colab_apps()
self.assertIn('COLAB_APPS', colab_apps)
self.assertEquals(colab_apps['COLAB_APPS'], {})
@patch('os.getenv', return_value=test_files_dir + '/plugins.d/')
def test_load_colab_apps_with_plugins_d_directory(self, os_getenv):
sys.path.insert(0, os_getenv.return_value)
colab_apps = load_colab_apps()
self.assertIn('gitlab', colab_apps['COLAB_APPS'])
self.assertIn('noosfero', colab_apps['COLAB_APPS'])
sys.path.remove(os_getenv.return_value)
self.assertNotIn(os_getenv.return_value, sys.path)
@patch('os.getenv', return_value='/path/fake/widgets_settings.py')
def test_load_widgets_settings_without_settings(self, mock):
self.assertIsNone(load_widgets_settings())
@patch('os.getenv', side_effect=[test_files_dir + '/colab_settings.py',
'/path/fake/widgets_settings.py'])
def test_load_widgets_settings_without_settings_d(self, mock):
self.assertIsNone(load_widgets_settings())
def test_blacklist(self):
client = Client()
response = client.get('/test_blacklist')
self.assertEquals(403, response.status_code)<|fim▁end|> | |
<|file_name|>exceptions.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import absolute_import
<|fim▁hole|>
class BadOption(Exception):
""" Incorrect HTTP API arguments """
pass
class RenderError(Exception):
""" Error rendering page """
pass
class InternalError(Exception):
""" Unhandled internal error """
pass
class GlobalTimeoutError(Exception):
""" Timeout exceeded rendering page """
pass
class UnsupportedContentType(Exception):
""" Request Content-Type is not supported """
pass
class ExpiredArguments(Exception):
""" Arguments stored with ``save_args`` are expired """
pass
class ScriptError(BadOption):
""" Error happened while executing Lua script """
LUA_INIT_ERROR = 'LUA_INIT_ERROR' # error happened before coroutine starts
LUA_ERROR = 'LUA_ERROR' # lua error() is called from the coroutine
LUA_CONVERT_ERROR = 'LUA_CONVERT_ERROR' # result can't be converted to Python
SPLASH_LUA_ERROR = 'SPLASH_LUA_ERROR' # custom error raised by Splash
BAD_MAIN_ERROR = 'BAD_MAIN_ERROR' # main() definition is incorrect
MAIN_NOT_FOUND_ERROR = 'MAIN_NOT_FOUND_ERROR' # main() is not found
SYNTAX_ERROR = 'SYNTAX_ERROR' # XXX: unused; reported as INIT_ERROR now
JS_ERROR = 'JS_ERROR' # error in a wrapped JS function
UNKNOWN_ERROR = 'UNKNOWN_ERROR'
class JsError(Exception):
""" Error occured in JavaScript code """
pass
class OneShotCallbackError(Exception):
""" A one shot callback was called more than once. """
pass<|fim▁end|> |