| prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
|---|---|
<|file_name|>classifiedReleaseOrder.model.js<|end_file_name|><|fim▁begin|>import mongoose, { Schema } from 'mongoose'
import auditingSchema from '../../core/base.model'
import mongoosePaginate from 'mongoose-paginate'
import uniqueValidator from 'mongoose-unique-validator'
const ClassifiedReleaseOrderSchema = new auditingSchema({
publicationId: {
type: Schema.ObjectId,
ref: 'Publication',
required: true,
},
releaseOrderNumber: {
type: String,
required: true,
unique: true,
},
specialInstructions: {
type: String,
},
dateOfCreation: {
type: Date,
},
})
ClassifiedReleaseOrderSchema.pre('validate', function(next) {
next()
})
ClassifiedReleaseOrderSchema.methods = {
/* Model Instance Methods come here */
}
/* Plug-ins */
ClassifiedReleaseOrderSchema.plugin(mongoosePaginate)
ClassifiedReleaseOrderSchema.plugin(uniqueValidator, {
message: 'Classified Release Order : "{VALUE}" already exists in system',
})
<|fim▁hole|> 'ClassifiedReleaseOrder',
ClassifiedReleaseOrderSchema
)<|fim▁end|> | export default mongoose.model( |
<|file_name|>core_view.js<|end_file_name|><|fim▁begin|>import {
ActionHandler,
Evented,
FrameworkObject,
deprecateUnderscoreActions
} from 'ember-runtime';
import { initViewElement } from '../system/utils';
import { cloneStates, states } from './states';
/**
`Ember.CoreView` is an abstract class that exists to give view-like behavior
to both Ember's main view class `Ember.Component` and other classes that don't need
the full functionality of `Ember.Component`.
<|fim▁hole|> in your applications.
@class CoreView
@namespace Ember
@extends Ember.Object
@deprecated Use `Ember.Component` instead.
@uses Ember.Evented
@uses Ember.ActionHandler
@private
*/
const CoreView = FrameworkObject.extend(Evented, ActionHandler, {
isView: true,
_states: cloneStates(states),
init() {
this._super(...arguments);
this._state = 'preRender';
this._currentState = this._states.preRender;
initViewElement(this);
if (!this.renderer) {
throw new Error(`Cannot instantiate a component without a renderer. Please ensure that you are creating ${this} with a proper container/registry.`);
}
},
/**
If the view is currently inserted into the DOM of a parent view, this
property will point to the parent of the view.
@property parentView
@type Ember.View
@default null
@private
*/
parentView: null,
instrumentDetails(hash) {
hash.object = this.toString();
hash.containerKey = this._debugContainerKey;
hash.view = this;
return hash;
},
/**
Override the default event firing from `Ember.Evented` to
also call methods with the given name.
@method trigger
@param name {String}
@private
*/
trigger(name, ...args) {
this._super(...arguments);
let method = this[name];
if (typeof method === 'function') {
return method.apply(this, args);
}
},
has(name) {
return typeof this[name] === 'function' || this._super(name);
}
});
deprecateUnderscoreActions(CoreView);
CoreView.reopenClass({
isViewFactory: true
});
export default CoreView;<|fim▁end|> | Unless you have specific needs for `CoreView`, you will use `Ember.Component` |
<|file_name|>wiki_model.py<|end_file_name|><|fim▁begin|>import numpy
from wiki_scraper import (
parse_html_simple,
crawl_page)
import pickle
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.naive_bayes import MultinomialNB
import os.path
from parallel_webscrape import scrape_wikipedia
PATH = 'wikimodel/'
class WikiModel():
def __init__(self):
self.vocabulary = set()
self.stop_words = set()
self.english_words = set()
self.label_map = {}
self.reverse_label_map = {}
self.count_data = []
self.labels = []
self.vectorizer = None
self.classifier = None
self.load_training_data()
def load_training_data(self):<|fim▁hole|> english_words = set()
with open(PATH + "american-english.txt") as english_dictionary:
english_words = set(
word.strip().lower() for word in english_dictionary)
stop_words = set()
with open(PATH + "english_stopwords.txt") as stopwords:
stop_words = set(word.strip().lower() for word in stopwords)
self.english_words = english_words
self.stop_words = stop_words
if not os.path.isfile(PATH + 'categories.pickle'):
scrape_wikipedia()
categories = pickle.load(open(PATH + 'categories.pickle', 'rb'))
# parse the html, turning it into a list of words
# and removing stop words and non-dictionary words
# we'll also collect all of the words so that we can make a map of
# words to numbers
all_words = set()
# the category level
for k, v in categories.iteritems():
# the document level
for inner_k, inner_document in v.iteritems():
# parse the html to get lists of words per document
words = parse_html_simple(inner_document)
parsed = []
for word in words:
if word in english_words and word not in stop_words:
all_words.add(word)
parsed.append(word)
categories[k][inner_k] = parsed
# aggregate all of the documents into one big data set while
# transforming them into counts
self.vocabulary = set(all_words)
self.vectorizer = CountVectorizer(vocabulary=self.vocabulary)
count_data = []
string_data = []
labels = []
# the category level
for k, v in categories.iteritems():
# the document level
for inner_k, inner_document in v.iteritems():
# oops, we actually need this in string format
string_data.append(' '.join(inner_document))
labels.append(k)
# transform the string data into count data
count_data = self.vectorizer.transform(string_data).todense()
# transform count_data and labels into numpy arrays for easy indexing
count_data = numpy.array(count_data)
labels = numpy.array(labels).squeeze()
# make a map from the string label to a number and vice versa
self.label_map = {}
self.reverse_label_map = {}
i = 0
for label in sorted(set(labels)):
self.reverse_label_map[i] = label
self.label_map[label] = i
i += 1
# fit the model
self.classifier = MultinomialNB()
self.classifier.fit(count_data, labels)
def classify_url(self, domain, page, depth=0):
"""
Classify the documents after crawling them.
args:
domain - the domain part of the url
page - the other part of the url
depth - how deep to crawl
returns:
a list of predicted probabilities for each instance belonging to
each class
"""
# get the documents
documents, _ = crawl_page(domain, page, depth=depth)
# parse the documents
string_data = []
for page, doc in documents.iteritems():
words = parse_html_simple(doc)
parsed = []
for word in words:
if (word in self.english_words
and word not in self.stop_words
and word in self.vocabulary):
parsed.append(word)
string_data.append(' '.join(parsed))
count_data = self.vectorizer.transform(string_data)
# classify the documents
probs = self.classifier.predict_proba(count_data)
return probs<|fim▁end|> | # make some dictionaries to preprocess the words |
<|file_name|>circ_id_complete.js<|end_file_name|><|fim▁begin|>/*
* This file is part of Invenio.
* Copyright (C) 2015 CERN.
*
* Invenio is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation; either version 2 of the
* License, or (at your option) any later version.
*
* Invenio is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Invenio; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
*/
define(
[
'jquery',
'js/other/awesomplete'
],
function($) {
$('.circ_id_complete').each(function(i, element) {
var awesomplete = new Awesomplete(element, {list: []});
var entity = $(element).data('entity');
var last_input = null;
function success(data) {
data = JSON.parse(data);<|fim▁hole|> res.push({label: val.value, value: val.id});
});
awesomplete.list = res;
awesomplete.evaluate();
}
$(element).on('input', function(event) {
var search = {entity: entity, search: event.target.value};
var ajax_query = {
type: "POST",
url: "/circulation/api/entity/search_autocomplete",
data: JSON.stringify(JSON.stringify(search)),
success: success,
contentType: 'application/json',
};
function run() {
var now = new Date();
if (now - last_input > 800) {
$.ajax(ajax_query);
}
}
last_input = new Date();
setTimeout(run, 1000);
});
});
});<|fim▁end|> | var res = [];
$(data).each(function(i, val) { |
<|file_name|>controlserver.py<|end_file_name|><|fim▁begin|>import logging
from queue import Queue
from gi.repository import GObject
from lib.commands import ControlServerCommands
from lib.tcpmulticonnection import TCPMultiConnection
from lib.response import NotifyResponse
class ControlServer(TCPMultiConnection):
def __init__(self, pipeline):
'''Initialize server and start listening.'''
self.log = logging.getLogger('ControlServer')
super().__init__(port=9999)
self.command_queue = Queue()
self.commands = ControlServerCommands(pipeline)
def on_accepted(self, conn, addr):
'''Asynchronous connection listener.
Starts a handler for each connection.'''
self.log.debug('setting gobject io-watch on connection')
GObject.io_add_watch(conn, GObject.IO_IN, self.on_data, [''])
def on_data(self, conn, _, leftovers, *args):
'''Asynchronous connection handler.
Pushes data from socket into command queue linewise'''
close_after = False
try:
while True:
try:
leftovers.append(conn.recv(4096).decode(errors='replace'))
if len(leftovers[-1]) == 0:
self.log.info("Socket was closed")
leftovers.pop()
close_after = True
break
except UnicodeDecodeError as e:
continue
except BlockingIOError:
pass
data = "".join(leftovers)
del leftovers[:]
lines = data.split('\n')<|fim▁hole|>
line = line.strip()
# 'quit' = remote wants us to close the connection
if line == 'quit' or line == 'exit':
self.log.info("Client asked us to close the Connection")
self.close_connection(conn)
return False
self.log.debug('re-starting on_loop scheduling')
GObject.idle_add(self.on_loop)
self.command_queue.put((line, conn))
if close_after:
self.close_connection(conn)
return False
if lines[-1] != '':
self.log.debug("remaining %r", lines[-1])
leftovers.append(lines[-1])
return True
def on_loop(self):
'''Command handler. Processes commands in the command queue whenever
nothing else is happening (registered as GObject idle callback)'''
self.log.debug('on_loop called')
if self.command_queue.empty():
self.log.debug('command_queue is empty again, '
'stopping on_loop scheduling')
return False
line, requestor = self.command_queue.get()
words = line.split()
if len(words) < 1:
self.log.debug('command_queue is empty again, '
'stopping on_loop scheduling')
return True
command = words[0]
args = words[1:]
self.log.info("processing command %r with args %s", command, args)
response = None
try:
# deny calling private methods
if command[0] == '_':
self.log.info('private methods are not callable')
raise KeyError()
command_function = self.commands.__class__.__dict__[command]
except KeyError as e:
self.log.info("received unknown command %s", command)
response = "error unknown command %s\n" % command
else:
try:
responseObject = command_function(self.commands, *args)
except Exception as e:
message = str(e) or "<no message>"
response = "error %s\n" % message
else:
if isinstance(responseObject, NotifyResponse):
responseObject = [responseObject]
if isinstance(responseObject, list):
for obj in responseObject:
signal = "%s\n" % str(obj)
for conn in self.currentConnections:
self._schedule_write(conn, signal)
else:
response = "%s\n" % str(responseObject)
finally:
if response is not None and requestor in self.currentConnections:
self._schedule_write(requestor, response)
return False
def _schedule_write(self, conn, message):
queue = self.currentConnections[conn]
self.log.debug('re-starting on_write[%u] scheduling', conn.fileno())
GObject.io_add_watch(conn, GObject.IO_OUT, self.on_write)
queue.put(message)
def on_write(self, conn, *args):
self.log.debug('on_write[%u] called', conn.fileno())
try:
queue = self.currentConnections[conn]
except KeyError:
return False
if queue.empty():
self.log.debug('write_queue[%u] is empty again, '
'stopping on_write scheduling',
conn.fileno())
return False
message = queue.get()
try:
conn.send(message.encode())
except Exception as e:
self.log.warning('failed to send message', exc_info=True)
return True<|fim▁end|> | for line in lines[:-1]:
self.log.debug("got line: %r", line) |
<|file_name|>generate_std_names.py<|end_file_name|><|fim▁begin|># (C) British Crown Copyright 2010 - 2014, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
A script to convert the standard names information from the provided XML
file into a Python dictionary format.
Takes two arguments: the first is the XML file to process and the second
is the name of the file to write the Python dictionary file into.
By default, Iris will use the source XML file:
etc/cf-standard-name-table.xml
as obtained from:
http://cf-pcmdi.llnl.gov/documents/cf-standard-names
"""
from __future__ import (absolute_import, division, print_function)<|fim▁hole|>import xml.etree.ElementTree as ET
STD_VALUES_FILE_TEMPLATE = '''
# (C) British Crown Copyright 2010 - 2014, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
This file contains a dictionary of standard value names that are mapped
to another dictionary of other standard name attributes. Currently only
the `canonical_unit` exists in these attribute dictionaries.
This file is automatically generated. Do not edit this file by hand.
The file will be generated during a standard build/installation:
python setup.py build
python setup.py install
Also, the file can be re-generated in the source distribution via:
python setup.py std_names
Or for more control (e.g. to use an alternative XML file) via:
python tools/generate_std_names.py XML_FILE MODULE_FILE
"""
from __future__ import (absolute_import, division, print_function)
STD_NAMES = '''.lstrip()
def process_name_table(tree, element_name, *child_elements):
"""
Yields a series of dictionaries with the key being the id of the entry element and the value containing
another dictionary mapping other attributes of the standard name to their values, e.g. units, description, grib value etc.
"""
for elem in tree.iterfind(element_name):
sub_section = {}
for child_elem in child_elements:
found_elem = elem.find(child_elem)
sub_section[child_elem] = found_elem.text if found_elem is not None else None
yield {elem.get("id") : sub_section}
def to_dict(infile, outfile):
values = {}
aliases = {}
tree = ET.parse(infile)
for section in process_name_table(tree, 'entry', 'canonical_units'):
values.update(section)
for section in process_name_table(tree, 'alias', 'entry_id'):
aliases.update(section)
for key, valued in aliases.iteritems():
values.update({
key : {'canonical_units' : values.get(valued['entry_id']).get('canonical_units')}
})
outfile.write(STD_VALUES_FILE_TEMPLATE + pprint.pformat(values))
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description='Create Python code from CF standard name XML.')
parser.add_argument('input', type=argparse.FileType(),
metavar='INPUT',
help='Path to CF standard name XML')
parser.add_argument('output', type=argparse.FileType('w'),
metavar='OUTPUT',
help='Path to resulting Python code')
args = parser.parse_args()
to_dict(args.input, args.output)<|fim▁end|> |
import argparse
import pprint |
<|file_name|>InterfaceWithLambda.java<|end_file_name|><|fim▁begin|>// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.android.desugar.testdata;
import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.stream.Collectors;
public interface InterfaceWithLambda {
String ZERO = String.valueOf(0);
List<String> DIGITS =<|fim▁hole|> ImmutableList.of(0, 1)
.stream()
.map(i -> i == 0 ? ZERO : String.valueOf(i))
.collect(Collectors.toList());
}<|fim▁end|> | |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>import os
from glob import glob
from pyramid.path import AssetResolver
from reportlab.lib.units import mm
from reportlab.pdfgen import canvas
from ..models import generate_random_digest
__all__ = [
'generate_random_filename',
'delete_files',
'NumberedCanvas'
]
def generate_random_filename(path=None, extension='pdf'):
r = AssetResolver('erp')
path = path or r.resolve('static/temp').abspath()
if not os.path.exists(path):
os.mkdir(path)
filename = generate_random_digest()
return '/'.join([path, '{filename}.{extension}'.format(filename=filename, extension=extension)])
def delete_files(expression):
files = glob(expression)<|fim▁hole|>
class NumberedCanvas(canvas.Canvas):
def __init__(self, *args, **kwargs):
canvas.Canvas.__init__(self, *args, **kwargs)
self._saved_page_states = []
def showPage(self):
self._saved_page_states.append(dict(self.__dict__))
self._startPage()
def save(self):
"""add page info to each page (page x of y)"""
num_pages = len(self._saved_page_states)
for state in self._saved_page_states:
self.__dict__.update(state)
self.draw_page_number(num_pages)
canvas.Canvas.showPage(self)
canvas.Canvas.save(self)
def draw_page_number(self, page_count):
self.setFont("Helvetica", 7)
self.drawRightString(200*mm, 10*mm,
"Page %d of %d" % (self._pageNumber, page_count))<|fim▁end|> | if files:
os.remove(*files)
|
<|file_name|>interfaces.py<|end_file_name|><|fim▁begin|>#
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#############################################<|fim▁hole|># WARNING #
#############################################
#
# This file is auto generated by the resource
# module builder playbook.
#
# Do not edit this file manually.
#
# Changes to this file will be over written
# by the resource module builder.
#
# Changes should be made in the model used to
# generate this file or in the resource module
# builder template.
#
#############################################
"""
The arg spec for the nxos_interfaces module
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
class InterfacesArgs(object): # pylint: disable=R0903
"""The arg spec for the nxos_interfaces module
"""
def __init__(self, **kwargs):
pass
argument_spec = {
'config': {
'elements': 'dict',
'options': {
'description': {
'type': 'str'
},
'duplex': {
'choices': ['full', 'half', 'auto'],
'type': 'str'
},
'enabled': {
'default': True,
'type': 'bool'
},
'fabric_forwarding_anycast_gateway': {
'type': 'bool'
},
'ip_forward': {
'type': 'bool'
},
'mode': {
'choices': ['layer2', 'layer3'],
'type': 'str'
},
'mtu': {
'type': 'str'
},
'name': {
'required': True,
'type': 'str'
},
'speed': {
'type': 'str'
}
},
'type': 'list'
},
'state': {
'choices': ['merged', 'replaced', 'overridden', 'deleted'],
'default': 'merged',
'type': 'str'
}
} # pylint: disable=C0301<|fim▁end|> | |
<|file_name|>process_test.go<|end_file_name|><|fim▁begin|>package gophpfpm_test
import (
"os"
"path"
"testing"
"time"
"github.com/yookoala/gophpfpm"
)
var username, basepath, pathToPhpFpm string
func init() {
var err error<|fim▁hole|> basepath = path.Join(basepath, "_test")
// defined in environment
pathToPhpFpm = "/usr/sbin/php5-fpm"
if envPath := os.Getenv("PHPFPM_PATH"); envPath != "" {
pathToPhpFpm = envPath
}
username = os.Getenv("USER")
}
func TestNew(t *testing.T) {
path := pathToPhpFpm
process := gophpfpm.NewProcess(path)
if want, have := path, process.Exec; want != have {
t.Errorf("expected %#v, got %#v", want, have)
}
}
func TestProcess_SetPrefix(t *testing.T) {
path := pathToPhpFpm
process := gophpfpm.NewProcess(path)
process.SetDatadir(basepath + "/var")
if want, have := basepath+"/var/phpfpm.pid", process.PidFile; want != have {
t.Errorf("expected %#v, got %#v", want, have)
}
if want, have := basepath+"/var/phpfpm.error_log", process.ErrorLog; want != have {
t.Errorf("expected %#v, got %#v", want, have)
}
if want, have := basepath+"/var/phpfpm.sock", process.Listen; want != have {
t.Errorf("expected %#v, got %#v", want, have)
}
}
func TestProcess_Address(t *testing.T) {
var network, address string
process := &gophpfpm.Process{}
process.Listen = "192.168.123.456:12345"
network, address = process.Address()
if want, have := "tcp", network; want != have {
t.Errorf("expected %#v; got %#v", want, have)
}
if want, have := "192.168.123.456:12345", address; want != have {
t.Errorf("expected %#v; got %#v", want, have)
}
process.Listen = "12345"
network, address = process.Address()
if want, have := "tcp", network; want != have {
t.Errorf("expected %#v; got %#v", want, have)
}
if want, have := ":12345", address; want != have {
t.Errorf("expected %#v; got %#v", want, have)
}
process.Listen = "hello.sock"
network, address = process.Address()
if want, have := "unix", network; want != have {
t.Errorf("expected %#v; got %#v", want, have)
}
if want, have := "hello.sock", address; want != have {
t.Errorf("expected %#v; got %#v", want, have)
}
process.Listen = "/path/to/hello.sock"
network, address = process.Address()
if want, have := "unix", network; want != have {
t.Errorf("expected %#v; got %#v", want, have)
}
if want, have := "/path/to/hello.sock", address; want != have {
t.Errorf("expected %#v; got %#v", want, have)
}
}
func TestProcess_StartStop(t *testing.T) {
path := pathToPhpFpm
process := gophpfpm.NewProcess(path)
process.SetDatadir(basepath + "/var")
process.User = username
process.SaveConfig(basepath + "/etc/test.startstop.conf")
if err := process.Start(); err != nil {
t.Errorf("unexpected error: %s", err.Error())
return
}
go func() {
// do something that needs phpfpm
// ...
time.Sleep(time.Millisecond * 50)
if err := process.Stop(); err != nil {
panic(err)
}
}()
if err := process.Wait(); err != nil {
t.Errorf("unexpected error: %#v", err.Error())
}
}
func ExampleProcess() {
process := gophpfpm.NewProcess(pathToPhpFpm)
// SetDatadir equals to running these 3 settings:
// process.PidFile = basepath + "/phpfpm.pid"
// process.ErrorLog = basepath + "/phpfpm.error_log"
// process.Listen = basepath + "/phpfpm.sock"
process.SetDatadir(basepath + "/var")
process.User = username
// save the config file to basepath + "/etc/php-fpm.conf"
process.SaveConfig(basepath + "/etc/example.conf")
process.Start()
go func() {
// do something that needs phpfpm
// ...
time.Sleep(time.Millisecond * 50)
process.Stop()
}()
process.Wait()
// Output:
}<|fim▁end|> | basepath, err = os.Getwd()
if err != nil {
panic(err)
} |
<|file_name|>version.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | __version__ = "master" |
<|file_name|>input.rs<|end_file_name|><|fim▁begin|>use super::errors::{Error, Result};
use super::system::System;
use super::window::Window;
use crate::internal_derive::DependenciesFrom;
use glium::glutin::event::{
DeviceEvent, ElementState, Event, KeyboardInput, StartCause, VirtualKeyCode, WindowEvent,
};
use math::Vec2f;
use num_traits::Zero;
use std::vec::Vec;
pub use glium::glutin::event::{MouseButton, VirtualKeyCode as Scancode};
pub type Sensitivity = f32;
pub enum Gesture {
NoGesture,
KeyHold(VirtualKeyCode),
KeyTrigger(VirtualKeyCode),
ButtonHold(MouseButton),
ButtonTrigger(MouseButton),
AnyOf(Vec<Gesture>),
AllOf(Vec<Gesture>),
QuitTrigger,
}
pub enum Analog2d {
NoAnalog2d,
Mouse {
sensitivity: Sensitivity,
},
Gestures {
x_positive: Gesture,
x_negative: Gesture,
y_positive: Gesture,
y_negative: Gesture,
step: Sensitivity,
},
Sum {
analogs: Vec<Analog2d>,
},
}
impl Input {
pub(crate) fn reset(&mut self) {
self.current_update_index += 1;
self.mouse_rel = Vec2f::zero();
}
pub(crate) fn handle_event(&mut self, event: Event<'_, ()>) -> bool {
match event {
Event::NewEvents(StartCause::WaitCancelled { .. }) => {}
Event::NewEvents(StartCause::ResumeTimeReached {
requested_resume, ..
}) if requested_resume > std::time::Instant::now() => {}
Event::NewEvents(_) => {
self.new_step = true;
}
Event::MainEventsCleared => {
let new_step = self.new_step;
self.new_step = false;
return new_step;
}
Event::WindowEvent {
event: WindowEvent::CloseRequested,
..
} => {
self.quit_requested_index = self.current_update_index;
}
Event::WindowEvent {
event:
WindowEvent::KeyboardInput {
input:
KeyboardInput {
state,
virtual_keycode: Some(virtual_keycode),
..
},
..
},
..
} => {
self.keyboard_state[virtual_keycode as usize] = match state {
ElementState::Pressed => ButtonState::Down(self.current_update_index),
ElementState::Released => ButtonState::Up(self.current_update_index),
}
}
Event::DeviceEvent {
event: DeviceEvent::Motion { axis, value },
..
} => {
if self.mouse_enabled && axis < 2 {
self.mouse_rel[axis as usize] += value as f32;
}
}
Event::DeviceEvent {
event: DeviceEvent::Button { button, state },
..
} => {
let button = button as usize;
if self.mouse_enabled && button < NUM_MOUSE_BUTTONS {
self.mouse_button_state[button] = match state {
ElementState::Pressed => ButtonState::Down(self.current_update_index),
ElementState::Released => ButtonState::Up(self.current_update_index),
}
}
}
_ => {}
}
false
}
pub fn set_cursor_grabbed(&mut self, grabbed: bool) {
self.new_mouse_grabbed = grabbed
}
pub fn set_mouse_enabled(&mut self, enable: bool) {
self.mouse_enabled = enable;
}
pub fn poll_gesture(&self, gesture: &Gesture) -> bool {
match *gesture {
Gesture::QuitTrigger => self.quit_requested_index == self.current_update_index,
Gesture::KeyHold(code) => match self.keyboard_state[code as usize] {
ButtonState::Down(_) => true,
ButtonState::Up(_) => false,
},
Gesture::KeyTrigger(code) => match self.keyboard_state[code as usize] {
ButtonState::Down(index) => self.current_update_index == index,
ButtonState::Up(_) => false,
},
Gesture::ButtonHold(button) => {
match self.mouse_button_state[mouse_button_to_index(button)] {
ButtonState::Down(_) => true,
ButtonState::Up(_) => false,
}
}
Gesture::ButtonTrigger(button) => {
match self.mouse_button_state[mouse_button_to_index(button)] {
ButtonState::Down(index) => self.current_update_index == index,
ButtonState::Up(_) => false,
}
}
Gesture::AnyOf(ref subgestures) => subgestures
.iter()
.any(|subgesture| self.poll_gesture(subgesture)),
Gesture::AllOf(ref subgestures) => subgestures
.iter()
.all(|subgesture| self.poll_gesture(subgesture)),
Gesture::NoGesture => false,
}
}
pub fn poll_analog2d(&self, motion: &Analog2d) -> Vec2f {
match *motion {
Analog2d::Sum { ref analogs } => analogs
.iter()
.map(|analog| self.poll_analog2d(analog))
.fold(Vec2f::zero(), |x, y| x + y),
Analog2d::Mouse { sensitivity } => self.mouse_rel * sensitivity,
Analog2d::Gestures {
ref x_positive,
ref x_negative,
ref y_positive,
ref y_negative,
step,
} => Vec2f::new(
if self.poll_gesture(x_positive) {
step
} else if self.poll_gesture(x_negative) {
-step
} else {
0.0
},
if self.poll_gesture(y_positive) {
step
} else if self.poll_gesture(y_negative) {
-step
} else {
0.0
},
),
Analog2d::NoAnalog2d => Vec2f::zero(),
}
}
}
#[derive(DependenciesFrom)]
pub struct Dependencies<'context> {
window: &'context mut Window,<|fim▁hole|>
pub struct Input {
current_update_index: UpdateIndex,
keyboard_state: [ButtonState; NUM_SCAN_CODES],
mouse_button_state: [ButtonState; NUM_MOUSE_BUTTONS],
quit_requested_index: UpdateIndex,
new_step: bool,
mouse_enabled: bool,
mouse_grabbed: bool,
new_mouse_grabbed: bool,
mouse_rel: Vec2f,
}
impl<'context> System<'context> for Input {
type Dependencies = Dependencies<'context>;
type Error = Error;
fn create(_deps: Dependencies) -> Result<Self> {
Ok(Input {
current_update_index: 1,
keyboard_state: [ButtonState::Up(0); NUM_SCAN_CODES],
mouse_button_state: [ButtonState::Up(0); NUM_MOUSE_BUTTONS],
quit_requested_index: 0,
new_step: false,
mouse_enabled: true,
new_mouse_grabbed: true,
mouse_grabbed: false,
mouse_rel: Vec2f::zero(),
})
}
fn debug_name() -> &'static str {
"input"
}
fn update(&mut self, deps: Dependencies) -> Result<()> {
if self.new_mouse_grabbed != self.mouse_grabbed {
self.mouse_grabbed = self.new_mouse_grabbed;
deps.window
.facade()
.gl_window()
.window()
.set_cursor_grab(self.mouse_grabbed)
.ok();
deps.window
.facade()
.gl_window()
.window()
.set_cursor_visible(!self.mouse_grabbed);
}
if self.mouse_grabbed {
let _ = deps.window.facade().gl_window().window();
}
Ok(())
}
}
const NUM_SCAN_CODES: usize = 512;
const NUM_MOUSE_BUTTONS: usize = 256;
type UpdateIndex = u32;
#[derive(Copy, Clone)]
enum ButtonState {
Up(UpdateIndex),
Down(UpdateIndex),
}
fn mouse_button_to_index(button: MouseButton) -> usize {
match button {
MouseButton::Left => 1,
MouseButton::Middle => 2,
MouseButton::Right => 3,
MouseButton::Other(index) => ((index + 4) as usize).min(NUM_MOUSE_BUTTONS - 1),
}
}<|fim▁end|> | } |
<|file_name|>multiples_of_3_and_5.py<|end_file_name|><|fim▁begin|>#Kunal Gautam
#Codewars : @Kunalpod
#Problem name: Multiples of 3 and 5<|fim▁hole|>def solution(number):
return sum([x for x in range(3,number) if x%3==0 or x%5==0])<|fim▁end|> | #Problem level: 6 kyu
|
<|file_name|>webpack.config.js<|end_file_name|><|fim▁begin|>// webpack.config.js
module.exports = {
module: {<|fim▁hole|> use: [
'vue-style-loader',
{
loader: 'css-loader',
options: { importLoaders: 1 }
},
'postcss-loader'
]
}
]
}
}<|fim▁end|> | rules: [
{
test: /\.css$/, |
<|file_name|>issue-32995.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.<|fim▁hole|>
fn main() {
let x: usize() = 1;
//~^ ERROR parenthesized parameters may only be used with a trait
//~| WARN previously accepted
let b: ::std::boxed()::Box<_> = Box::new(1);
//~^ ERROR parenthesized parameters may only be used with a trait
//~| WARN previously accepted
let p = ::std::str::()::from_utf8(b"foo").unwrap();
//~^ ERROR parenthesized parameters may only be used with a trait
//~| WARN previously accepted
let p = ::std::str::from_utf8::()(b"foo").unwrap();
//~^ ERROR parenthesized parameters may only be used with a trait
//~| WARN previously accepted
let o : Box<::std::marker()::Send> = Box::new(1);
//~^ ERROR parenthesized parameters may only be used with a trait
//~| WARN previously accepted
let o : Box<Send + ::std::marker()::Sync> = Box::new(1);
//~^ ERROR parenthesized parameters may only be used with a trait
//~| WARN previously accepted
}
fn foo<X:Default>() {
let d : X() = Default::default();
//~^ ERROR parenthesized parameters may only be used with a trait
//~| WARN previously accepted
}<|fim▁end|> |
#![allow(unused)] |
<|file_name|>event_service_config.pb.go<|end_file_name|><|fim▁begin|>// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.25.0
// protoc v3.16.0
// source: envoy/api/v2/core/event_service_config.proto
package envoy_api_v2_core<|fim▁hole|> proto "github.com/golang/protobuf/proto"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// This is a compile-time assertion that a sufficiently up-to-date version
// of the legacy proto package is being used.
const _ = proto.ProtoPackageIsVersion4
// [#not-implemented-hide:]
// Configuration of the event reporting service endpoint.
type EventServiceConfig struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Types that are assignable to ConfigSourceSpecifier:
// *EventServiceConfig_GrpcService
ConfigSourceSpecifier isEventServiceConfig_ConfigSourceSpecifier `protobuf_oneof:"config_source_specifier"`
}
func (x *EventServiceConfig) Reset() {
*x = EventServiceConfig{}
if protoimpl.UnsafeEnabled {
mi := &file_envoy_api_v2_core_event_service_config_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *EventServiceConfig) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*EventServiceConfig) ProtoMessage() {}
func (x *EventServiceConfig) ProtoReflect() protoreflect.Message {
mi := &file_envoy_api_v2_core_event_service_config_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use EventServiceConfig.ProtoReflect.Descriptor instead.
func (*EventServiceConfig) Descriptor() ([]byte, []int) {
return file_envoy_api_v2_core_event_service_config_proto_rawDescGZIP(), []int{0}
}
func (m *EventServiceConfig) GetConfigSourceSpecifier() isEventServiceConfig_ConfigSourceSpecifier {
if m != nil {
return m.ConfigSourceSpecifier
}
return nil
}
func (x *EventServiceConfig) GetGrpcService() *GrpcService {
if x, ok := x.GetConfigSourceSpecifier().(*EventServiceConfig_GrpcService); ok {
return x.GrpcService
}
return nil
}
type isEventServiceConfig_ConfigSourceSpecifier interface {
isEventServiceConfig_ConfigSourceSpecifier()
}
type EventServiceConfig_GrpcService struct {
// Specifies the gRPC service that hosts the event reporting service.
GrpcService *GrpcService `protobuf:"bytes,1,opt,name=grpc_service,json=grpcService,proto3,oneof"`
}
func (*EventServiceConfig_GrpcService) isEventServiceConfig_ConfigSourceSpecifier() {}
var File_envoy_api_v2_core_event_service_config_proto protoreflect.FileDescriptor
var file_envoy_api_v2_core_event_service_config_proto_rawDesc = []byte{
0x0a, 0x2c, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, 0x2f, 0x63,
0x6f, 0x72, 0x65, 0x2f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63,
0x65, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x11,
0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x63, 0x6f, 0x72,
0x65, 0x1a, 0x24, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x76, 0x32, 0x2f,
0x63, 0x6f, 0x72, 0x65, 0x2f, 0x67, 0x72, 0x70, 0x63, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63,
0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1e, 0x75, 0x64, 0x70, 0x61, 0x2f, 0x61, 0x6e,
0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x6d, 0x69, 0x67, 0x72, 0x61, 0x74,
0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1d, 0x75, 0x64, 0x70, 0x61, 0x2f, 0x61, 0x6e,
0x6e, 0x6f, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2f, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73,
0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x17, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65,
0x2f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x61, 0x74, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22,
0x79, 0x0a, 0x12, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x43,
0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x43, 0x0a, 0x0c, 0x67, 0x72, 0x70, 0x63, 0x5f, 0x73, 0x65,
0x72, 0x76, 0x69, 0x63, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x65, 0x6e,
0x76, 0x6f, 0x79, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x2e,
0x47, 0x72, 0x70, 0x63, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x48, 0x00, 0x52, 0x0b, 0x67,
0x72, 0x70, 0x63, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x42, 0x1e, 0x0a, 0x17, 0x63, 0x6f,
0x6e, 0x66, 0x69, 0x67, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x73, 0x70, 0x65, 0x63,
0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x03, 0xf8, 0x42, 0x01, 0x42, 0x60, 0x0a, 0x1f, 0x69, 0x6f,
0x2e, 0x65, 0x6e, 0x76, 0x6f, 0x79, 0x70, 0x72, 0x6f, 0x78, 0x79, 0x2e, 0x65, 0x6e, 0x76, 0x6f,
0x79, 0x2e, 0x61, 0x70, 0x69, 0x2e, 0x76, 0x32, 0x2e, 0x63, 0x6f, 0x72, 0x65, 0x42, 0x17, 0x45,
0x76, 0x65, 0x6e, 0x74, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69,
0x67, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0xf2, 0x98, 0xfe, 0x8f, 0x05, 0x16, 0x12, 0x14,
0x65, 0x6e, 0x76, 0x6f, 0x79, 0x2e, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2e, 0x63, 0x6f, 0x72,
0x65, 0x2e, 0x76, 0x33, 0xba, 0x80, 0xc8, 0xd1, 0x06, 0x02, 0x10, 0x01, 0x62, 0x06, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x33,
}
var (
file_envoy_api_v2_core_event_service_config_proto_rawDescOnce sync.Once
file_envoy_api_v2_core_event_service_config_proto_rawDescData = file_envoy_api_v2_core_event_service_config_proto_rawDesc
)
func file_envoy_api_v2_core_event_service_config_proto_rawDescGZIP() []byte {
file_envoy_api_v2_core_event_service_config_proto_rawDescOnce.Do(func() {
file_envoy_api_v2_core_event_service_config_proto_rawDescData = protoimpl.X.CompressGZIP(file_envoy_api_v2_core_event_service_config_proto_rawDescData)
})
return file_envoy_api_v2_core_event_service_config_proto_rawDescData
}
var file_envoy_api_v2_core_event_service_config_proto_msgTypes = make([]protoimpl.MessageInfo, 1)
var file_envoy_api_v2_core_event_service_config_proto_goTypes = []interface{}{
(*EventServiceConfig)(nil), // 0: envoy.api.v2.core.EventServiceConfig
(*GrpcService)(nil), // 1: envoy.api.v2.core.GrpcService
}
var file_envoy_api_v2_core_event_service_config_proto_depIdxs = []int32{
1, // 0: envoy.api.v2.core.EventServiceConfig.grpc_service:type_name -> envoy.api.v2.core.GrpcService
1, // [1:1] is the sub-list for method output_type
1, // [1:1] is the sub-list for method input_type
1, // [1:1] is the sub-list for extension type_name
1, // [1:1] is the sub-list for extension extendee
0, // [0:1] is the sub-list for field type_name
}
func init() { file_envoy_api_v2_core_event_service_config_proto_init() }
func file_envoy_api_v2_core_event_service_config_proto_init() {
if File_envoy_api_v2_core_event_service_config_proto != nil {
return
}
file_envoy_api_v2_core_grpc_service_proto_init()
if !protoimpl.UnsafeEnabled {
file_envoy_api_v2_core_event_service_config_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*EventServiceConfig); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
file_envoy_api_v2_core_event_service_config_proto_msgTypes[0].OneofWrappers = []interface{}{
(*EventServiceConfig_GrpcService)(nil),
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_envoy_api_v2_core_event_service_config_proto_rawDesc,
NumEnums: 0,
NumMessages: 1,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_envoy_api_v2_core_event_service_config_proto_goTypes,
DependencyIndexes: file_envoy_api_v2_core_event_service_config_proto_depIdxs,
MessageInfos: file_envoy_api_v2_core_event_service_config_proto_msgTypes,
}.Build()
File_envoy_api_v2_core_event_service_config_proto = out.File
file_envoy_api_v2_core_event_service_config_proto_rawDesc = nil
file_envoy_api_v2_core_event_service_config_proto_goTypes = nil
file_envoy_api_v2_core_event_service_config_proto_depIdxs = nil
}<|fim▁end|> |
import (
_ "github.com/cncf/xds/go/udpa/annotations"
_ "github.com/envoyproxy/protoc-gen-validate/validate" |
<|file_name|>basic_block.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use llvm;
use llvm::{BasicBlockRef};
use trans::value::{Users, Value};
use std::iter::{Filter, Map};
<|fim▁hole|>pub struct BasicBlock(pub BasicBlockRef);
pub type Preds<'a> = Map<'a, Value, BasicBlock, Filter<'a, Value, Users>>;
/**
* Wrapper for LLVM BasicBlockRef
*/
impl BasicBlock {
pub fn get(&self) -> BasicBlockRef {
let BasicBlock(v) = *self; v
}
pub fn as_value(self) -> Value {
unsafe {
Value(llvm::LLVMBasicBlockAsValue(self.get()))
}
}
pub fn pred_iter(self) -> Preds<'static> {
self.as_value().user_iter()
.filter(|user| user.is_a_terminator_inst())
.map(|user| user.get_parent().unwrap())
}
pub fn get_single_predecessor(self) -> Option<BasicBlock> {
let mut iter = self.pred_iter();
match (iter.next(), iter.next()) {
(Some(first), None) => Some(first),
_ => None
}
}
}<|fim▁end|> | |
<|file_name|>q_networks.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import numpy as np
import os
import sys
from keras.layers import Activation, Dense, Input
from keras.layers.normalization import BatchNormalization
from keras.models import Model, Sequential
from keras.optimizers import RMSprop
NUM_OF_HIDDEN_NEURONS = 100
QNETWORK_NAME = 'online_network'
<|fim▁hole|>
class QNetworks:
def __init__(self, num_of_actions, num_of_states, num_of_hidden_neurons=NUM_OF_HIDDEN_NEURONS, tau=TAU):
self.NUM_OF_ACTIONS = num_of_actions
self.NUM_OF_HIDDEN_NEURONS = num_of_hidden_neurons
self.NUM_OF_STATES = num_of_states
self.TAU = tau
self.online_net = self.init_model(QNETWORK_NAME)
self.target_net = self.init_model(QNETWORK_NAME)
def do_soft_update(self):
weights = self.online_net.get_weights()
target_weights = self.target_net.get_weights()
for i in xrange(len(weights)):
target_weights[i] = self.TAU*weights[i] + (1.0-self.TAU)*target_weights[i]
self.target_net.set_weights(target_weights)
return
def do_hard_update(self):
weights = self.online_net.get_weights()
target_weights = self.target_net.get_weights()
for i in xrange(len(weights)):
target_weights[i] = weights[i]
self.target_net.set_weights(target_weights)
return
def get_weights(self):
# get weights of the online Q network
return self.online_net.get_weights()
def init_model(self, net_name):
model = Sequential()
model.add(Dense(self.NUM_OF_HIDDEN_NEURONS, input_shape=(self.NUM_OF_STATES,)))
model.add(Activation('relu'))
model.add(Dense(self.NUM_OF_HIDDEN_NEURONS))
model.add(Activation('relu'))
model.add(Dense(self.NUM_OF_HIDDEN_NEURONS))
model.add(Activation('relu'))
model.add(Dense(self.NUM_OF_ACTIONS))
model.add(Activation('linear'))
model.compile(loss='mse', optimizer='rmsprop')
filename = net_name+'/'+net_name
if os.path.isfile(filename+str(0)+'.txt'):
weights = model.get_weights()
for i in xrange(len(weights)):
loaded_weights = np.loadtxt(filename+str(i)+'.txt')
weights[i] = loaded_weights
model.set_weights(weights)
else:
print 'No model', filename, 'found. Creating a new model.'
return model
def save_models(self):
weights = self.online_net.get_weights()
for i in xrange(len(weights)):
np.savetxt(QNETWORK_NAME+'/'+QNETWORK_NAME+str(i)+'.txt', weights[i])
weights = self.target_net.get_weights()
for i in xrange(len(weights)):
np.savetxt(TARGETNET_NAME+'/'+TARGETNET_NAME+str(i)+'.txt', weights[i])
print("Saved models to disk.")<|fim▁end|> | TARGETNET_NAME = 'target_network'
TAU = 0.0001 # soft update / low pass filter
|
<|file_name|>test_base.py<|end_file_name|><|fim▁begin|>from django.core.serializers import json
from django.http import HttpResponseNotFound, HttpResponse
from django.views.generic import View
from rest_test_data.models import Simple
from rest_test_data.views import BaseTestDataRestView
from nose.tools import assert_equal, assert_is_instance
from mock import Mock, patch
def create_request(body=None):
request = Mock()
request.body = body
return request
def test_dispatch_model_not_found():
view = BaseTestDataRestView()
result = view.dispatch(None, app='something', model='notfoundmodel')
assert_is_instance(result, HttpResponseNotFound)
@patch.object(View, 'dispatch')
def test_dispatch_model_found(dispatch):
dispatch.return_value = ''
view = BaseTestDataRestView()
view.dispatch(create_request(), app='rest_test_data', model='simple')
assert_equal(view.model, Simple)
assert_equal(dispatch.call_count, 1)
@patch.object(BaseTestDataRestView, 'get_object')
@patch.object(View, 'dispatch')
def test_dispatch_get_object(dispatch, get_object):
dispatch.return_value = ''
view = BaseTestDataRestView()
result = view.dispatch(
create_request(),
app='rest_test_data',
model='simple',
pk='1'
)
get_object.assert_called_once_with(1, model=Simple)
assert_is_instance(result, HttpResponse)
assert_equal(dispatch.call_count, 1)<|fim▁hole|>def test_dispatch_get_object_failure(get_object):
get_object.side_effect = Exception
view = BaseTestDataRestView()
result = view.dispatch(None, app='rest_test_data', model='simple', pk='1')
get_object.assert_called_once_with(1, model=Simple)
assert_is_instance(result, HttpResponseNotFound)
def test_get_serializer():
view = BaseTestDataRestView()
assert_is_instance(view.serializer, json.Serializer)
@patch.object(View, 'dispatch')
def test_dispatch_wraps_string_result(dispatch):
dispatch.return_value = 'result!'
view = BaseTestDataRestView()
result = view.dispatch(
create_request(),
app='rest_test_data',
model='simple'
)
assert_is_instance(result, HttpResponse)
assert_equal(result['Content-Type'], 'application/json')
assert_equal(result.content, b'result!')
@patch.object(View, 'dispatch')
def test_dispatch_passes_http_response(dispatch):
dispatch.return_value = HttpResponse()
view = BaseTestDataRestView()
result = view.dispatch(
create_request(),
app='rest_test_data',
model='simple'
)
assert_equal(result, dispatch.return_value)
@patch.object(View, 'dispatch')
def test_dispatch_jsons_other(dispatch):
dispatch.return_value = {'test': 'data'}
view = BaseTestDataRestView()
result = view.dispatch(
create_request(),
app='rest_test_data',
model='simple'
)
assert_is_instance(result, HttpResponse)
assert_equal(result['Content-Type'], 'application/json')
assert_equal(result.content, b'{"test": "data"}')
def test_get_object_model():
model = Mock(**{'objects.get.return_value': 'object'})
assert_equal(BaseTestDataRestView.get_object(1, model), 'object')
model.objects.get.assert_called_once_with(pk=1)
@patch('rest_test_data.views.get_model')
def test_get_object_from_string(get_model):
BaseTestDataRestView.get_object('app.model:1')
get_model.assert_called_once_with('app', 'model')
get_model().objects.get.assert_called_once_with(pk=1)
@patch.object(BaseTestDataRestView, 'get_object')
def test_get_data(get_object):
result = BaseTestDataRestView.get_data({'data': {'test': 1},
'objects': {
'test_2': 'app.model:1',
'test_3': ['app.model:1'],
}})
get_object.assert_called_with('app.model:1')
assert_equal(result, {
'test': 1,
'test_2': get_object(),
'test_3': [get_object()]
})<|fim▁end|> |
@patch.object(BaseTestDataRestView, 'get_object') |
<|file_name|>identity_map.py<|end_file_name|><|fim▁begin|>import uuid
def is_username(val):
"""
If the value parses as a UUID, then it's an ID, not a username.
If it does not parse as such, then it must be a username.
"""
try:
uuid.UUID(val)
return False
except ValueError:
return True
def split_ids_and_usernames(identity_ids):
ids = set()
usernames = set()
for val in identity_ids:
if is_username(val):
usernames.add(val)
else:
ids.add(val)
return ids, usernames
class IdentityMap:
r"""
There's a common pattern of having a large batch of Globus Auth Identities which you
want to inspect. For example, you may have a list of identity IDs fetched from
Access Control Lists on Globus Endpoints. In order to display these identities to an
end user, you may want to resolve them to usernames.
However, naively looking up the identities one-by-one is very inefficient. It's best
to do batched lookups with multiple identities at once. In these cases, an
``IdentityMap`` can be used to do those batched lookups for you.
An ``IdentityMap`` is a mapping-like type which converts Identity IDs and Identity
Names to Identity records (dictionaries) using the Globus Auth API.
.. note::
``IdentityMap`` objects are not full Mappings in the same sense as python dicts
and similar objects. By design, they only implement a small part of the Mapping
protocol.
The basic usage pattern is
- create an ``IdentityMap`` with an AuthClient which will be used to call out to
Globus Auth
- seed the ``IdentityMap`` with IDs and Usernames via :py:meth:`~IdentityMap.add` (you
can also do this during initialization)
- retrieve identity IDs or Usernames from the map
Because the map can be populated with a collection of identity IDs and Usernames
prior to lookups being performed, it can improve the efficiency of these operations
up to 100x over individual lookups.
If you attempt to retrieve an identity which has not been previously added to the
map, it will be immediately added. But adding many identities beforehand will
improve performance.
The ``IdentityMap`` will cache its results so that repeated lookups of the same Identity
will not repeat work. It will also map identities both by ID and by Username,
regardless of how they're initially looked up.
.. warning::
If an Identity is not found in Globus Auth, it will trigger a KeyError when
looked up. Your code must be ready to handle KeyErrors when doing a lookup.
Correct usage looks something like so::
ac = globus_sdk.AuthClient(...)
idmap = globus_sdk.IdentityMap(
ac, ["[email protected]", "[email protected]"]
)
idmap.add("[email protected]")
# adding by ID is also valid
idmap.add("c699d42e-d274-11e5-bf75-1fc5bf53bb24")
# map ID to username
assert (
idmap["c699d42e-d274-11e5-bf75-1fc5bf53bb24"]["username"]
== "[email protected]"
)
# map username to ID
assert (
idmap["[email protected]"]["id"]
== "c699d42e-d274-11e5-bf75-1fc5bf53bb24"
)
And simple handling of errors::
try:
record = idmap["[email protected]"]
except KeyError:
username = "NO_SUCH_IDENTITY"
else:
username = record["username"]
or you may achieve this by using the :py:meth:`~.IdentityMap.get` method::<|fim▁hole|>
:param auth_client: The client object which will be used for lookups against Globus Auth
:type auth_client: :class:`AuthClient <globus_sdk.AuthClient>`
:param identity_ids: A list or other iterable of usernames or identity IDs (potentially
mixed together) which will be used to seed the ``IdentityMap`` 's tracking of
unresolved Identities.
:type identity_ids: iterable of str
:param id_batch_size: A non-default batch size to use when communicating with Globus
Auth. Leaving this set to the default is strongly recommended.
:type id_batch_size: int, optional
.. automethodlist:: globus_sdk.IdentityMap
include_methods=__getitem__,__delitem__
""" # noqa
_default_id_batch_size = 100
def __init__(self, auth_client, identity_ids=None, id_batch_size=None):
self.auth_client = auth_client
self.id_batch_size = id_batch_size or self._default_id_batch_size
# uniquify, copy, and split into IDs vs usernames
self.unresolved_ids, self.unresolved_usernames = split_ids_and_usernames(
[] if identity_ids is None else identity_ids
)
# the cache is a dict mapping IDs and Usernames
self._cache = {}
def _fetch_batch_including(self, key):
"""
Batch resolve identifiers (usernames or IDs), being sure to include the desired,
named key. The key also determines which kind of batch will be built --
usernames or IDs.
Store the results in the internal cache.
"""
# for whichever set of unresolved names is appropriate, build the batch to
# lookup up to *at most* the batch size
# also, remove the unresolved names from tracking so that they will not be
# looked up again
batch = []
set_to_use = (
self.unresolved_usernames if is_username(key) else self.unresolved_ids
)
for _ in range(0, min(self.id_batch_size - 1, len(set_to_use))):
batch.append(set_to_use.pop())
# avoid double-adding the provided key, but add it if it's missing
if key not in batch:
batch.append(key)
else:
try:
batch.append(set_to_use.pop())
except KeyError: # empty set, ignore
pass
response = self.auth_client.get_identities(
**(dict(usernames=batch) if is_username(key) else dict(ids=batch))
)
for x in response["identities"]:
self._cache[x["id"]] = x
self._cache[x["username"]] = x
def add(self, identity_id):
"""
Add a username or ID to the ``IdentityMap`` for batch lookups later.
Returns True if the ID was added for lookup.
Returns False if it was rejected as a duplicate of an already known name.
:param identity_id: A string Identity ID or Identity Name (a.k.a. "username") to
add
:type identity_id: str
"""
if identity_id in self._cache:
return False
if is_username(identity_id):
if identity_id in self.unresolved_usernames:
return False
else:
self.unresolved_usernames.add(identity_id)
return True
if identity_id in self.unresolved_ids:
return False
self.unresolved_ids.add(identity_id)
return True
def get(self, key, default=None):
"""
A dict-like get() method which accepts a default value.
"""
try:
return self[key]
except KeyError:
return default
def __getitem__(self, key):
"""
``IdentityMap`` supports dict-like lookups with ``map[key]``
"""
if key not in self._cache:
self._fetch_batch_including(key)
return self._cache[key]
def __delitem__(self, key):
"""
``IdentityMap`` supports ``del map[key]``. Note that this only removes lookup
values from the cache and will not impact the set of unresolved/pending IDs.
"""
del self._cache[key]<|fim▁end|> |
# internally handles the KeyError and returns the default value
record = idmap.get("[email protected]", None)
username = record["username"] if record is not None else "NO_SUCH_IDENTITY" |
<|file_name|>uCM.cpp<|end_file_name|><|fim▁begin|>//----------------------------------------------------------------------------
#pragma hdrstop
#include <stdio.h>
#include <memory>
#include "uCM.h"
//---------------------------------------------------------------------------
#pragma package(smart_init)
#pragma classgroup "Vcl.Controls.TControl"
#pragma resource "*.dfm"
TCM *CM;<|fim▁hole|>__fastcall TCM::TCM(TComponent* Owner)
: TDataModule(Owner)
{
FInstanceOwner = true;
}
__fastcall TCM::~TCM()
{
delete FSMClient;
}
TSMClient* TCM::GetSMClient(void)
{
if (FSMClient == NULL)
FSMClient= new TSMClient(DSRestConnection1, FInstanceOwner);
return FSMClient;
};<|fim▁end|> | //--------------------------------------------------------------------------- |
<|file_name|>package-info.java<|end_file_name|><|fim▁begin|><|fim▁hole|> * by the <a target="_top" href="https://github.com/tail-f-systems/JNC">JNC</a> plugin of <a target="_top" href="http://code.google.com/p/pyang/">pyang</a>.
* The generated classes may be used to manipulate pieces of configuration data
* with NETCONF operations such as edit-config, delete-config and lock. These
* operations are typically accessed through the JNC Java library by
* instantiating Device objects and setting up NETCONF sessions with real
* devices using a compatible YANG model.
* <p>
* @see <a target="_top" href="https://github.com/tail-f-systems/JNC">JNC project page</a>
* @see <a target="_top" href="ftp://ftp.rfc-editor.org/in-notes/rfc6020.txt">RFC 6020: YANG - A Data Modeling Language for the Network Configuration Protocol (NETCONF)</a>
* @see <a target="_top" href="ftp://ftp.rfc-editor.org/in-notes/rfc6241.txt">RFC 6241: Network Configuration Protocol (NETCONF)</a>
* @see <a target="_top" href="ftp://ftp.rfc-editor.org/in-notes/rfc6242.txt">RFC 6242: Using the NETCONF Protocol over Secure Shell (SSH)</a>
* @see <a target="_top" href="http://www.tail-f.com">Tail-f Systems</a>
*/
package hctaEpc.mmeSgsn.interface_.ge;<|fim▁end|> | /**
* This class hierarchy was generated from the Yang module hcta-epc |
<|file_name|>cloud_storage_test_base.py<|end_file_name|><|fim▁begin|># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Base classes for a test and validator which upload results
(reference images, error images) to cloud storage."""
import os
import re
import tempfile
from telemetry import test
from telemetry.core import bitmap
from telemetry.page import cloud_storage
from telemetry.page import page_test
test_data_dir = os.path.abspath(os.path.join(
os.path.dirname(__file__), '..', '..', 'data', 'gpu'))
default_generated_data_dir = os.path.join(test_data_dir, 'generated')
error_image_cloud_storage_bucket = 'chromium-browser-gpu-tests'
def _CompareScreenshotSamples(screenshot, expectations, device_pixel_ratio):
for expectation in expectations:
location = expectation["location"]
x = location[0] * device_pixel_ratio
y = location[1] * device_pixel_ratio
if x < 0 or y < 0 or x > screenshot.width or y > screenshot.height:
raise page_test.Failure(
'Expected pixel location [%d, %d] is out of range on [%d, %d] image' %
(x, y, screenshot.width, screenshot.height))
actual_color = screenshot.GetPixelColor(x, y)
expected_color = bitmap.RgbaColor(
expectation["color"][0],
expectation["color"][1],
expectation["color"][2])
if not actual_color.IsEqual(expected_color, expectation["tolerance"]):
raise page_test.Failure('Expected pixel at ' + str(location) +
' to be ' +
str(expectation["color"]) + " but got [" +
str(actual_color.r) + ", " +
str(actual_color.g) + ", " +
str(actual_color.b) + "]")
class ValidatorBase(page_test.PageTest):
def __init__(self, test_method_name):
super(ValidatorBase, self).__init__(test_method_name)
# Parameters for cloud storage reference images.
self.vendor_id = None
self.device_id = None
self.vendor_string = None
self.device_string = None
self.msaa = False
###
### Routines working with the local disk (only used for local
### testing without a cloud storage account -- the bots do not use
### this code path).
###
def _UrlToImageName(self, url):
image_name = re.sub(r'^(http|https|file)://(/*)', '', url)
image_name = re.sub(r'\.\./', '', image_name)
image_name = re.sub(r'(\.|/|-)', '_', image_name)
return image_name
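  # Illustrative example (not from the original source): a url such as
  # 'http://example.com/pixel/canvas2d.html' becomes the image name
  # 'example_com_pixel_canvas2d_html'.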
def _WriteImage(self, image_path, png_image):
output_dir = os.path.dirname(image_path)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
png_image.WritePngFile(image_path)
def _WriteErrorImages(self, img_dir, img_name, screenshot, ref_png):
full_image_name = img_name + '_' + str(self.options.build_revision)
full_image_name = full_image_name + '.png'
# Always write the failing image.
self._WriteImage(
os.path.join(img_dir, 'FAIL_' + full_image_name), screenshot)
if ref_png:
# Save the reference image.
# This ensures that we get the right revision number.
self._WriteImage(
os.path.join(img_dir, full_image_name), ref_png)
# Save the difference image.
diff_png = screenshot.Diff(ref_png)
self._WriteImage(
os.path.join(img_dir, 'DIFF_' + full_image_name), diff_png)
###
### Cloud storage code path -- the bots use this.
###
def _ComputeGpuInfo(self, tab):
if ((self.vendor_id and self.device_id) or
(self.vendor_string and self.device_string)):
return
browser = tab.browser
if not browser.supports_system_info:
raise Exception('System info must be supported by the browser')
system_info = browser.GetSystemInfo()
if not system_info.gpu:
raise Exception('GPU information was absent')
device = system_info.gpu.devices[0]
if device.vendor_id and device.device_id:
self.vendor_id = device.vendor_id
self.device_id = device.device_id
elif device.vendor_string and device.device_string:
self.vendor_string = device.vendor_string
self.device_string = device.device_string
else:
raise Exception('GPU device information was incomplete')
self.msaa = not (
'disable_multisampling' in system_info.gpu.driver_bug_workarounds)
def _FormatGpuInfo(self, tab):
self._ComputeGpuInfo(tab)
msaa_string = '_msaa' if self.msaa else '_non_msaa'
if self.vendor_id:
return '%s_%04x_%04x%s' % (
self.options.os_type, self.vendor_id, self.device_id, msaa_string)
else:
return '%s_%s_%s%s' % (
self.options.os_type, self.vendor_string, self.device_string,
msaa_string)
def _FormatReferenceImageName(self, img_name, page, tab):
return '%s_v%s_%s.png' % (
img_name,
page.revision,
self._FormatGpuInfo(tab))
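  # Illustrative example (assumed values, not from the original source): with
  # img_name='pixel_canvas2d', page.revision=3 and a vendor/device pair of
  # 0x10de/0x0de1 on Windows without MSAA, this produces
  # 'pixel_canvas2d_v3_win_10de_0de1_non_msaa.png'.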
def _UploadBitmapToCloudStorage(self, bucket, name, bitmap, public=False):
# This sequence of steps works on all platforms to write a temporary
# PNG to disk, following the pattern in bitmap_unittest.py. The key to
# avoiding PermissionErrors seems to be to not actually try to write to
# the temporary file object, but to re-open its name for all operations.
temp_file = tempfile.NamedTemporaryFile().name
bitmap.WritePngFile(temp_file)
cloud_storage.Insert(bucket, name, temp_file, publicly_readable=public)
def _ConditionallyUploadToCloudStorage(self, img_name, page, tab, screenshot):
"""Uploads the screenshot to cloud storage as the reference image
for this test, unless it already exists. Returns True if the
upload was actually performed."""
if not self.options.refimg_cloud_storage_bucket:
raise Exception('--refimg-cloud-storage-bucket argument is required')
cloud_name = self._FormatReferenceImageName(img_name, page, tab)
if not cloud_storage.Exists(self.options.refimg_cloud_storage_bucket,
cloud_name):
self._UploadBitmapToCloudStorage(self.options.refimg_cloud_storage_bucket,
cloud_name,
screenshot)
return True
return False
def _DownloadFromCloudStorage(self, img_name, page, tab):
"""Downloads the reference image for the given test from cloud
storage, returning it as a Telemetry Bitmap object."""
# TODO(kbr): there's a race condition between the deletion of the
# temporary file and gsutil's overwriting it.
if not self.options.refimg_cloud_storage_bucket:
raise Exception('--refimg-cloud-storage-bucket argument is required')
temp_file = tempfile.NamedTemporaryFile().name
cloud_storage.Get(self.options.refimg_cloud_storage_bucket,
self._FormatReferenceImageName(img_name, page, tab),
temp_file)
return bitmap.Bitmap.FromPngFile(temp_file)
def _UploadErrorImagesToCloudStorage(self, image_name, screenshot, ref_img):
"""For a failing run, uploads the failing image, reference image (if
supplied), and diff image (if reference image was supplied) to cloud
storage. This subsumes the functionality of the
archive_gpu_pixel_test_results.py script."""
    machine_name = re.sub(r'\W+', '_', self.options.test_machine_name)
upload_dir = '%s_%s_telemetry' % (self.options.build_revision, machine_name)
base_bucket = '%s/runs/%s' % (error_image_cloud_storage_bucket, upload_dir)
image_name_with_revision = '%s_%s.png' % (
image_name, self.options.build_revision)
self._UploadBitmapToCloudStorage(
base_bucket + '/gen', image_name_with_revision, screenshot,
public=True)
if ref_img:
self._UploadBitmapToCloudStorage(
base_bucket + '/ref', image_name_with_revision, ref_img, public=True)
diff_img = screenshot.Diff(ref_img)
self._UploadBitmapToCloudStorage(
base_bucket + '/diff', image_name_with_revision, diff_img,
public=True)
print ('See http://%s.commondatastorage.googleapis.com/'
'view_test_results.html?%s for this run\'s test results') % (
error_image_cloud_storage_bucket, upload_dir)
def _ValidateScreenshotSamples(self, url,
screenshot, expectations, device_pixel_ratio):
"""Samples the given screenshot and verifies pixel color values.
The sample locations and expected color values are given in expectations.
In case any of the samples do not match the expected color, it raises
a Failure and dumps the screenshot locally or cloud storage depending on
what machine the test is being run."""
try:
_CompareScreenshotSamples(screenshot, expectations, device_pixel_ratio)
except page_test.Failure:
image_name = self._UrlToImageName(url)
if self.options.test_machine_name:
self._UploadErrorImagesToCloudStorage(image_name, screenshot, None)
else:
self._WriteErrorImages(self.options.generated_dir, image_name,
screenshot, None)
raise
class TestBase(test.Test):
@classmethod
def AddTestCommandLineArgs(cls, group):
group.add_option('--build-revision',
help='Chrome revision being tested.',
default="unknownrev")
group.add_option('--upload-refimg-to-cloud-storage',
dest='upload_refimg_to_cloud_storage',
action='store_true', default=False,
help='Upload resulting images to cloud storage as reference images')
group.add_option('--download-refimg-from-cloud-storage',
dest='download_refimg_from_cloud_storage',
action='store_true', default=False,
help='Download reference images from cloud storage')
group.add_option('--refimg-cloud-storage-bucket',
help='Name of the cloud storage bucket to use for reference images; '
'required with --upload-refimg-to-cloud-storage and '
'--download-refimg-from-cloud-storage. Example: '
'"chromium-gpu-archive/reference-images"')
group.add_option('--os-type',
help='Type of operating system on which the pixel test is being run, '
'used only to distinguish different operating systems with the same '
'graphics card. Any value is acceptable, but canonical values are '
'"win", "mac", and "linux", and probably, eventually, "chromeos" '
'and "android").',
default='')
group.add_option('--test-machine-name',
help='Name of the test machine. Specifying this argument causes this '<|fim▁hole|> 'script to upload failure images and diffs to cloud storage directly, '
'instead of relying on the archive_gpu_pixel_test_results.py script.',
default='')
group.add_option('--generated-dir',
help='Overrides the default on-disk location for generated test images '
'(only used for local testing without a cloud storage account)',
default=default_generated_data_dir)<|fim▁end|> | |
<|file_name|>myNN.py<|end_file_name|><|fim▁begin|># References:
#
# https://www.tensorflow.org/guide/low_level_intro
#
# only needed for python 2.7
# from __future__ import absolute_import
# from __future__ import division
# from __future__ import print_function
import numpy as np
from numpy import array
from numpy import float32
# a complete input set on 7 bits
# useful for training various sorts of data
bin7 = array([
[0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1],
[0, 0, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 1, 1],
[0, 0, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 1, 0, 1],
[0, 0, 0, 0, 1, 1, 0],
[0, 0, 0, 0, 1, 1, 1],
[0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 1],
[0, 0, 0, 1, 0, 1, 0],
[0, 0, 0, 1, 0, 1, 1],
[0, 0, 0, 1, 1, 0, 0],
[0, 0, 0, 1, 1, 0, 1],
[0, 0, 0, 1, 1, 1, 0],
[0, 0, 0, 1, 1, 1, 1],
[0, 0, 1, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 1],
[0, 0, 1, 0, 0, 1, 0],
[0, 0, 1, 0, 0, 1, 1],
[0, 0, 1, 0, 1, 0, 0],
[0, 0, 1, 0, 1, 0, 1],
[0, 0, 1, 0, 1, 1, 0],
[0, 0, 1, 0, 1, 1, 1],
[0, 0, 1, 1, 0, 0, 0],
[0, 0, 1, 1, 0, 0, 1],
[0, 0, 1, 1, 0, 1, 0],
[0, 0, 1, 1, 0, 1, 1],
[0, 0, 1, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 0, 1],
[0, 0, 1, 1, 1, 1, 0],
[0, 0, 1, 1, 1, 1, 1],
[0, 1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 1],
[0, 1, 0, 0, 0, 1, 0],
[0, 1, 0, 0, 0, 1, 1],
[0, 1, 0, 0, 1, 0, 0],
[0, 1, 0, 0, 1, 0, 1],
[0, 1, 0, 0, 1, 1, 0],
[0, 1, 0, 0, 1, 1, 1],
[0, 1, 0, 1, 0, 0, 0],
[0, 1, 0, 1, 0, 0, 1],
[0, 1, 0, 1, 0, 1, 0],
[0, 1, 0, 1, 0, 1, 1],
[0, 1, 0, 1, 1, 0, 0],
[0, 1, 0, 1, 1, 0, 1],
[0, 1, 0, 1, 1, 1, 0],
[0, 1, 0, 1, 1, 1, 1],
[0, 1, 1, 0, 0, 0, 0],
[0, 1, 1, 0, 0, 0, 1],
[0, 1, 1, 0, 0, 1, 0],
[0, 1, 1, 0, 0, 1, 1],
[0, 1, 1, 0, 1, 0, 0],
[0, 1, 1, 0, 1, 0, 1],
[0, 1, 1, 0, 1, 1, 0],
[0, 1, 1, 0, 1, 1, 1],
[0, 1, 1, 1, 0, 0, 0],
[0, 1, 1, 1, 0, 0, 1],
[0, 1, 1, 1, 0, 1, 0],
[0, 1, 1, 1, 0, 1, 1],
[0, 1, 1, 1, 1, 0, 0],
[0, 1, 1, 1, 1, 0, 1],
[0, 1, 1, 1, 1, 1, 0],
[0, 1, 1, 1, 1, 1, 1],
[1, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 1],
[1, 0, 0, 0, 0, 1, 0],
[1, 0, 0, 0, 0, 1, 1],
[1, 0, 0, 0, 1, 0, 0],
[1, 0, 0, 0, 1, 0, 1],
[1, 0, 0, 0, 1, 1, 0],
[1, 0, 0, 0, 1, 1, 1],
[1, 0, 0, 1, 0, 0, 0],
[1, 0, 0, 1, 0, 0, 1],
[1, 0, 0, 1, 0, 1, 0],
[1, 0, 0, 1, 0, 1, 1],
[1, 0, 0, 1, 1, 0, 0],
[1, 0, 0, 1, 1, 0, 1],
[1, 0, 0, 1, 1, 1, 0],
[1, 0, 0, 1, 1, 1, 1],
[1, 0, 1, 0, 0, 0, 0],
[1, 0, 1, 0, 0, 0, 1],
[1, 0, 1, 0, 0, 1, 0],
[1, 0, 1, 0, 0, 1, 1],
[1, 0, 1, 0, 1, 0, 0],
[1, 0, 1, 0, 1, 0, 1],
[1, 0, 1, 0, 1, 1, 0],
[1, 0, 1, 0, 1, 1, 1],
[1, 0, 1, 1, 0, 0, 0],
[1, 0, 1, 1, 0, 0, 1],
[1, 0, 1, 1, 0, 1, 0],
[1, 0, 1, 1, 0, 1, 1],
[1, 0, 1, 1, 1, 0, 0],
[1, 0, 1, 1, 1, 0, 1],
[1, 0, 1, 1, 1, 1, 0],
[1, 0, 1, 1, 1, 1, 1],
[1, 1, 0, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0, 1],
[1, 1, 0, 0, 0, 1, 0],
[1, 1, 0, 0, 0, 1, 1],
[1, 1, 0, 0, 1, 0, 0],
[1, 1, 0, 0, 1, 0, 1],
[1, 1, 0, 0, 1, 1, 0],
[1, 1, 0, 0, 1, 1, 1],
[1, 1, 0, 1, 0, 0, 0],
[1, 1, 0, 1, 0, 0, 1],
[1, 1, 0, 1, 0, 1, 0],
[1, 1, 0, 1, 0, 1, 1],
[1, 1, 0, 1, 1, 0, 0],
[1, 1, 0, 1, 1, 0, 1],
[1, 1, 0, 1, 1, 1, 0],
[1, 1, 0, 1, 1, 1, 1],
[1, 1, 1, 0, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 1],
[1, 1, 1, 0, 0, 1, 0],
[1, 1, 1, 0, 0, 1, 1],
[1, 1, 1, 0, 1, 0, 0],
[1, 1, 1, 0, 1, 0, 1],
[1, 1, 1, 0, 1, 1, 0],
[1, 1, 1, 0, 1, 1, 1],
[1, 1, 1, 1, 0, 0, 0],
[1, 1, 1, 1, 0, 0, 1],
[1, 1, 1, 1, 0, 1, 0],
[1, 1, 1, 1, 0, 1, 1],
[1, 1, 1, 1, 1, 0, 0],
[1, 1, 1, 1, 1, 0, 1],
[1, 1, 1, 1, 1, 1, 0],
[1, 1, 1, 1, 1, 1, 1],
])
'''
Train the network to count to 3
column 0: less than 3
column 1: exactly 3
column 2: more than 3
'''
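# For example (illustrative): bin7 row 11 is [0, 0, 0, 1, 0, 1, 1], which has
# exactly three 1-bits, so its label row below is [0, 1, 0].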
count3 = array([
[1, 0, 0],
[1, 0, 0],
[1, 0, 0],
[1, 0, 0],
[1, 0, 0],
[1, 0, 0],
[1, 0, 0],
[0, 1, 0],
[1, 0, 0],
[1, 0, 0],
[1, 0, 0],
[0, 1, 0],
[1, 0, 0],
[0, 1, 0],
[0, 1, 0],
[0, 0, 1],
[1, 0, 0],
[1, 0, 0],
[1, 0, 0],
[0, 1, 0],
[1, 0, 0],
[0, 1, 0],
[0, 1, 0],
[0, 0, 1],
[1, 0, 0],
[0, 1, 0],
[0, 1, 0],
[0, 0, 1],
[0, 1, 0],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[1, 0, 0],
[1, 0, 0],
[1, 0, 0],
[0, 1, 0],
[1, 0, 0],
[0, 1, 0],
[0, 1, 0],
[0, 0, 1],
[1, 0, 0],
[0, 1, 0],
[0, 1, 0],
[0, 0, 1],
[0, 1, 0],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[1, 0, 0],
[0, 1, 0],
[0, 1, 0],
[0, 0, 1],
[0, 1, 0],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 1, 0],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[1, 0, 0],
[1, 0, 0],
[1, 0, 0],
[0, 1, 0],
[1, 0, 0],
[0, 1, 0],
[0, 1, 0],
[0, 0, 1],
[1, 0, 0],
[0, 1, 0],
[0, 1, 0],
[0, 0, 1],
[0, 1, 0],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[1, 0, 0],
[0, 1, 0],
[0, 1, 0],
[0, 0, 1],
[0, 1, 0],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 1, 0],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],<|fim▁hole|> [0, 1, 0],
[0, 0, 1],
[0, 1, 0],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 1, 0],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 1, 0],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
[0, 0, 1],
])
# this takes a looong time to index, and
# python may crash several times before indexing is complete
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Activation
model = Sequential()
model.add(Dense(8,
activation=keras.activations.sigmoid,
))
model.add(Dense(3,
activation=keras.activations.sigmoid,
))
model.compile(
optimizer=tf.train.AdamOptimizer(0.001),
# loss=keras.losses.categorical_crossentropy,
loss=keras.losses.mse,
metrics=[keras.metrics.binary_accuracy]
)
# This is the process I used to train my weights
# model.fit(bin7, count3, epochs=2000)
# myWeights = model.get_weights()
# np.set_printoptions(suppress=True)
# np.set_printoptions(precision=2)
# print('myWeights =', myWeights)
# These are the weights I got, pretty-printed
myWeights = [
# first layer, 7x8
array([[ 1.2 , -1.16, -1.97, 2.16, 0.97, 0.86, -1.2 , 1.12],
[ 1.21, -1.17, -1.97, 2.16, 0.84, 0.76, -1.19, 1.22],
[ 1.19, -1.2 , -1.98, 2.15, 0.87, 0.84, -1.19, 1.13],
[ 1.21, -1.2 , -1.97, 2.15, 0.89, 0.8 , -1.2 , 1.16],
[ 1.21, -1.12, -1.97, 2.16, 0.99, 0.8 , -1.21, 1.18],
[ 1.23, -1.09, -1.98, 2.15, 1.12, 0.81, -1.24, 1.13],
[ 1.24, -1.11, -1.99, 2.14, 1. , 0.77, -1.23, 1.17]],
dtype=float32),
# biases for 8 intermediate nodes
array([-4.57, 3.13, 4. , -4.44, -1.08, -3.11, 4.39, -4.35],
dtype=float32),
# second layer, 8x3
array([[-2.37, -1.54, 2.82],
[ 2.57, -0.09, -3. ],
[ 3.42, -2.18, -4.26],
[-3.27, 1.66, 2.1 ],
[-1.64, 0.12, -0.26],
[-1.85, -1.73, 2.25],
[ 2.71, 0.95, -4.85],
[-2.82, -1.4 , 2.69]], dtype=float32),
# biases for 3 output nodes
array([ 0.21, -0.39, -1.22], dtype=float32)
]
# test the model and your weights
# model.fit(bin7, count3, epochs=1)
# model.set_weights(myWeights)
# predict3 = model.predict(bin7)
# np.set_printoptions(suppress=True)
# np.set_printoptions(precision=1)
# print('prediction =', predict3)
Examples = {
'count3' : [ bin7, count3, model, myWeights ],
}<|fim▁end|> | [0, 0, 1],
[0, 0, 1],
[1, 0, 0],
[0, 1, 0], |
<|file_name|>command.js<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/**
* @fileoverview A command is an abstraction of an action a user can do in the
* UI.
*
 * When the focus changes in the document, a canExecute event is dispatched on
 * the active element for each command. By listening to this event you can
* enable and disable the command by setting the event.canExecute property.
*
* When a command is executed a command event is dispatched on the active
* element. Note that you should stop the propagation after you have handled the
* command if there might be other command listeners higher up in the DOM tree.
*/
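// Illustrative usage sketch (hypothetical, not part of this file): a page that
// hosts commands typically listens for these events on a focusable element:
//
//   element.addEventListener('canExecute', function(e) {
//     e.canExecute = true;  // enable the command for this element
//   });
//   element.addEventListener('command', function(e) {
//     // e.command is the command element that was executed
//   });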
cr.define('cr.ui', function() {
/**
* This is used to identify keyboard shortcuts.
* @param {string} shortcut The text used to describe the keys for this
* keyboard shortcut.
* @constructor
*/
function KeyboardShortcut(shortcut) {
var mods = {};
var ident = '';
shortcut.split('-').forEach(function(part) {
var partLc = part.toLowerCase();
switch (partLc) {
case 'alt':
case 'ctrl':
case 'meta':
case 'shift':
mods[partLc + 'Key'] = true;
break;
default:
if (ident)
throw Error('Invalid shortcut');
ident = part;
}
});
this.ident_ = ident;
this.mods_ = mods;
}
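// Illustrative example (not part of the original file):
//   new KeyboardShortcut('U+0041-Ctrl') matches a keydown event whose
//   keyIdentifier is 'U+0041' and whose only active modifier is ctrlKey.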
KeyboardShortcut.prototype = {
/**
* Whether the keyboard shortcut object matches a keyboard event.
* @param {!Event} e The keyboard event object.
* @return {boolean} Whether we found a match or not.
*/
matchesEvent: function(e) {
if (e.keyIdentifier == this.ident_) {
// All keyboard modifiers needs to match.
var mods = this.mods_;
return ['altKey', 'ctrlKey', 'metaKey', 'shiftKey'].every(function(k) {
return e[k] == !!mods[k];
});
}
return false;
}
};
/**
* Creates a new command element.
* @constructor
* @extends {HTMLElement}
*/
var Command = cr.ui.define('command');
Command.prototype = {
__proto__: HTMLElement.prototype,
/**
* Initializes the command.
*/
decorate: function() {
CommandManager.init(this.ownerDocument);
if (this.hasAttribute('shortcut'))
this.shortcut = this.getAttribute('shortcut');
},
<|fim▁hole|> * If |element| isn't given, the active element is used instead.
* If the command is {@code disabled} this does nothing.
* @param {HTMLElement=} opt_element Optional element to dispatch event on.
*/
execute: function(opt_element) {
if (this.disabled)
return;
var doc = this.ownerDocument;
if (doc.activeElement) {
var e = new cr.Event('command', true, false);
e.command = this;
(opt_element || doc.activeElement).dispatchEvent(e);
}
},
/**
* Call this when there have been changes that might change whether the
* command can be executed or not.
* @param {Node=} opt_node Node for which to actuate command state.
*/
canExecuteChange: function(opt_node) {
dispatchCanExecuteEvent(this,
opt_node || this.ownerDocument.activeElement);
},
/**
* The keyboard shortcut that triggers the command. This is a string
* consisting of a keyIdentifier (as reported by WebKit in keydown) as
* well as optional key modifiers joinded with a '-'.
*
* Multiple keyboard shortcuts can be provided by separating them by
* whitespace.
*
* For example:
* "F1"
* "U+0008-Meta" for Apple command backspace.
* "U+0041-Ctrl" for Control A
* "U+007F U+0008-Meta" for Delete and Command Backspace
*
* @type {string}
*/
shortcut_: '',
get shortcut() {
return this.shortcut_;
},
set shortcut(shortcut) {
var oldShortcut = this.shortcut_;
if (shortcut !== oldShortcut) {
this.keyboardShortcuts_ = shortcut.split(/\s+/).map(function(shortcut) {
return new KeyboardShortcut(shortcut);
});
// Set this after the keyboardShortcuts_ since that might throw.
this.shortcut_ = shortcut;
cr.dispatchPropertyChange(this, 'shortcut', this.shortcut_,
oldShortcut);
}
},
/**
* Whether the event object matches the shortcut for this command.
* @param {!Event} e The key event object.
* @return {boolean} Whether it matched or not.
*/
matchesEvent: function(e) {
if (!this.keyboardShortcuts_)
return false;
return this.keyboardShortcuts_.some(function(keyboardShortcut) {
return keyboardShortcut.matchesEvent(e);
});
}
};
/**
* The label of the command.
* @type {string}
*/
cr.defineProperty(Command, 'label', cr.PropertyKind.ATTR);
/**
* Whether the command is disabled or not.
* @type {boolean}
*/
cr.defineProperty(Command, 'disabled', cr.PropertyKind.BOOL_ATTR);
/**
* Whether the command is hidden or not.
* @type {boolean}
*/
cr.defineProperty(Command, 'hidden', cr.PropertyKind.BOOL_ATTR);
/**
* Whether the command is checked or not.
* @type {boolean}
*/
cr.defineProperty(Command, 'checked', cr.PropertyKind.BOOL_ATTR);
/**
* Dispatches a canExecute event on the target.
* @param {cr.ui.Command} command The command that we are testing for.
* @param {Element} target The target element to dispatch the event on.
*/
function dispatchCanExecuteEvent(command, target) {
var e = new CanExecuteEvent(command, true);
target.dispatchEvent(e);
command.disabled = !e.canExecute;
}
/**
* The command managers for different documents.
*/
var commandManagers = {};
/**
* Keeps track of the focused element and updates the commands when the focus
* changes.
* @param {!Document} doc The document that we are managing the commands for.
* @constructor
*/
function CommandManager(doc) {
doc.addEventListener('focus', this.handleFocus_.bind(this), true);
// Make sure we add the listener to the bubbling phase so that elements can
// prevent the command.
doc.addEventListener('keydown', this.handleKeyDown_.bind(this), false);
}
/**
* Initializes a command manager for the document as needed.
* @param {!Document} doc The document to manage the commands for.
*/
CommandManager.init = function(doc) {
var uid = cr.getUid(doc);
if (!(uid in commandManagers)) {
commandManagers[uid] = new CommandManager(doc);
}
};
CommandManager.prototype = {
/**
* Handles focus changes on the document.
* @param {Event} e The focus event object.
* @private
*/
handleFocus_: function(e) {
var target = e.target;
// Ignore focus on a menu button or command item
if (target.menu || target.command)
return;
var commands = Array.prototype.slice.call(
target.ownerDocument.querySelectorAll('command'));
commands.forEach(function(command) {
dispatchCanExecuteEvent(command, target);
});
},
/**
* Handles the keydown event and routes it to the right command.
* @param {!Event} e The keydown event.
*/
handleKeyDown_: function(e) {
var target = e.target;
var commands = Array.prototype.slice.call(
target.ownerDocument.querySelectorAll('command'));
for (var i = 0, command; command = commands[i]; i++) {
if (!command.disabled && command.matchesEvent(e)) {
e.preventDefault();
// We do not want any other element to handle this.
e.stopPropagation();
command.execute();
return;
}
}
}
};
/**
* The event type used for canExecute events.
* @param {!cr.ui.Command} command The command that we are evaluating.
* @extends {Event}
* @constructor
* @class
*/
function CanExecuteEvent(command) {
var e = command.ownerDocument.createEvent('Event');
e.initEvent('canExecute', true, false);
e.__proto__ = CanExecuteEvent.prototype;
e.command = command;
return e;
}
CanExecuteEvent.prototype = {
__proto__: Event.prototype,
/**
* The current command
* @type {cr.ui.Command}
*/
command: null,
/**
* Whether the target can execute the command. Setting this also stops the
* propagation.
* @type {boolean}
*/
canExecute_: false,
get canExecute() {
return this.canExecute_;
},
set canExecute(canExecute) {
this.canExecute_ = !!canExecute;
this.stopPropagation();
}
};
// Export
return {
Command: Command,
CanExecuteEvent: CanExecuteEvent
};
});<|fim▁end|> | /**
* Executes the command by dispatching a command event on the given element. |
<|file_name|>crypto_box.rs<|end_file_name|><|fim▁begin|>// crypto_box.h
pub const crypto_box_SEEDBYTES: usize = crypto_box_curve25519xsalsa20poly1305_SEEDBYTES;
pub const crypto_box_PUBLICKEYBYTES: usize = crypto_box_curve25519xsalsa20poly1305_PUBLICKEYBYTES;
pub const crypto_box_SECRETKEYBYTES: usize = crypto_box_curve25519xsalsa20poly1305_SECRETKEYBYTES;
pub const crypto_box_BEFORENMBYTES: usize = crypto_box_curve25519xsalsa20poly1305_BEFORENMBYTES;
pub const crypto_box_NONCEBYTES: usize = crypto_box_curve25519xsalsa20poly1305_NONCEBYTES;
pub const crypto_box_ZEROBYTES: usize = crypto_box_curve25519xsalsa20poly1305_ZEROBYTES;
pub const crypto_box_BOXZEROBYTES: usize = crypto_box_curve25519xsalsa20poly1305_BOXZEROBYTES;
pub const crypto_box_MACBYTES: usize = crypto_box_curve25519xsalsa20poly1305_MACBYTES;
pub const crypto_box_PRIMITIVE: &'static str = "curve25519xsalsa20poly1305";
extern {
pub fn crypto_box_seedbytes() -> size_t;
pub fn crypto_box_publickeybytes() -> size_t;
pub fn crypto_box_secretkeybytes() -> size_t;
pub fn crypto_box_beforenmbytes() -> size_t;
pub fn crypto_box_noncebytes() -> size_t;
pub fn crypto_box_zerobytes() -> size_t;
pub fn crypto_box_boxzerobytes() -> size_t;
pub fn crypto_box_macbytes() -> size_t;
pub fn crypto_box_primitive() -> *const c_char;
pub fn crypto_box_seed_keypair(
pk: *mut [u8; crypto_box_PUBLICKEYBYTES],
sk: *mut [u8; crypto_box_SECRETKEYBYTES],
seed: *const [u8; crypto_box_SEEDBYTES])
-> c_int;
pub fn crypto_box_keypair(
pk: *mut [u8; crypto_box_PUBLICKEYBYTES],
sk: *mut [u8; crypto_box_SECRETKEYBYTES])
-> c_int;
pub fn crypto_box_beforenm(
k: *mut [u8; crypto_box_BEFORENMBYTES],
pk: *const [u8; crypto_box_PUBLICKEYBYTES],
sk: *const [u8; crypto_box_SECRETKEYBYTES])
-> c_int;
pub fn crypto_box_afternm(
c: *mut u8,
m: *const u8,
mlen: c_ulonglong,
n: *const [u8; crypto_box_NONCEBYTES],
k: *const [u8; crypto_box_BEFORENMBYTES])
-> c_int;
pub fn crypto_box_open_afternm(
m: *mut u8,
c: *const u8,
clen: c_ulonglong,
n: *const [u8; crypto_box_NONCEBYTES],
k: *const [u8; crypto_box_BEFORENMBYTES])
-> c_int;
pub fn crypto_box(
c: *mut u8,
m: *const u8,
mlen: c_ulonglong,
n: *const [u8; crypto_box_NONCEBYTES],
pk: *const [u8; crypto_box_PUBLICKEYBYTES],
sk: *const [u8; crypto_box_SECRETKEYBYTES])
-> c_int;
pub fn crypto_box_open(
m: *mut u8,
c: *const u8,
clen: c_ulonglong,
n: *const [u8; crypto_box_NONCEBYTES],
pk: *const [u8; crypto_box_PUBLICKEYBYTES],
sk: *const [u8; crypto_box_SECRETKEYBYTES])
-> c_int;
pub fn crypto_box_easy(
c: *mut u8,
m: *const u8,
mlen: c_ulonglong,
n: *const [u8; crypto_box_NONCEBYTES],
pk: *const [u8; crypto_box_PUBLICKEYBYTES],
sk: *const [u8; crypto_box_SECRETKEYBYTES])
-> c_int;
pub fn crypto_box_open_easy(
m: *mut u8,
c: *const u8,
clen: c_ulonglong,
n: *const [u8; crypto_box_NONCEBYTES],
pk: *const [u8; crypto_box_PUBLICKEYBYTES],
sk: *const [u8; crypto_box_SECRETKEYBYTES])
-> c_int;
pub fn crypto_box_detached(
c: *mut u8,
mac: *mut [u8; crypto_box_MACBYTES],
m: *const u8,
mlen: c_ulonglong,
n: *const [u8; crypto_box_NONCEBYTES],
pk: *const [u8; crypto_box_PUBLICKEYBYTES],
sk: *const [u8; crypto_box_SECRETKEYBYTES])
-> c_int;
pub fn crypto_box_open_detached(
m: *mut u8,
c: *const u8,
mac: *const [u8; crypto_box_MACBYTES],
clen: c_ulonglong,
n: *const [u8; crypto_box_NONCEBYTES],
pk: *const [u8; crypto_box_PUBLICKEYBYTES],
sk: *const [u8; crypto_box_SECRETKEYBYTES])
-> c_int;
}
#[test]
fn test_crypto_box_seedbytes() {
assert!(unsafe {
crypto_box_seedbytes() as usize
} == crypto_box_SEEDBYTES)
}
#[test]
fn test_crypto_box_publickeybytes() {
assert!(unsafe {
crypto_box_publickeybytes() as usize
} == crypto_box_PUBLICKEYBYTES)
}
#[test]
fn test_crypto_box_secretkeybytes() {
assert!(unsafe {
crypto_box_secretkeybytes() as usize
} == crypto_box_SECRETKEYBYTES)
}
#[test]
fn test_crypto_box_beforenmbytes() {
assert!(unsafe {
crypto_box_beforenmbytes() as usize
} == crypto_box_BEFORENMBYTES)
}
#[test]
fn test_crypto_box_noncebytes() {
assert!(unsafe {
crypto_box_noncebytes() as usize
} == crypto_box_NONCEBYTES)
}
#[test]
fn test_crypto_box_zerobytes() {
assert!(unsafe {
crypto_box_zerobytes() as usize
} == crypto_box_ZEROBYTES)
}
#[test]
fn test_crypto_box_boxzerobytes() {
assert!(unsafe {
crypto_box_boxzerobytes() as usize
} == crypto_box_BOXZEROBYTES)
}
#[test]
fn test_crypto_box_macbytes() {
assert!(unsafe {
crypto_box_macbytes() as usize
} == crypto_box_MACBYTES)
}<|fim▁hole|> unsafe {
let s = crypto_box_primitive();
let s = std::ffi::CStr::from_ptr(s).to_bytes();
assert!(s == crypto_box_PRIMITIVE.as_bytes());
}
}<|fim▁end|> | #[test]
fn test_crypto_box_primitive() { |
<|file_name|>needless_range_loop2.rs<|end_file_name|><|fim▁begin|>#![warn(clippy::needless_range_loop)]
fn calc_idx(i: usize) -> usize {
(i + i + 20) % 4
}
fn main() {
let ns = vec![2, 3, 5, 7];
for i in 3..10 {
println!("{}", ns[i]);
}
for i in 3..10 {
println!("{}", ns[i % 4]);
}
for i in 3..10 {
println!("{}", ns[i % ns.len()]);
}
for i in 3..10 {
println!("{}", ns[calc_idx(i)]);
}
for i in 3..10 {
println!("{}", ns[calc_idx(i) % 4]);
}
let mut ms = vec![1, 2, 3, 4, 5, 6];
for i in 0..ms.len() {
ms[i] *= 2;
}
assert_eq!(ms, vec![2, 4, 6, 8, 10, 12]);
let mut ms = vec![1, 2, 3, 4, 5, 6];
for i in 0..ms.len() {
let x = &mut ms[i];
*x *= 2;
}
assert_eq!(ms, vec![2, 4, 6, 8, 10, 12]);
<|fim▁hole|> println!("{}", g[i] + x);
}
assert_eq!(g, vec![20, 18, 15, 11, 6, 0]);
let mut g = vec![1, 2, 3, 4, 5, 6];
let glen = g.len();
for i in 0..glen {
g[i] = g[i + 1..].iter().sum();
}
assert_eq!(g, vec![20, 18, 15, 11, 6, 0]);
let x = 5;
let mut vec = vec![0; 9];
for i in x..x + 4 {
vec[i] += 1;
}
let x = 5;
let mut vec = vec![0; 10];
for i in x..=x + 4 {
vec[i] += 1;
}
let arr = [1, 2, 3];
for i in 0..3 {
println!("{}", arr[i]);
}
for i in 0..2 {
println!("{}", arr[i]);
}
for i in 1..3 {
println!("{}", arr[i]);
}
// Fix #5945
let mut vec = vec![1, 2, 3, 4];
for i in 0..vec.len() - 1 {
vec[i] += 1;
}
let mut vec = vec![1, 2, 3, 4];
for i in vec.len() - 3..vec.len() {
vec[i] += 1;
}
let mut vec = vec![1, 2, 3, 4];
for i in vec.len() - 3..vec.len() - 1 {
vec[i] += 1;
}
}
mod issue2277 {
pub fn example(list: &[[f64; 3]]) {
let mut x: [f64; 3] = [10.; 3];
for i in 0..3 {
x[i] = list.iter().map(|item| item[i]).sum::<f64>();
}
}
}<|fim▁end|> | let g = vec![1, 2, 3, 4, 5, 6];
let glen = g.len();
for i in 0..glen {
let x: u32 = g[i + 1..].iter().sum(); |
<|file_name|>test_enum.rs<|end_file_name|><|fim▁begin|>use bindgen::BindgenOptions;
use support::assert_bind_eq;
fn default_without_rust_enums() -> BindgenOptions {
BindgenOptions { rust_enums: false, .. Default::default() }
}
#[test]
fn with_simple_enum() {
assert_bind_eq(Default::default(), "headers/enum.h", "
#[derive(Copy, Clone)]
#[repr(u32)]
#[derive(Debug)]
pub enum Enum_Foo { Bar = 0, Qux = 1, }
#[derive(Copy, Clone)]
#[repr(i32)]
#[derive(Debug)]
pub enum Enum_Neg { MinusOne = -1, One = 1, }
");
assert_bind_eq(default_without_rust_enums(), "headers/enum.h", "
type Enum_Foo = u32;
const Bar: Enum_Foo = 0;
const Qux: Enum_Foo = 1;
type Enum_Neg = i32;
const MinusOne: Enum_Neg = -1;
const One: Enum_Neg = 1;
");
}
#[test]
fn with_packed_enums() {
assert_bind_eq(Default::default(), "headers/enum_packed.h", "
#[derive(Copy, Clone)]
#[repr(u8)]
#[derive(Debug)]
pub enum Enum_Foo { Bar = 0, Qux = 1, }
#[derive(Copy, Clone)]
#[repr(i8)]
#[derive(Debug)]
pub enum Enum_Neg { MinusOne = -1, One = 1, }
#[derive(Copy, Clone)]
#[repr(u16)]
#[derive(Debug)]
pub enum Enum_Bigger { Much = 255, Larger = 256, }
");
assert_bind_eq(default_without_rust_enums(), "headers/enum_packed.h", "
type Enum_Foo = u8;
const Bar: Enum_Foo = 0;
const Qux: Enum_Foo = 1;
type Enum_Neg = i8;
const MinusOne: Enum_Neg = -1;
const One: Enum_Neg = 1;
type Enum_Bigger = u16;
const Much: Enum_Bigger = 255;
const Larger: Enum_Bigger = 256;
");
}
#[test]
fn with_duplicate_enum_value() {
assert_bind_eq(Default::default(), "headers/enum_dupe.h", "
pub const Dupe: Enum_Foo = Enum_Foo::Bar;
#[derive(Copy, Clone)]
#[repr(u32)]
#[derive(Debug)]
pub enum Enum_Foo { Bar = 1, }
");
assert_bind_eq(default_without_rust_enums(), "headers/enum_dupe.h", "
type Enum_Foo = u32;
const Bar: Enum_Foo = 1;
const Dupe: Enum_Foo = 1;
");
}
#[test]
fn with_explicitly_typed_cxx_enum() {
assert_bind_eq(Default::default(), "headers/enum_explicit_type.hpp", "
#[derive(Copy, Clone)]
#[repr(u8)]
#[derive(Debug)]
pub enum Enum_Foo { Bar = 0, Qux = 1, }
#[derive(Copy, Clone)]
#[repr(i8)]
#[derive(Debug)]
pub enum Enum_Neg { MinusOne = -1, One = 1, }
#[derive(Copy, Clone)]
#[repr(u16)]
#[derive(Debug)]
pub enum Enum_Bigger { Much = 255, Larger = 256, }
#[derive(Copy, Clone)]
#[repr(i64)]
#[derive(Debug)]
pub enum Enum_MuchLong { MuchLow = -4294967296, }
#[derive(Copy, Clone)]
#[repr(u64)]
#[derive(Debug)]
pub enum Enum_MuchLongLong { MuchHigh = 4294967296, }
");
assert_bind_eq(default_without_rust_enums(), "headers/enum_explicit_type.hpp", "
type Enum_Foo = u8;
const Bar: Enum_Foo = 0;
const Qux: Enum_Foo = 1;
type Enum_Neg = i8;
const MinusOne: Enum_Neg = -1;
const One: Enum_Neg = 1;
type Enum_Bigger = u16;
const Much: Enum_Bigger = 255;
const Larger: Enum_Bigger = 256;
type Enum_MuchLong = i64;
const MuchLow: Enum_MuchLong = -4294967296;
type Enum_MuchLongLong = u64;
const MuchHigh: Enum_MuchLongLong = 4294967296;
");
}
#[test]
fn with_overflowed_enum_value() {
assert_bind_eq(Default::default(), "headers/overflowed_enum.hpp", "
#[derive(Copy, Clone)]
#[repr(u32)]
#[derive(Debug)]
pub enum Enum_Foo {
BAP_ARM = 9698489,
BAP_X86 = 11960045,
BAP_X86_64 = 3128633167,
}
#[derive(Copy, Clone)]
#[repr(u16)]
#[derive(Debug)]
pub enum Enum_Bar { One = 1, Big = 2, }
");
assert_bind_eq(default_without_rust_enums(), "headers/overflowed_enum.hpp", "
type Enum_Foo = u32;<|fim▁hole|> type Enum_Bar = u16;
const One: Enum_Bar = 1;
const Big: Enum_Bar = 2;
");
}<|fim▁end|> | const BAP_ARM: Enum_Foo = 9698489;
const BAP_X86: Enum_Foo = 11960045;
const BAP_X86_64: Enum_Foo = 3128633167; |
<|file_name|>Example_Dudko_Fit.py<|end_file_name|><|fim▁begin|># force floating point division. Can still use integer with //
from __future__ import division
# This file is used for importing the common utilities classes.
import numpy as np
import matplotlib.pyplot as plt
import sys
sys.path.append("../../../../../")
from EnergyLandscapes.Lifetime_Dudko2008.Python.TestExamples.Util import \
Example_Data
def PlotFit(data,BaseName):
fig = Example_Data.PlotHistograms(data)
fig.savefig(BaseName + "_Histogram.png")
fig = Example_Data.PlotLifetimesAndFit(data)
fig.savefig(BaseName + "_Lifetimes.png")
def run():
"""
"""
# figure 1 from dudko 2008
data = Example_Data.Dudko2008Fig1_Probabilities()
PlotFit(data,"../Out/Dudko2008_Fig1")
# figure 2 frm dudko 2008
data = Example_Data.Dudko2008Fig2_Probabilities()
PlotFit(data,"../Out/Dudko2008_Fig2")
<|fim▁hole|>
if __name__ == "__main__":
run()<|fim▁end|> | |
<|file_name|>map3d_population3.js<|end_file_name|><|fim▁begin|>$.ajax({
url: './data/population.json',
success: function (data) {
var max = -Infinity;
data = data.map(function (item) {
max = Math.max(item[2], max);
return {
geoCoord: item.slice(0, 2),
value: item[2]
}
});
data.forEach(function (item) {
item.barHeight = item.value / max * 50 + 0.1<|fim▁hole|> text: 'Gridded Population of the World (2000)',
subtext: 'Data from Socioeconomic Data and Applications Center',
sublink : 'http://sedac.ciesin.columbia.edu/data/set/gpw-v3-population-density/data-download#close',
x:'center',
y:'top',
textStyle: {
color: 'white'
}
},
tooltip: {
formatter: '{b}'
},
dataRange: {
min: 0,
max: max,
text:['High','Low'],
realtime: false,
calculable : true,
color: ['red','yellow','lightskyblue']
},
series: [{
type: 'map3d',
mapType: 'world',
baseLayer: {
backgroundColor: 'rgba(0, 150, 200, 0.5)'
},
data: [{}],
itemStyle: {
normal: {
areaStyle: {
color: 'rgba(0, 150, 200, 0.8)'
},
borderColor: '#777'
}
},
markBar: {
barSize: 0.6,
data: data
},
autoRotate: true,
}]
});
}
});<|fim▁end|> | });
myChart.setOption({
title : { |
<|file_name|>types.ts<|end_file_name|><|fim▁begin|>import Emitter, { Handler } from "../emmett";
const emitterNames = new Map();
const handler1: Handler = function(event) {
console.log("Received event:", event.type);
console.log(" - Data:", event.data);
console.log(" - Target:", emitterNames.get(event.target));
console.log("");
};
const handler2: Handler = function(event) {
console.log("(handler2 here)");
handler1(event);
};
const handler3: Handler = function(e) {
console.log("(handler3 here)");
};
const symbolE = Symbol("eventE");
const symbolF = Symbol("eventF");
// #on
const emitter1 = new Emitter();
emitterNames.set(emitter1, "Emitter 1");
emitter1.on("eventA", handler1);
emitter1.on(["eventB", "eventC"], handler1);
emitter1.on(/eventD/, handler2);
emitter1.on(symbolE, handler1);
emitter1.on([symbolF], handler2);
emitter1.on(handler3);
// #emit
console.log("Everybody should emit twice:");
console.log("");
[..."ABCD"].forEach(c => emitter1.emit("event" + c, { payload1: c }));
emitter1.emit([symbolE, symbolF], { payload: "common payload" });
emitter1.emit({
...[..."ABCD"].reduce(
(events, c) => ({
...events,
["event" + c]: { payload2: c }
}),
{}
),
[symbolE]: { payload2: symbolE },
[symbolF]: { payload2: symbolF }
});
// #off
emitter1.off("eventA");
emitter1.off("eventB", handler1);
emitter1.off(["eventC"], handler1);
emitter1.off({ eventD: handler1 });
emitter1.off(symbolE);
emitter1.off(handler2).off(handler3);
console.log("Nobody should emit:");
console.log("");
[..."ABCD"].forEach(c => emitter1.emit("event" + c, { payload1: c }));
emitter1.emit([symbolE, symbolF], { payload: "common payload" });
// #listeners
emitter1.listeners(symbolE);
// #unbindAll
emitter1.unbindAll();
// #once
const emitter2 = new Emitter();
emitterNames.set(emitter2, "Emitter 2");
console.log("Everybody should emit once:");<|fim▁hole|>emitter2.once(symbolE, handler1);
emitter2.once([symbolF], handler2);
emitter2.once(handler3);
[..."AABBCCDD"].forEach(c => emitter2.emit("event" + c, { payload: c }));
emitter2.emit([symbolE, symbolE, symbolF, symbolF], {
payload: "common payload"
});
// other methods and properties
let emitter3: Emitter = new Emitter();
emitter3 = emitter3.disable();
emitter3 = emitter3.enable();
const value: void = emitter3.kill();
const version: string = Emitter.version;<|fim▁end|> | console.log("");
emitter2.once("eventA", handler1);
emitter2.once(["eventB", "eventC"], handler1);
emitter2.once(/eventD/, handler2); |
<|file_name|>devConfigTemplate.py<|end_file_name|><|fim▁begin|>'''
Created on Sep 22, 2016
@author: rtorres
'''
import os
from flaskiwsapp.settings.baseConfig import BaseConfig
<|fim▁hole|> DEBUG_TB_ENABLED = True
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/example'
AUTH0_CALLBACK_URL = 'http://localhost/auth/callback'
AUTH0_CLIENT_ID = ''
AUTH0_CLIENT_SECRET = ''
AUTH0_DOMAIN = ''
APP_DOMAIN = 'localhost'
APP_URL = 'http://%s' % APP_DOMAIN
    SERVER_NAME = 'localhost'
"""Development configuration"""
ENV = 'dev'
DEBUG = True |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 Virgil Dupras
//
// This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
// which should be included with this package. The terms are also available at
// http://www.gnu.org/licenses/gpl-3.0.html
//
use std::path::Path;
use civng::game::Game;
use civng::unit::{Unit, UnitType, Player};
use civng::hexpos::{Pos, OffsetPos};
extern crate rustty;
extern crate civng;
fn main() {
let mut game = Game::new(Path::new("resources/pangea-duel.Civ5Map"));
let unitpos = game.map().first_passable(Pos::origin());
let _ = game.add_unit(Unit::new(UnitType::Melee, Player::Me, unitpos));
let unitpos = game.map().first_passable(Pos::origin());
let _ = game.add_unit(Unit::new(UnitType::Ranged, Player::Me, unitpos));
let unitpos = game.map().first_passable(OffsetPos::new(4, 3).to_pos());
let _ = game.add_unit(Unit::new(UnitType::Melee, Player::NotMe, unitpos));
let unitpos = game.map().first_passable(OffsetPos::new(4, 3).to_pos());
let _ = game.add_unit(Unit::new(UnitType::Melee, Player::NotMe, unitpos));<|fim▁hole|> game.draw();
if !game.handle_events() {
break;
}
}
}<|fim▁end|> | game.new_turn();
loop { |
<|file_name|>ancestorscombinators.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
/// Union and intersection can be made more efficient if the streams are uninterrupted streams of
/// ancestors. For example:
///
/// A-o o-B
/// \ /
/// o - C
/// |
/// o
/// |
/// ...
///
/// UnionNodeStream(A, B) would poll both streams until they are exhausted. That means that node C
/// and all of its ancestors would be generated twice. This is not necessary.
/// For IntersectNodeStream(A, B) the problem is even more acute. The stream will return just one
/// entry; however, it will generate all ancestors of A and B twice, and there can be lots of them!
///
/// The stream below aims to solve the aforementioned problems. It's primary usage is in
/// Mercurial pull to find commits that need to be sent to a client.
use std::collections::hash_set::IntoIter;
use std::collections::{BTreeMap, HashSet};
use std::iter;
use std::sync::Arc;
use anyhow::Error;
use cloned::cloned;
use futures_ext::{BoxFuture, BoxStream, FutureExt as FBFutureExt, SelectAll, StreamExt};
use futures_old::future::{ok, Future};
use futures_old::stream::{self, iter_ok, Stream};
use futures_old::{try_ready, Async, IntoFuture, Poll};
use futures_util::future::{FutureExt, TryFutureExt};
use maplit::hashset;
use changeset_fetcher::{ArcChangesetFetcher, ChangesetFetcher};
use context::CoreContext;
use mononoke_types::ChangesetId;
use mononoke_types::Generation;
use reachabilityindex::{LeastCommonAncestorsHint, NodeFrontier};
use crate::errors::*;
use crate::setcommon::*;
use crate::BonsaiNodeStream;
use crate::UniqueHeap;
/// As the name suggests, it's a difference of unions of ancestors of nodes.
/// In mercurial revset's terms it's (::A) - (::B), where A and B are sets of nodes.
/// In Mononoke revset's terms it's equivalent to
///
/// ```ignore
/// let include: Vec<HgNodeHash> = vec![ ... ];
/// let exclude: Vec<HgNodeHash> = vec![ ... ];
/// ...
/// let mut include_ancestors = vec![];
/// for i in include.clone() {
/// include_ancestors.push(
/// AncestorsNodeStream::new(&repo, repo_generation.clone(), i).boxify()
/// );
/// }
///
/// let mut exclude_ancestors = vec![];
/// for i in exclude.clone() {
/// exclude_ancestors.push(
/// AncestorsNodeStream::new(&repo, repo_generation.clone(), i).boxify()
/// );
/// }
///
/// let include_ancestors = UnionNodeStream::new(
/// &repo, repo_generation.clone(), include_ancestors
/// ).boxify();
/// let exclude_ancestors = UnionNodeStream::new(
/// &repo, repo_generation.clone(), exclude_ancestors
/// ).boxify();
/// let expected =
/// SetDifferenceNodeStream::new(
/// &repo, repo_generation.clone(), include_ancestors, exclude_ancestors
/// );
/// ```
///
pub struct DifferenceOfUnionsOfAncestorsNodeStream {
ctx: CoreContext,
changeset_fetcher: ArcChangesetFetcher,
// Given a set "nodes", and a maximum generation "gen",
// return a set of nodes "C" which satisfies:
// - Max generation number in "C" is <= gen
// - Any ancestor of "nodes" with generation <= gen is also an ancestor of "C"
// It's used to move `exclude` NodeFrontier
lca_hint_index: Arc<dyn LeastCommonAncestorsHint>,
// Nodes that we know about, grouped by generation.
next_generation: BTreeMap<Generation, HashSet<ChangesetId>>,
// The generation of the nodes in `drain`. All nodes with bigger generation has already been
// returned
current_generation: Generation,
// Parents of entries from `drain`. We fetch generation number for them.
pending_changesets: SelectAll<BoxStream<(ChangesetId, Generation), Error>>,
// Stream of (Hashset, Generation) that needs to be excluded
exclude_ancestors_future: BoxFuture<NodeFrontier, Error>,
current_exclude_generation: Option<Generation>,
// Nodes which generation is equal to `current_generation`. They will be returned from the
// stream unless excluded.
drain: iter::Peekable<IntoIter<ChangesetId>>,
// max heap of all relevant unique generation numbers for include nodes
sorted_unique_generations: UniqueHeap<Generation>,
}
fn make_pending(
ctx: CoreContext,
changeset_fetcher: ArcChangesetFetcher,
hash: ChangesetId,
) -> BoxStream<(ChangesetId, Generation), Error> {
let new_repo_changesets = changeset_fetcher.clone();
let new_repo_gennums = changeset_fetcher.clone();
Ok::<_, Error>(hash)
.into_future()
.and_then({
cloned!(ctx);
move |hash| {
async move { new_repo_changesets.get_parents(ctx, hash).await }
.boxed()
.compat()
.map(|parents| parents.into_iter())
.map_err(|err| err.context(ErrorKind::ParentsFetchFailed).into())
}
})
.map(|parents| iter_ok::<_, Error>(parents))
.flatten_stream()
.and_then(move |node_hash| {
cloned!(ctx, new_repo_gennums);
async move { new_repo_gennums.get_generation_number(ctx, node_hash).await }
.boxed()
.compat()
.map(move |gen_id| (node_hash, gen_id))
.map_err(|err| err.context(ErrorKind::GenerationFetchFailed))
})
.boxify()
}
impl DifferenceOfUnionsOfAncestorsNodeStream {
pub fn new(
ctx: CoreContext,
changeset_fetcher: &ArcChangesetFetcher,
lca_hint_index: Arc<dyn LeastCommonAncestorsHint>,
hash: ChangesetId,
) -> BonsaiNodeStream {
Self::new_with_excludes(ctx, changeset_fetcher, lca_hint_index, vec![hash], vec![])
}
pub fn new_union(
ctx: CoreContext,
changeset_fetcher: &ArcChangesetFetcher,
lca_hint_index: Arc<dyn LeastCommonAncestorsHint>,
hashes: Vec<ChangesetId>,
) -> BonsaiNodeStream {
Self::new_with_excludes(ctx, changeset_fetcher, lca_hint_index, hashes, vec![])
}
pub fn new_with_excludes(
ctx: CoreContext,
changeset_fetcher: &ArcChangesetFetcher,
lca_hint_index: Arc<dyn LeastCommonAncestorsHint>,
hashes: Vec<ChangesetId>,
excludes: Vec<ChangesetId>,
) -> BonsaiNodeStream {
let changeset_fetcher = changeset_fetcher.clone();
add_generations_by_bonsai(
ctx.clone(),
stream::iter_ok(hashes.into_iter()).boxify(),
changeset_fetcher.clone(),
)
.collect()
.join(
add_generations_by_bonsai(
ctx.clone(),
stream::iter_ok(excludes.into_iter()).boxify(),
changeset_fetcher.clone(),
)
.collect(),
)
.map(move |(hashes_generations, exclude_generations)| {
Self::new_with_excludes_gen_num(
ctx,
&changeset_fetcher,
lca_hint_index,
hashes_generations,
exclude_generations,
)
})
.map_err(|err| err.context(ErrorKind::GenerationFetchFailed))
.from_err()
.flatten_stream()
.boxify()
}
pub fn new_with_excludes_gen_num(
ctx: CoreContext,
changeset_fetcher: &ArcChangesetFetcher,
lca_hint_index: Arc<dyn LeastCommonAncestorsHint>,
hashes_generations: Vec<(ChangesetId, Generation)>,
exclude_generations: Vec<(ChangesetId, Generation)>,
) -> BonsaiNodeStream {
let mut next_generation = BTreeMap::new();
let current_exclude_generation = exclude_generations
.iter()
.map(|(_node, gen)| gen)
.max()
.cloned();
let mut sorted_unique_generations = UniqueHeap::new();
for (hash, generation) in hashes_generations {
next_generation
.entry(generation.clone())
.or_insert_with(HashSet::new)
.insert(hash);
// insert into our sorted list of generations
sorted_unique_generations.push(generation);
}
Self {
ctx,
changeset_fetcher: changeset_fetcher.clone(),
lca_hint_index,
next_generation,
// Start with a fake state - maximum generation number and no entries
// for it (see drain below)
current_generation: Generation::max_gen(),
pending_changesets: SelectAll::default(),
exclude_ancestors_future: ok(NodeFrontier::from_iter(exclude_generations)).boxify(),
current_exclude_generation,
drain: hashset! {}.into_iter().peekable(),
sorted_unique_generations,
}
.boxify()
}
// Poll if a particular node should be excluded from the output.
fn exclude_node(
&mut self,
node: ChangesetId,
current_generation: Generation,
) -> Poll<bool, Error> {
loop {
// Poll the exclude_ancestors frontier future
let curr_exclude_ancestors = try_ready!(self.exclude_ancestors_future.poll());
if curr_exclude_ancestors.is_empty() {
// No exclude nodes to worry about
self.exclude_ancestors_future = ok(curr_exclude_ancestors).boxify();
return Ok(Async::Ready(false));
}
if self.current_exclude_generation == None {
// Recompute the current exclude generation
self.current_exclude_generation = curr_exclude_ancestors.max_gen();
}
// Attempt to extract the max generation of the frontier
if let Some(exclude_gen) = self.current_exclude_generation {
{
if exclude_gen < current_generation {
self.exclude_ancestors_future = ok(curr_exclude_ancestors).boxify();
return Ok(Async::Ready(false));
} else if exclude_gen == current_generation {
let mut should_exclude: Option<bool> = None;
{
if let Some(ref nodes) = curr_exclude_ancestors.get(¤t_generation)
{
should_exclude = Some(nodes.contains(&node));
}
}
if let Some(should_exclude) = should_exclude {
self.exclude_ancestors_future = ok(curr_exclude_ancestors).boxify();
return Ok(Async::Ready(should_exclude));
}
}
}
// Current generation in `exclude_ancestors` is bigger
// than `current_generation`.
// We need to skip.
// Replace the exclude with a new future
// And indicate the current exclude gen needs to be recalculated.
self.current_exclude_generation = None;
cloned!(self.lca_hint_index, self.ctx, self.changeset_fetcher);
self.exclude_ancestors_future = async move {
lca_hint_index
.lca_hint(
&ctx,
&changeset_fetcher,
curr_exclude_ancestors,
current_generation,
)
.await
}
.boxed()
.compat()
.boxify();
} else {
// the max frontier is still "None".
// So there are no nodes in our exclude frontier.
self.exclude_ancestors_future = ok(curr_exclude_ancestors).boxify();
return Ok(Async::Ready(false));
}
}
}
fn update_generation(&mut self) {
let highest_generation = self
.sorted_unique_generations
.pop()
.expect("Expected a non empty heap of generations");
let new_generation = self
.next_generation
.remove(&highest_generation)
.expect("Highest generation doesn't exist");
self.current_generation = highest_generation;
self.drain = new_generation.into_iter().peekable();
}
}
impl Stream for DifferenceOfUnionsOfAncestorsNodeStream {
type Item = ChangesetId;
type Error = Error;
fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> {
loop {
// Empty the drain if any - return all items for this generation
while self.drain.peek().is_some() {
let current_generation = self.current_generation;
let next_in_drain = *self.drain.peek().unwrap();
if try_ready!(self.exclude_node(next_in_drain, current_generation)) {
self.drain.next();
continue;
} else {
let next_in_drain = self.drain.next();
self.pending_changesets.push(make_pending(
self.ctx.clone(),
self.changeset_fetcher.clone(),
next_in_drain.unwrap(),
));
return Ok(Async::Ready(next_in_drain));
}
}
// Wait until we've drained pending_changesets - we can't continue until we
// know about all parents of the just-output generation
loop {
match self.pending_changesets.poll()? {
Async::Ready(Some((hash, generation))) => {
self.next_generation
.entry(generation)
.or_insert_with(HashSet::new)
.insert(hash);
// insert into our sorted list of generations
self.sorted_unique_generations.push(generation);
}
Async::NotReady => return Ok(Async::NotReady),
Async::Ready(None) => break,
};
}
if self.next_generation.is_empty() {
// All parents output - nothing more to send
return Ok(Async::Ready(None));
}
<|fim▁hole|>
#[cfg(test)]
mod test {
use super::*;
use crate::fixtures::linear;
use crate::fixtures::merge_uneven;
use crate::tests::TestChangesetFetcher;
use context::CoreContext;
use fbinit::FacebookInit;
use revset_test_helper::{assert_changesets_sequence, string_to_bonsai};
use skiplist::SkiplistIndex;
#[fbinit::test]
async fn empty_ancestors_combinators(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = linear::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new(repo.clone()));
let repo = Arc::new(repo);
let stream = DifferenceOfUnionsOfAncestorsNodeStream::new_union(
ctx.clone(),
&changeset_fetcher,
Arc::new(SkiplistIndex::new()),
vec![],
)
.boxify();
assert_changesets_sequence(ctx.clone(), &repo, vec![], stream).await;
let excludes =
vec![string_to_bonsai(fb, &repo, "0ed509bf086fadcb8a8a5384dc3b550729b0fc17").await];
let stream = DifferenceOfUnionsOfAncestorsNodeStream::new_with_excludes(
ctx.clone(),
&changeset_fetcher,
Arc::new(SkiplistIndex::new()),
vec![],
excludes,
)
.boxify();
assert_changesets_sequence(ctx.clone(), &repo, vec![], stream).await;
}
#[fbinit::test]
async fn linear_ancestors_with_excludes(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = linear::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new(repo.clone()));
let repo = Arc::new(repo);
let nodestream = DifferenceOfUnionsOfAncestorsNodeStream::new_with_excludes(
ctx.clone(),
&changeset_fetcher,
Arc::new(SkiplistIndex::new()),
vec![string_to_bonsai(fb, &repo, "a9473beb2eb03ddb1cccc3fbaeb8a4820f9cd157").await],
vec![string_to_bonsai(fb, &repo, "0ed509bf086fadcb8a8a5384dc3b550729b0fc17").await],
)
.boxify();
assert_changesets_sequence(
ctx.clone(),
&repo,
vec![string_to_bonsai(fb, &repo, "a9473beb2eb03ddb1cccc3fbaeb8a4820f9cd157").await],
nodestream,
)
.await;
}
#[fbinit::test]
async fn linear_ancestors_with_excludes_empty(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = linear::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new(repo.clone()));
let repo = Arc::new(repo);
let nodestream = DifferenceOfUnionsOfAncestorsNodeStream::new_with_excludes(
ctx.clone(),
&changeset_fetcher,
Arc::new(SkiplistIndex::new()),
vec![string_to_bonsai(fb, &repo, "0ed509bf086fadcb8a8a5384dc3b550729b0fc17").await],
vec![string_to_bonsai(fb, &repo, "0ed509bf086fadcb8a8a5384dc3b550729b0fc17").await],
)
.boxify();
assert_changesets_sequence(ctx.clone(), &repo, vec![], nodestream).await;
}
#[fbinit::test]
async fn ancestors_union(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = merge_uneven::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new(repo.clone()));
let repo = Arc::new(repo);
let nodestream = DifferenceOfUnionsOfAncestorsNodeStream::new_union(
ctx.clone(),
&changeset_fetcher,
Arc::new(SkiplistIndex::new()),
vec![
string_to_bonsai(fb, &repo, "fc2cef43395ff3a7b28159007f63d6529d2f41ca").await,
string_to_bonsai(fb, &repo, "16839021e338500b3cf7c9b871c8a07351697d68").await,
],
)
.boxify();
assert_changesets_sequence(
ctx.clone(),
&repo,
vec![
string_to_bonsai(fb, &repo, "fc2cef43395ff3a7b28159007f63d6529d2f41ca").await,
string_to_bonsai(fb, &repo, "bc7b4d0f858c19e2474b03e442b8495fd7aeef33").await,
string_to_bonsai(fb, &repo, "795b8133cf375f6d68d27c6c23db24cd5d0cd00f").await,
string_to_bonsai(fb, &repo, "4f7f3fd428bec1a48f9314414b063c706d9c1aed").await,
string_to_bonsai(fb, &repo, "16839021e338500b3cf7c9b871c8a07351697d68").await,
string_to_bonsai(fb, &repo, "1d8a907f7b4bf50c6a09c16361e2205047ecc5e5").await,
string_to_bonsai(fb, &repo, "b65231269f651cfe784fd1d97ef02a049a37b8a0").await,
string_to_bonsai(fb, &repo, "d7542c9db7f4c77dab4b315edd328edf1514952f").await,
string_to_bonsai(fb, &repo, "3cda5c78aa35f0f5b09780d971197b51cad4613a").await,
string_to_bonsai(fb, &repo, "15c40d0abc36d47fb51c8eaec51ac7aad31f669c").await,
],
nodestream,
)
.await;
}
#[fbinit::test]
async fn merge_ancestors_from_merge_excludes(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = merge_uneven::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new(repo.clone()));
let repo = Arc::new(repo);
let nodestream = DifferenceOfUnionsOfAncestorsNodeStream::new_with_excludes(
ctx.clone(),
&changeset_fetcher,
Arc::new(SkiplistIndex::new()),
vec![string_to_bonsai(fb, &repo, "d35b1875cdd1ed2c687e86f1604b9d7e989450cb").await],
vec![
string_to_bonsai(fb, &repo, "fc2cef43395ff3a7b28159007f63d6529d2f41ca").await,
string_to_bonsai(fb, &repo, "16839021e338500b3cf7c9b871c8a07351697d68").await,
],
)
.boxify();
assert_changesets_sequence(
ctx.clone(),
&repo,
vec![
string_to_bonsai(fb, &repo, "d35b1875cdd1ed2c687e86f1604b9d7e989450cb").await,
string_to_bonsai(fb, &repo, "264f01429683b3dd8042cb3979e8bf37007118bc").await,
string_to_bonsai(fb, &repo, "5d43888a3c972fe68c224f93d41b30e9f888df7c").await,
],
nodestream,
)
.await;
}
#[fbinit::test]
async fn merge_ancestors_from_merge_excludes_union(fb: FacebookInit) {
let ctx = CoreContext::test_mock(fb);
let repo = merge_uneven::getrepo(fb).await;
let changeset_fetcher: ArcChangesetFetcher =
Arc::new(TestChangesetFetcher::new(repo.clone()));
let repo = Arc::new(repo);
let nodestream = DifferenceOfUnionsOfAncestorsNodeStream::new_with_excludes(
ctx.clone(),
&changeset_fetcher,
Arc::new(SkiplistIndex::new()),
vec![string_to_bonsai(fb, &repo, "d35b1875cdd1ed2c687e86f1604b9d7e989450cb").await],
vec![string_to_bonsai(fb, &repo, "16839021e338500b3cf7c9b871c8a07351697d68").await],
)
.boxify();
assert_changesets_sequence(
ctx.clone(),
&repo,
vec![
string_to_bonsai(fb, &repo, "d35b1875cdd1ed2c687e86f1604b9d7e989450cb").await,
string_to_bonsai(fb, &repo, "264f01429683b3dd8042cb3979e8bf37007118bc").await,
string_to_bonsai(fb, &repo, "5d43888a3c972fe68c224f93d41b30e9f888df7c").await,
string_to_bonsai(fb, &repo, "fc2cef43395ff3a7b28159007f63d6529d2f41ca").await,
string_to_bonsai(fb, &repo, "bc7b4d0f858c19e2474b03e442b8495fd7aeef33").await,
string_to_bonsai(fb, &repo, "795b8133cf375f6d68d27c6c23db24cd5d0cd00f").await,
string_to_bonsai(fb, &repo, "4f7f3fd428bec1a48f9314414b063c706d9c1aed").await,
string_to_bonsai(fb, &repo, "b65231269f651cfe784fd1d97ef02a049a37b8a0").await,
string_to_bonsai(fb, &repo, "d7542c9db7f4c77dab4b315edd328edf1514952f").await,
],
nodestream,
)
.await;
}
}<|fim▁end|> | self.update_generation();
}
}
} |
<|file_name|>toolbar-state.state-node.ts<|end_file_name|><|fim▁begin|>import { Injectable } from '@angular/core';
import { StateNode } from 'microedge-rxstate/dist';
import { ListToolbarStateModel } from './toolbar-state.model';<|fim▁hole|>
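// State node for the list toolbar; its constructor registers the config orchestrator against the shared dispatcher.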
@Injectable()
export class ListToolbarState extends StateNode<ListToolbarStateModel> {
constructor(initialState: ListToolbarStateModel, dispatcher: ListToolbarStateDispatcher) {
super(initialState, dispatcher);
this
.register('config', ListToolbarConfigOrchestrator)
.begin();
}
}<|fim▁end|> | import { ListToolbarStateDispatcher } from './toolbar-state.rxstate';
import { ListToolbarConfigOrchestrator } from './config/config.orchestrator'; |
<|file_name|>SplashActivity.java<|end_file_name|><|fim▁begin|>package com.xidian.yetwish.reading.ui;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import com.xidian.yetwish.reading.R;
import com.xidian.yetwish.reading.framework.utils.SharedPreferencesUtils;
import com.xidian.yetwish.reading.ui.main.ReadingActivity;
/**
* splash activity
* Created by Yetwish on 2016/4/8 0008.
*/
public class SplashActivity extends BaseActivity {
private static final int MSG_SPLASH = 0x01;
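    // Receives MSG_SPLASH on the UI thread once the splash delay elapses, then hands off to ReadingActivity.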
private Handler mHandler = new Handler(){
@Override
public void handleMessage(Message msg) {
super.handleMessage(msg);
switch (msg.what){
case MSG_SPLASH:
ReadingActivity.startActivity(SplashActivity.this);
finish();
break;
}
}
};
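    // Static launcher; EXTRA_SPLASH controls whether the delay screen is shown before the reader opens.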
public static void startActivity(Context context,boolean splash){
Intent intent = new Intent(context,SplashActivity.class);
intent.putExtra(SharedPreferencesUtils.EXTRA_SPLASH,splash);
context.startActivity(intent);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_splash);
drawStatusBar();
boolean splash = getIntent().getBooleanExtra(SharedPreferencesUtils.EXTRA_SPLASH,true);
if(splash){
new Thread(new Runnable() {
@Override
public void run() {
try {
Thread.sleep(1000);
}catch (InterruptedException e){
//TODO catch exception<|fim▁hole|> }finally {
mHandler.sendEmptyMessage(MSG_SPLASH);
}
}
}).start();
}
}
}<|fim▁end|> | |
<|file_name|>faux-editor.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# ansible-vault is a script that encrypts/decrypts YAML files. See
# https://docs.ansible.com/playbooks_vault.html for more details.
from __future__ import (absolute_import, division, print_function)<|fim▁hole|>import os
def main(args):
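    # Append a timestamped marker line to the file named on the command line, editing it in place.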
path = os.path.abspath(args[1])
fo = open(path, 'r+')
content = fo.readlines()
content.append('faux editor added at %s\n' % time.time())
fo.seek(0)
fo.write(''.join(content))
fo.close()
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[:]))<|fim▁end|> | __metaclass__ = type
import sys
import time |
<|file_name|>medicine.py<|end_file_name|><|fim▁begin|>class Medicine(object):
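    """A named medicine together with the prescriptions recorded against it."""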
def __init__(self, name):
self.name = name
self.prescriptions = []<|fim▁hole|><|fim▁end|> |
def add_prescription(self, prescription):
self.prescriptions.append(prescription) |
<|file_name|>rents.js<|end_file_name|><|fim▁begin|>'use strict';
angular.module('dashboardApp')
.controller('RentsCtrl', function ($scope, Rents, $location, $routeParams, Auth, rents) {
$scope.rents = rents;
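    // Ask the server to mark a rental as returned, then mirror the new status and return date locally.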
$scope.returnBook = function(rent){
Rents.returnBook({'rentId': rent.rentId}, function(res){
console.log(res);
rent.status = res.status;
rent.rent.returnDate = res.rent.returnDate;
});
};
$scope.getStyle = function(rent){
if (rent.rent && (new Date(rent.rent.endDate) < new Date() && !rent.rent.returnDate)) {
return 'warning';
}
};
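    // Fetch the next page of rentals for the signed-in user; paging stops once an empty page comes back.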
$scope.query = function(){
if (!$scope.mayQuery) {
return false;
}
var query = {page: $scope.page + 1};
query = angular.extend(query, $routeParams, {user: Auth.getUser().userId});
console.log(query);
Rents.query(query,
function(rents){
if (!rents.length) {
$scope.mayQuery = false;
}
$scope.rents = $scope.rents.concat(rents);
$scope.page += 1;
},
function(error){
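                // Paging errors are ignored; the list simply keeps the results loaded so far.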
});<|fim▁hole|> };
$scope.page = 1;
$scope.mayQuery = true;
});<|fim▁end|> | |
<|file_name|>zte_mf180.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (C) 2011-2012 Vodafone España, S.A.
# Author: Andrew Bird
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from wader.common.consts import WADER_CONNTYPE_USB
from core.hardware.zte import (ZTEWCDMADevicePlugin,
ZTEWCDMACustomizer,
ZTEWrapper)
class ZTEMF180Wrapper(ZTEWrapper):
def send_ussd(self, ussd):
"""Sends the ussd command ``ussd``"""
# XXX: assumes it's the same as 637U
# MF180 wants request in ascii chars even though current
# set might be ucs2
return super(ZTEMF180Wrapper, self).send_ussd(ussd, force_ascii=True)
class ZTEMF180Customizer(ZTEWCDMACustomizer):
wrapper_klass = ZTEMF180Wrapper
class ZTEMF180(ZTEWCDMADevicePlugin):
""":class:`~core.plugin.DevicePlugin` for ZTE's MF180"""
name = "ZTE MF180"
version = "0.1"
author = u"Andrew Bird"
custom = ZTEMF180Customizer()
__remote_name__ = "MF180"
__properties__ = {
'ID_VENDOR_ID': [0x19d2],
'ID_MODEL_ID': [0x2003],
}<|fim▁hole|>zte_mf180 = ZTEMF180()<|fim▁end|> |
conntype = WADER_CONNTYPE_USB
|
<|file_name|>SchemaTest.java<|end_file_name|><|fim▁begin|>/*
* #%L
* Alfresco Repository
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
<|fim▁hole|> * along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.util.schemacomp.model;
import static org.junit.Assert.*;
import static org.mockito.Mockito.verify;
import org.alfresco.test_category.BaseSpringTestsCategory;
import org.alfresco.test_category.OwnJVMTestsCategory;
import org.alfresco.util.schemacomp.DbProperty;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
/**
* Tests for the Schema class.
*
* @author Matt Ward
*/
@RunWith(MockitoJUnitRunner.class)
@Category(BaseSpringTestsCategory.class)
public class SchemaTest extends DbObjectTestBase<Schema>
{
private Schema left;
private Schema right;
@Before
public void setUp()
{
left = new Schema("left_schema");
right = new Schema("right_schema");
}
@Override
protected Schema getThisObject()
{
return left;
}
@Override
protected Schema getThatObject()
{
return right;
}
@Override
protected void doDiffTests()
{
// We need to be warned if comparing, for example a version 500 schema with a
// version 501 schema.
inOrder.verify(comparisonUtils).compareSimple(
new DbProperty(left, "version"),
new DbProperty(right, "version"),
ctx);
// In addition to the base class functionality, Schema.diff() compares
// the DbObjects held in the other schema with its own DbObjects.
inOrder.verify(comparisonUtils).compareCollections(left.objects, right.objects, ctx);
}
@Test
public void acceptVisitor()
{
DbObject dbo1 = Mockito.mock(DbObject.class);
left.add(dbo1);
DbObject dbo2 = Mockito.mock(DbObject.class);
left.add(dbo2);
DbObject dbo3 = Mockito.mock(DbObject.class);
left.add(dbo3);
left.accept(visitor);
verify(dbo1).accept(visitor);
verify(dbo2).accept(visitor);
verify(dbo3).accept(visitor);
verify(visitor).visit(left);
}
@Test
public void sameAs()
{
// We have to assume that two schemas are always the same, regardless of name,
// otherwise unless the reference schema has the same name as the target database
// all the comparisons will fail - and users can choose to install databases with any schema
// name they choose.
assertTrue("Schemas should be considered the same", left.sameAs(right));
// Things are always the same as themselves.
assertTrue("Schemas are the same physical object", left.sameAs(left));
assertFalse("A table is not the same as a schema", left.sameAs(new Table("left_schema")));
assertFalse("null is not the same as a schema", left.sameAs(null));
}
}<|fim▁end|> | *
* You should have received a copy of the GNU Lesser General Public License
|
<|file_name|>DependencyInjector.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 2014 Open Door Logistics (www.opendoorlogistics.com)
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser Public License v3
* which accompanies this distribution, and is available at http://www.gnu.org/licenses/lgpl.txt
******************************************************************************/
package com.opendoorlogistics.core.scripts.execution.dependencyinjection;
import java.awt.Dimension;
import javax.swing.JPanel;
import com.opendoorlogistics.api.HasApi;
import com.opendoorlogistics.api.components.ComponentControlLauncherApi.ControlLauncherCallback;
import com.opendoorlogistics.api.components.ComponentExecutionApi.ClosedStatusObservable;
import com.opendoorlogistics.api.components.ComponentExecutionApi.ModalDialogResult;
import com.opendoorlogistics.api.components.ODLComponent;
import com.opendoorlogistics.api.components.ProcessingApi;
import com.opendoorlogistics.api.distances.DistancesConfiguration;
import com.opendoorlogistics.api.distances.ODLCostMatrix;
import com.opendoorlogistics.api.geometry.LatLong;
import com.opendoorlogistics.api.geometry.ODLGeom;
import com.opendoorlogistics.api.tables.ODLDatastore;
import com.opendoorlogistics.api.tables.ODLTable;
import com.opendoorlogistics.api.tables.ODLTableReadOnly;
import com.opendoorlogistics.core.tables.decorators.datastores.dependencies.DataDependencies;
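/**
 * Services made available to executing scripts, covering modal UI, distance and geometry
 * calculations, and registration of instruction-level data dependencies.
 */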
public interface DependencyInjector extends ProcessingApi, HasApi {
String getBatchKey();
ModalDialogResult showModalPanel(JPanel panel,String title, ModalDialogResult ...buttons);
ModalDialogResult showModalPanel(JPanel panel,String title,Dimension minSize, ModalDialogResult ...buttons);
<T extends JPanel & ClosedStatusObservable> void showModalPanel(T panel, String title);
ODLCostMatrix calculateDistances(DistancesConfiguration request, ODLTableReadOnly... tables);<|fim▁hole|>}<|fim▁end|> | ODLGeom calculateRouteGeom(DistancesConfiguration request, LatLong from, LatLong to);
void addInstructionDependencies(String instructionId, DataDependencies dependencies);
void submitControlLauncher(String instructionId,ODLComponent component,ODLDatastore<? extends ODLTable> parametersTableCopy, String reportTopLabel,ControlLauncherCallback cb); |
<|file_name|>response.py<|end_file_name|><|fim▁begin|>import re
<|fim▁hole|> # There are three possible server completion responses
OK = "OK" # indicates success
NO = "NO" # indicates failure
BAD = "BAD" # indicates a protocol error
class ListResponse(object):
def __init__(self, list_response):
match = list_re.match(list_response)
self.attributes = match.group(1).split()
self.hierarchy_delimiter = match.group(2)
self.name = match.group(3)<|fim▁end|> | list_re = re.compile(r'\((.*)\) \"(.*)\" \"(.*)\"')
class Response(object): |
<|file_name|>mathml_store_rules.js<|end_file_name|><|fim▁begin|>// Copyright 2013 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Speech rules for mathml and mathjax nodes.
* @author [email protected] (Volker Sorge)
*/
goog.provide('cvox.MathmlStoreRules');
goog.require('cvox.MathStore');
goog.require('cvox.MathmlStore');
goog.require('cvox.MathmlStoreUtil');
goog.require('cvox.StoreUtil');
/**
* Rule initialization.
* @constructor
*/
cvox.MathmlStoreRules = function() {
// Custom functions used in the rules.
cvox.MathmlStoreRules.initCustomFunctions_();
cvox.MathmlStoreRules.initDefaultRules_(); // MathML rules.
cvox.MathmlStoreRules.initMathjaxRules_(); // MathJax Rules
cvox.MathmlStoreRules.initAliases_(); // MathJax Aliases for MathML rules.
cvox.MathmlStoreRules.initSpecializationRules_(); // Square, cube, etc.
cvox.MathmlStoreRules.initSemanticRules_();
};
goog.addSingletonGetter(cvox.MathmlStoreRules);
/**
* @type {cvox.MathStore}
*/
cvox.MathmlStoreRules.mathStore = cvox.MathmlStore.getInstance();
/**
* @override
*/
cvox.MathmlStoreRules.mathStore.initialize = cvox.MathmlStoreRules.getInstance;
// These are used to work around Closure's rules for aliasing.
/** @private */
cvox.MathmlStoreRules.defineDefaultMathmlRule_ = goog.bind(
cvox.MathmlStoreRules.mathStore.defineDefaultMathmlRule,
cvox.MathmlStoreRules.mathStore);
/** @private */
cvox.MathmlStoreRules.defineRule_ = goog.bind(
cvox.MathmlStoreRules.mathStore.defineRule,
cvox.MathmlStoreRules.mathStore);
/** @private */
cvox.MathmlStoreRules.defineRuleAlias_ = goog.bind(
cvox.MathmlStoreRules.mathStore.defineRuleAlias,
cvox.MathmlStoreRules.mathStore);
/** @private */
cvox.MathmlStoreRules.addContextFunction_ = goog.bind(
cvox.MathmlStoreRules.mathStore.contextFunctions.add,
cvox.MathmlStoreRules.mathStore.contextFunctions);
/** @private */
cvox.MathmlStoreRules.addCustomQuery_ = goog.bind(
cvox.MathmlStoreRules.mathStore.customQueries.add,
cvox.MathmlStoreRules.mathStore.customQueries);
goog.scope(function() {
var defineDefaultMathmlRule = cvox.MathmlStoreRules.defineDefaultMathmlRule_;
var defineRule = cvox.MathmlStoreRules.defineRule_;
var defineRuleAlias = cvox.MathmlStoreRules.defineRuleAlias_;
var addCTXF = cvox.MathmlStoreRules.addContextFunction_;
var addCQF = cvox.MathmlStoreRules.addCustomQuery_;
/**
* Initialize the custom functions.
* @private
*/
cvox.MathmlStoreRules.initCustomFunctions_ = function() {
addCTXF('CTXFnodeCounter', cvox.StoreUtil.nodeCounter);
addCTXF('CTXFmfSeparators', cvox.MathmlStoreUtil.mfencedSeparators);
addCTXF('CTXFcontentIterator', cvox.MathmlStoreUtil.contentIterator);
addCQF('CQFextender', cvox.MathmlStoreUtil.retrieveMathjaxExtender);
addCQF('CQFmathmlmunder', cvox.MathmlStoreUtil.checkMathjaxMunder);
addCQF('CQFmathmlmover', cvox.MathmlStoreUtil.checkMathjaxMover);
addCQF('CQFmathmlmsub', cvox.MathmlStoreUtil.checkMathjaxMsub);
addCQF('CQFmathmlmsup', cvox.MathmlStoreUtil.checkMathjaxMsup);
addCQF('CQFlookupleaf', cvox.MathmlStoreUtil.retrieveMathjaxLeaf);
};
/**
* Initialize the default mathrules.
* @private
*/
cvox.MathmlStoreRules.initDefaultRules_ = function() {
// Initial rule
defineDefaultMathmlRule('math', '[m] ./*');
defineDefaultMathmlRule('semantics', '[n] ./*[1]');
// Space elements
defineDefaultMathmlRule('mspace', '[p] (pause:250)');
defineDefaultMathmlRule('mstyle', '[m] ./*');
defineDefaultMathmlRule('mpadded', '[m] ./*');
defineDefaultMathmlRule('merror', '[m] ./*');
defineDefaultMathmlRule('mphantom', '[m] ./*');
// Token elements.
defineDefaultMathmlRule('mtext', '[t] text(); [p] (pause:200)');
defineDefaultMathmlRule('mi', '[n] text()');
defineDefaultMathmlRule('mo', '[n] text() (rate:-0.1)');
defineDefaultMathmlRule('mn', '[n] text()');
// Dealing with fonts.
defineRule('mtext-variant', 'default.default',
'[t] "comece"; [t] @mathvariant (pause:150);' +
'[t] text() (pause:150); [t] "fim"; ' +
'[t] @mathvariant (pause:200)',
'self::mathml:mtext', '@mathvariant', '@mathvariant!="normal"');
defineRule('mi-variant', 'default.default',
'[t] @mathvariant; [n] text()',
'self::mathml:mi', '@mathvariant', '@mathvariant!="normal"');
defineRuleAlias('mi-variant', 'self::mathml:mn', // mn
'@mathvariant', '@mathvariant!="normal"');
defineRule('mo-variant', 'default.default',
'[t] @mathvariant; [n] text() (rate:-0.1)',
'self::mathml:mo', '@mathvariant', '@mathvariant!="normal"');
defineDefaultMathmlRule(
'ms',
'[t] "cadeia" (pitch:0.5, rate:0.5); [t] text()');
// Script elements.
defineDefaultMathmlRule(
'msup', '[n] ./*[1]; [t] "super";' +
'[n] ./*[2] (pitch:0.35); [p] (pause:300)');
defineDefaultMathmlRule(
'msubsup',
'[n] ./*[1]; [t] "sub"; [n] ./*[2] (pitch:-0.35); [p] (pause:200);' +
'[t] "super"; [n] ./*[3] (pitch:0.35); [p] (pause:300)'
);
defineDefaultMathmlRule(
'msub',
'[n] ./*[1]; [t] "sub"; [n] ./*[2] (pitch:-0.35); [p] (pause:300)');
defineDefaultMathmlRule(
'mover', '[n] ./*[2] (pitch:0.35); [p] (pause:200);' +
' [t] "mais"; [n] ./*[1]; [p] (pause:400)');
defineDefaultMathmlRule(
'munder',
'[n] ./*[2] (pitch:-0.35); [t] "sob"; [n] ./*[1]; [p] (pause:400)');
// defineDefaultMathmlRule(
// 'munderover',
// '[n] ./*[2] (pitch:-0.35); [t] "em e"; [n] ./*[3] (pitch:0.35);' +
// ' [t] "mais"; [n] ./*[1]; [p] (pause:400)');
defineDefaultMathmlRule(
'munderover',
'[t] "somatório de"; [n] ./*[2] (pitch:-0.35);' +
' [t] "até" ; [n] ./*[3] (pitch:0.35); [p] (pause:400)');
// Layout elements.
defineDefaultMathmlRule('mrow', '[m] ./*');
defineDefaultMathmlRule(
'msqrt', '[t] "Raiz quadrada de"; [m] ./* (rate:0.2); [p] (pause:400)');
defineDefaultMathmlRule(
'mroot', '[t] "raiz de"; [n] ./*[2]; [t] "de";' +
'[n] ./*[1] (rate:0.2); [p] (pause:400)');
defineDefaultMathmlRule(
'mfrac', ' [p] (pause:400); [n] ./*[1] (pitch:0.3);' +
' [t] "dividido por"; [n] ./*[2] (pitch:-0.3); [p] (pause:400)');
defineRule(
'mfenced-single', 'default.default',
'[t] @open (context:"opening"); [m] ./* (separator:@separators);' +
'[t] @close (context:"closing")',
'self::mathml:mfenced', 'string-length(string(@separators))=1');
defineRule(
'mfenced-empty', 'default.default',
'[t] @open (context:"opening"); [m] ./*;' +
'[t] @close (context:"closing")',
'self::mathml:mfenced', 'string-length(string(@separators))=1',
'string(@separators)=" "');
defineRule(
'mfenced-comma', 'default.default',
'[t] @open (context:"opening"); [m] ./* (separator:"comma");' +
'[t] @close (context:"closing")',
'self::mathml:mfenced');
defineRule(
'mfenced-multi', 'default.default',
'[t] @open (context:"opening"); [m] ./* (sepFunc:CTXFmfSeparators,' +
'separator:@separators); [t] @close (context:"closing")',
'self::mathml:mfenced', 'string-length(string(@separators))>1');
// Mtable rules.
defineRule(
'mtable', 'default.default',
'[t] "matriz"; [m] ./* (ctxtFunc:CTXFnodeCounter,' +
'context:"row",pause:100)',
'self::mathml:mtable');
defineRule(
'mtr', 'default.default',
'[m] ./* (ctxtFunc:CTXFnodeCounter,context:"column",pause:100)',
'self::mathml:mtr');
defineRule(
'mtd', 'default.default',
'[m] ./*', 'self::mathml:mtd');
// Mtable superbrief rules.
defineRule(
'mtable', 'default.superbrief',
'[t] count(child::mathml:mtr); [t] "por";' +
'[t] count(child::mathml:mtr[1]/mathml:mtd); [t] "matriz";',
'self::mathml:mtable');
// Mtable short rules.
defineRule(
'mtable', 'default.short',
'[t] "matriz"; [m] ./*',
'self::mathml:mtable');
defineRule(
'mtr', 'default.short',
'[m] ./*', 'self::mathml:mtr');
<|fim▁hole|> '[t] "Elemento"; [t] count(./preceding-sibling::mathml:mtd)+1;' +
'[t] count(./parent::mathml:mtr/preceding-sibling::mathml:mtr)+1;' +
'[p] (pause:500); [m] ./*',
'self::mathml:mtd');
// Mmultiscripts rules.
defineRule(
'mmultiscripts-4', 'default.default',
'[n] ./*[1]; [p] (pause:200);' +
'[t] "esquerda sub"; [n] ./*[5] (pitch:-0.35); [p] (pause:200);' +
'[t] "deixou super"; [n] ./*[6] (pitch:0.35); [p] (pause:200);' +
'[t] "direito sub"; [n] ./*[2] (pitch:-0.35); [p] (pause:200);' +
'[t] "direito super"; [n] ./*[3] (pitch:0.35); [p] (pause:300);',
'self::mathml:mmultiscripts');
defineRule(
'mmultiscripts-3-1', 'default.default',
'[n] ./*[1]; [p] (pause:200);' +
'[t] "esquerda sub"; [n] ./*[5] (pitch:-0.35); [p] (pause:200);' +
'[t] "deixou super"; [n] ./*[6] (pitch:0.35); [p] (pause:200);' +
'[t] "direito super"; [n] ./*[3] (pitch:0.35); [p] (pause:300);',
'self::mathml:mmultiscripts', './mathml:none=./*[2]',
'./mathml:mprescripts=./*[4]');
defineRule(
'mmultiscripts-3-2', 'default.default',
'[n] ./*[1]; [p] (pause:200);' +
'[t] "esquerda sub"; [n] ./*[5] (pitch:-0.35); [p] (pause:200);' +
'[t] "deixou super"; [n] ./*[6] (pitch:0.35); [p] (pause:200);' +
'[t] "direito sub"; [n] ./*[2] (pitch:-0.35); [p] (pause:200);',
'self::mathml:mmultiscripts', './mathml:none=./*[3]',
'./mathml:mprescripts=./*[4]');
defineRule(
'mmultiscripts-3-3', 'default.default',
'[n] ./*[1]; [p] (pause:200);' +
'[t] "deixou super"; [n] ./*[6] (pitch:0.35); [p] (pause:200);' +
'[t] "direito sub"; [n] ./*[2] (pitch:-0.35); [p] (pause:200);' +
'[t] "direito super"; [n] ./*[3] (pitch:0.35); [p] (pause:300);',
'self::mathml:mmultiscripts', './mathml:none=./*[5]',
'./mathml:mprescripts=./*[4]');
defineRule(
'mmultiscripts-3-4', 'default.default',
'[n] ./*[1]; [p] (pause:200);' +
'[t] "esquerda sub"; [n] ./*[5] (pitch:-0.35); [p] (pause:200);' +
'[t] "direito sub"; [n] ./*[2] (pitch:-0.35); [p] (pause:200);' +
'[t] "direito super"; [n] ./*[3] (pitch:0.35); [p] (pause:300);',
'self::mathml:mmultiscripts', './mathml:none=./*[6]',
'./mathml:mprescripts=./*[4]');
defineRule(
'mmultiscripts-2-1', 'default.default',
'[n] ./*[1]; [p] (pause:200);' +
'[t] "esquerda sub"; [n] ./*[5] (pitch:-0.35); [p] (pause:200);' +
'[t] "deixou super"; [n] ./*[6] (pitch:0.35); [p] (pause:300);',
'self::mathml:mmultiscripts', './mathml:none=./*[2]',
'./mathml:none=./*[3]', './mathml:mprescripts=./*[4]');
defineRule(
'mmultiscripts-1-1', 'default.default',
'[n] ./*[1]; [p] (pause:200);' +
'[t] "deixou super"; [n] ./*[6] (pitch:0.35); [p] (pause:300);',
'self::mathml:mmultiscripts', './mathml:none=./*[2]',
'./mathml:none=./*[3]', './mathml:mprescripts=./*[4]',
'./mathml:none=./*[5]');
defineRule(
'mmultiscripts-1-2', 'default.default',
'[n] ./*[1]; [p] (pause:200);' +
'[t] "esquerda sub"; [n] ./*[5] (pitch:-0.35); [p] (pause:200);',
'self::mathml:mmultiscripts', './mathml:none=./*[2]',
'./mathml:none=./*[3]', './mathml:mprescripts=./*[4]',
'./mathml:none=./*[6]');
};
/**
* Initialize mathJax Rules
* @private
*/
cvox.MathmlStoreRules.initMathjaxRules_ = function() {
// Initial rule
defineRule('mj-math', 'default.default',
'[n] ./*[1]/*[1]/*[1]', 'self::span[@class="math"]');
// Token Elements
defineRule(
'mj-leaf', 'default.default',
'[n] CQFlookupleaf', 'self::span[@class="mi"]');
defineRuleAlias('mj-leaf', 'self::span[@class="mo"]');
defineRuleAlias('mj-leaf', 'self::span[@class="mn"]');
defineRuleAlias('mj-leaf', 'self::span[@class="mtext"]');
defineRule(
'mj-mo-ext', 'default.default',
'[n] CQFextender', 'self::span[@class="mo"]',
'./*[1]/*[1]/text()', './*[1]/*[2]/text()');
defineRule(
'mj-texatom', 'default.default',
'[n] ./*[1]', 'self::span[@class="texatom"]');
// Script elements.
defineRule(
'mj-msubsup', 'default.default',
'[n] ./*[1]/*[1]/*[1]; [t] "sub"; [n] ./*[1]/*[3]/*[1] (pitch:-0.35);' +
'[p] (pause:200); [t] "super"; [n] ./*[1]/*[2]/*[1] (pitch:0.35);' +
'[p] (pause:300)',
'self::span[@class="msubsup"]');
defineRule(
'mj-msub', 'default.default',
'[n] ./*[1]/*[1]/*[1]; [t] "sub";' +
'[n] ./*[1]/*[2]/*[1] (pitch:-0.35); [p] (pause:300)',
'self::span[@class="msub"]');
defineRule(
'mj-msup', 'default.default',
'[n] ./*[1]/*[1]/*[1]; [t] "super";' +
'[n] ./*[1]/*[2]/*[1] (pitch:0.35); [p] (pause:300)',
'self::span[@class="msup"]');
defineRule(
'mj-munderover', 'default.default',
'[n] ./*[1]/*[2]/*[1] (pitch:0.35); [t] "em e";' +
'[n] ./*[1]/*[3]/*[1] (pitch:-0.35); [t] "mais";' +
'[n] ./*[1]/*[1]/*[1]; [p] (pause:400)',
'self::span[@class="munderover"]');
defineRule(
'mj-munder', 'default.default',
'[n] ./*[1]/*[2]/*[1] (pitch:0.35); [t] "sob";' +
'[n] ./*[1]/*[1]/*[1]; [p] (pause:400)',
'self::span[@class="munder"]');
defineRule(
'mj-mover', 'default.default',
'[n] ./*[1]/*[2]/*[1] (pitch:0.35); [t] "mais";' +
'[n] ./*[1]/*[1]/*[1]; [p] (pause:400)',
'self::span[@class="mover"]');
// Layout elements.
defineRule(
'mj-mfrac', 'default.default',
'[p] (pause:250); [n] ./*[1]/*[1]/*[1] (pitch:0.3); [p] (pause:250);' +
' [t] "dividido por"; [n] ./*[1]/*[2]/*[1] (pitch:-0.3);' +
'[p] (pause:400)',
'self::span[@class="mfrac"]');
defineRule(
'mj-msqrt', 'default.default',
'[t] "Raiz quadrada de";' +
'[n] ./*[1]/*[1]/*[1] (rate:0.2); [p] (pause:400)',
'self::span[@class="msqrt"]');
defineRule(
'mj-mroot', 'default.default',
'[t] "raiz de"; [n] ./*[1]/*[4]/*[1]; [t] "de";' +
'[n] ./*[1]/*[1]/*[1] (rate:0.2); [p] (pause:400)',
'self::span[@class="mroot"]');
defineRule(
'mj-mfenced', 'default.default',
'[t] "abertura"; [n] ./*[1]; ' +
'[m] ./*[position()>1 and position()<last()];' +
' [t] "fechar"; [n] ./*[last()]',
'self::span[@class="mfenced"]');
// Mtable short rules.
defineRuleAlias('mj-leaf', 'self::span[@class="mtable"]');
// Mmultiscripts rules.
defineRuleAlias('mj-leaf', 'self::span[@class="mmultiscripts"]');
};
/**
* Initialize mathJax Aliases
* @private
*/
cvox.MathmlStoreRules.initAliases_ = function() {
// Space elements
defineRuleAlias('mspace', 'self::span[@class="mspace"]');
defineRuleAlias('mstyle', 'self::span[@class="mstyle"]');
defineRuleAlias('mpadded', 'self::span[@class="mpadded"]');
defineRuleAlias('merror', 'self::span[@class="merror"]');
defineRuleAlias('mphantom', 'self::span[@class="mphantom"]');
// Token elements.
defineRuleAlias('ms', 'self::span[@class="ms"]');
// Layout elements.
defineRuleAlias('mrow', 'self::span[@class="mrow"]');
// The following rules fix bugs in MathJax's LaTeX translation.
defineRuleAlias(
'mj-msub', 'self::span[@class="msubsup"]', 'CQFmathmlmsub');
defineRuleAlias(
'mj-msup', 'self::span[@class="msubsup"]', 'CQFmathmlmsup');
defineRuleAlias(
'mj-munder', 'self::span[@class="munderover"]', 'CQFmathmlmunder');
defineRuleAlias(
'mj-mover', 'self::span[@class="munderover"]', 'CQFmathmlmover');
};
/**
* Initialize specializations wrt. content of nodes.
* @private
*/
cvox.MathmlStoreRules.initSpecializationRules_ = function() {
// Some special nodes for square and cube.
// MathML
defineRule(
'square', 'default.default',
'[n] ./*[1]; [t] "quadrado" (pitch:0.35); [p] (pause:300)',
'self::mathml:msup', './*[2][text()=2]');
defineRuleAlias(
'square', 'self::mathml:msup',
'./mathml:mrow=./*[2]', 'count(./*[2]/*)=1', './*[2]/*[1][text()=2]');
defineRule(
'cube', 'default.default',
'[n] ./*[1]; [t] "cubo" (pitch:0.35); [p] (pause:300)',
'self::mathml:msup', './*[2][text()=3]');
defineRuleAlias(
'cube', 'self::mathml:msup',
'./mathml:mrow=./*[2]', 'count(./*[2]/*)=1', './*[2]/*[1][text()=3]');
defineRule(
'square-sub', 'default.default',
'[n] ./*[1]; [t] "sub"; [n] ./*[2] (pitch:-0.35);' +
'[p] (pause:300); [t] "quadrado" (pitch:0.35); [p] (pause:400)',
'self::mathml:msubsup', './*[3][text()=2]');
defineRuleAlias(
'square-sub', 'self::mathml:msubsup',
'./mathml:mrow=./*[3]', 'count(./*[3]/*)=1', './*[3]/*[1][text()=2]');
defineRule(
'cube-sub', 'default.default',
'[n] ./*[1]; [t] "sub"; [n] ./*[2] (pitch:-0.35);' +
'[p] (pause:300); [t] "cubo" (pitch:0.35); [p] (pause:400)',
'self::mathml:msubsup', './*[3][text()=3]');
defineRuleAlias(
'cube-sub', 'self::mathml:msubsup',
'./mathml:mrow=./*[3]', 'count(./*[3]/*)=1', './*[3]/*[1][text()=3]');
// MathJax
defineRule(
'mj-square', 'default.default',
'[n] ./*[1]/*[1]/*[1]; [t] "quadrado" (pitch:0.35); [p] (pause:300)',
'self::span[@class="msup"]', './*[1]/*[2]/*[1][text()=2]');
defineRuleAlias(
'mj-square', 'self::span[@class="msup"]',
'./*[1]/*[2]/*[1]=./*[1]/*[2]/span[@class="mrow"]',
'count(./*[1]/*[2]/*[1]/*)=1', './*[1]/*[2]/*[1]/*[1][text()=2]');
defineRuleAlias(
'mj-square', 'self::span[@class="msubsup"]', 'CQFmathmlmsup',
'./*[1]/*[2]/*[1][text()=2]');
defineRuleAlias(
'mj-square', 'self::span[@class="msubsup"]', 'CQFmathmlmsup',
'./*[1]/*[2]/*[1]=./*[1]/*[2]/span[@class="mrow"]',
'count(./*[1]/*[2]/*[1]/*)=1', './*[1]/*[2]/*[1]/*[1][text()=2]');
defineRule(
'mj-cube', 'default.default',
'[n] ./*[1]/*[1]/*[1]; [t] "cubo" (pitch:0.35); [p] (pause:300)',
'self::span[@class="msup"]', './*[1]/*[2]/*[1][text()=3]');
defineRuleAlias(
'mj-cube', 'self::span[@class="msup"]',
'./*[1]/*[2]/*[1]=./*[1]/*[2]/span[@class="mrow"]',
'count(./*[1]/*[2]/*[1]/*)=1', './*[1]/*[2]/*[1]/*[1][text()=3]');
defineRuleAlias(
'mj-cube', 'self::span[@class="msubsup"]', 'CQFmathmlmsup',
'./*[1]/*[2]/*[1][text()=3]');
defineRuleAlias(
'mj-cube', 'self::span[@class="msubsup"]', 'CQFmathmlmsup',
'./*[1]/*[2]/*[1]=./*[1]/*[2]/span[@class="mrow"]',
'count(./*[1]/*[2]/*[1]/*)=1', './*[1]/*[2]/*[1]/*[1][text()=3]');
defineRule(
'mj-square-sub', 'default.default',
'[n] ./*[1]/*[1]/*[1]; [t] "sub"; [n] ./*[1]/*[3]/*[1] (pitch:-0.35); ' +
'[p] (pause:300); [t] "quadrado" (pitch:0.35); [p] (pause:400)',
'self::span[@class="msubsup"]', './*[1]/*[2]/*[1][text()=2]');
defineRuleAlias(
'mj-square-sub', 'self::span[@class="msubsup"]',
'./*[1]/*[2]/*[1]=./*[1]/*[2]/span[@class="mrow"]',
'count(./*[1]/*[2]/*[1]/*)=1', './*[1]/*[2]/*[1]/*[1][text()=2]');
defineRule(
'mj-cube-sub', 'default.default',
'[n] ./*[1]/*[1]/*[1]; [t] "sub"; [n] ./*[1]/*[3]/*[1] (pitch:-0.35); ' +
'[p] (pause:300); [t] "cubo" (pitch:0.35); [p] (pause:400)',
'self::span[@class="msubsup"]', './*[1]/*[2]/*[1][text()=3]');
defineRuleAlias(
'mj-cube-sub', 'self::span[@class="msubsup"]',
'./*[1]/*[2]/*[1]=./*[1]/*[2]/span[@class="mrow"]',
'count(./*[1]/*[2]/*[1]/*)=1', './*[1]/*[2]/*[1]/*[1][text()=3]');
};
/**
 * Initialize the semantic-tree speech rules.
* @private
*/
cvox.MathmlStoreRules.initSemanticRules_ = function() {
// Initial rule
defineRule(
'stree', 'default.default',
'[n] ./*[1]', 'self::stree');
defineRule(
'multrel', 'default.default',
'[t] "multirelation"; [m] children/* (sepFunc:CTXFcontentIterator)',
'self::multirel');
defineRule(
'variable-equality', 'default.default',
'[t] "equação de sequência"; [m] ./children/* ' +
'(context:"part",ctxtFunc:CTXFnodeCounter,separator:./text())',
'self::relseq[@role="equality"]', 'count(./children/*)>2',
'./children/punct[@role="ellipsis"]');// Make that better!
defineRule(
'multi-equality', 'default.default',
'[t] "equação de sequência"; [m] ./children/* ' +
'(context:"part",ctxtFunc:CTXFnodeCounter,separator:./text())',
'self::relseq[@role="equality"]', 'count(./children/*)>2');
defineRule(
'multi-equality', 'default.short',
'[t] "equação de sequência"; [m] ./children/* ' +
'(separator:./text())',
'self::relseq[@role="equality"]', 'count(./children/*)>2');
defineRule(
'equality', 'default.default',
'[t] "equação"; [t] "lado esquerdo"; [n] children/*[1];' +
'[p] (pause:200); [n] text() (pause:200);' +
'[t] "lado direito"; [n] children/*[2]',
'self::relseq[@role="equality"]', 'count(./children/*)=2');
defineRule(
'simple-equality', 'default.default',
'[n] children/*[1]; [p] (pause:200); [n] text() (pause:200);' +
'[n] children/*[2]',
'self::relseq[@role="equality"]', 'count(./children/*)=2',
'./children/identifier or ./children/number');
defineRule(
'simple-equality2', 'default.default',
'[n] children/*[1]; [p] (pause:200); [n] text() (pause:200);' +
'[n] children/*[2]',
'self::relseq[@role="equality"]', 'count(./children/*)=2',
'./children/function or ./children/appl');
defineRule(
'multrel', 'default.default',
'[m] children/* (separator:./text())',
'self::relseq');
defineRule(
'binary-operation', 'default.default',
'[m] children/* (separator:text());',
'self::infixop');
defineRule(
'variable-addition', 'default.default',
'[t] "soma com número variável de summands";' +
'[p] (pause:400); [m] children/* (separator:./text())',
'self::infixop[@role="addition"]', 'count(children/*)>2',
'children/punct[@role="ellipsis"]');// Make that better!
defineRule(
'multi-addition', 'default.default',
'[t] "soma,"; [t] count(./children/*); [t] ", summands";' +
'[p] (pause:400); [m] ./children/* (separator:./text())',
'self::infixop[@role="addition"]', 'count(./children/*)>2');
// Prefix Operator
defineRule(
'prefix', 'default.default',
'[t] "prefixo"; [n] text(); [t] "de" (pause 150);' +
'[n] children/*[1]',
'self::prefixop');
defineRule(
'negative', 'default.default',
'[t] "negativo"; [n] children/*[1]',
'self::prefixop', 'self::prefixop[@role="negative"]');
// Postfix Operator
defineRule(
'postfix', 'default.default',
'[n] children/*[1]; [t] "postfix"; [n] text() (pause 300)',
'self::postfixop');
defineRule(
'identifier', 'default.default',
'[n] text()', 'self::identifier');
defineRule(
'number', 'default.default',
'[n] text()', 'self::number');
defineRule(
'fraction', 'default.default',
'[p] (pause:250); [n] children/*[1] (pitch:0.3); [p] (pause:250);' +
' [t] "dividido por"; [n] children/*[2] (pitch:-0.3); [p] (pause:400)',
'self::fraction');
defineRule(
'superscript', 'default.default',
'[n] children/*[1]; [t] "super"; [n] children/*[2] (pitch:0.35);' +
'[p] (pause:300)',
'self::superscript');
defineRule(
'subscript', 'default.default',
'[n] children/*[1]; [t] "sub"; [n] children/*[2] (pitch:-0.35);' +
'[p] (pause:300)',
'self::subscript');
defineRule(
'ellipsis', 'default.default',
'[p] (pause:200); [t] "dot dot dot"; [p] (pause:300)',
'self::punct', 'self::punct[@role="ellipsis"]');
defineRule(
'fence-single', 'default.default',
'[n] text()',
'self::punct', 'self::punct[@role="openfence"]');
defineRuleAlias('fence-single', 'self::punct',
'self::punct[@role="closefence"]');
defineRuleAlias('fence-single', 'self::punct',
'self::punct[@role="vbar"]');
defineRuleAlias('fence-single', 'self::punct',
'self::punct[@role="application"]');
// TODO (sorge) Refine punctuations further.
defineRule(
'omit-punct', 'default.default',
'[p] (pause:200);',
'self::punct');
defineRule(
'omit-empty', 'default.default',
'',
'self::empty');
// Fences rules.
defineRule(
'fences-open-close', 'default.default',
'[p] (pause:100); [t] "aberto"; [n] children/*[1]; [p] (pause:200);' +
'[t] "fechar"',
'self::fenced[@role="leftright"]');
defineRule(
'fences-open-close-in-appl', 'default.default',
'[p] (pause:100); [n] children/*[1]; [p] (pause:200);',
'self::fenced[@role="leftright"]', './parent::children/parent::appl');
defineRule(
'fences-neutral', 'default.default',
'[p] (pause:100); [t] "valor absoluto de"; [n] children/*[1];' +
'[p] (pause:350);',
'self::fenced', 'self::fenced[@role="neutral"]');
defineRule(
'omit-fences', 'default.default',
'[p] (pause:500); [n] children/*[1]; [p] (pause:200);',
'self::fenced');
// Matrix rules.
defineRule(
'matrix', 'default.default',
'[t] "matriz"; [m] children/* ' +
'(ctxtFunc:CTXFnodeCounter,context:"row",pause:100)',
'self::matrix');
defineRule(
'matrix-row', 'default.default',
'[m] children/* (ctxtFunc:CTXFnodeCounter,context:"column",pause:100)',
'self::row[@role="matrix"]');
defineRule(
'matrix-cell', 'default.default',
'[n] children/*[1]', 'self::cell[@role="matrix"]');
// Vector rules.
defineRule(
'vector', 'default.default',
'[t] "vector"; [m] children/* ' +
'(ctxtFunc:CTXFnodeCounter,context:"element",pause:100)',
'self::vector');
// Cases rules.
defineRule(
'cases', 'default.default',
'[t] "caso, declaração de"; [m] children/* ' +
'(ctxtFunc:CTXFnodeCounter,context:"case",pause:100)',
'self::cases');
defineRule(
'cases-row', 'default.default',
'[m] children/*', 'self::row[@role="cases"]');
defineRule(
'cases-cell', 'default.default',
'[n] children/*[1]', 'self::cell[@role="cases"]');
defineRule(
'row', 'default.default',
'[m] ./* (ctxtFunc:CTXFnodeCounter,context:"column",pause:100)',
      'self::row');
defineRule(
'cases-end', 'default.default',
'[t] "caso, declaração de"; ' +
'[m] children/* (ctxtFunc:CTXFnodeCounter,context:"case",pause:100);' +
'[t] "fim casos"',
'self::cases', 'following-sibling::*');
// Multiline rules.
defineRule(
'multiline', 'default.default',
'[t] "várias linhas equação";' +
'[m] children/* (ctxtFunc:CTXFnodeCounter,context:"line",pause:100)',
'self::multiline');
defineRule(
'line', 'default.default',
'[m] children/*', 'self::line');
// Table rules.
defineRule(
'table', 'default.default',
'[t] "várias linhas equação";' +
'[m] children/* (ctxtFunc:CTXFnodeCounter,context:"row",pause:200)',
'self::table');
defineRule(
'table-row', 'default.default',
'[m] children/* (pause:100)', 'self::row[@role="table"]');
defineRuleAlias(
'cases-cell', 'self::cell[@role="table"]');
// Rules for punctuated expressions.
defineRule(
'end-punct', 'default.default',
'[m] children/*; [p] (pause:300)',
'self::punctuated', '@role="endpunct"');
defineRule(
'start-punct', 'default.default',
'[n] content/*[1]; [p] (pause:200); [m] children/*',
'self::punctuated', '@role="startpunct"');
defineRule(
'integral-punct', 'default.default',
'[n] children/*[1] (rate:0.2); [n] children/*[3] (rate:0.2)',
'self::punctuated', '@role="integral"');
defineRule(
'punctuated', 'default.default',
'[m] children/* (pause:100)',
'self::punctuated');
// Function rules
defineRule(
'function', 'default.default',
'[n] text()', 'self::function');
defineRule(
'appl', 'default.default',
'[n] children/*[1]; [n] content/*[1]; [n] children/*[2]', 'self::appl');
// Limit operator rules
defineRule(
'limboth', 'default.default',
'[n] children/*[1]; [t] "a partir de"; [n] children/*[2]; [t] "para";' +
'[n] children/*[3]', 'self::limboth');
defineRule(
'sum-only', 'default.default',
'[n] children/*[1]; [p] (pause 100); [t] "mais"; [n] children/*[2];' +
'[p] (pause 250);',
'self::limboth', 'self::limboth[@role="sum"]');
defineRule(
'limlower', 'default.default',
'[n] children/*[1]; [t] "mais"; [n] children/*[2];', 'self::limlower');
defineRule(
'limupper', 'default.default',
'[n] children/*[1]; [t] "sob"; [n] children/*[2];', 'self::limupper');
// Bigoperator rules
defineRule(
'largeop', 'default.default',
'[n] text()', 'self::largeop');
defineRule(
'bigop', 'default.default',
'[n] children/*[1]; [p] (pause 100); [t] "mais"; [n] children/*[2];' +
'[p] (pause 250);',
'self::bigop');
// Integral rules
defineRule(
'integral', 'default.default',
'[n] children/*[1]; [p] (pause 100); [n] children/*[2]; [p] (pause 200);' +
'[n] children/*[3] (rate:0.35);', 'self::integral');
defineRule(
'sqrt', 'default.default',
'[t] "dividido por"; [n] children/*[1] (rate:0.2); [p] (pause:400)',
'self::sqrt');
defineRule(
'square', 'default.default',
'[n] children/*[1]; [t] "quadrado" (pitch:0.35); [p] (pause:300)',
'self::superscript', 'children/*[2][text()=2]');
defineRule(
'text-no-mult', 'default.default',
'[n] children/*[1]; [p] (pause:200); [n] children/*[2]',
'self::infixop', 'children/text');
};
}); // goog.scope<|fim▁end|> | defineRule(
'mtd', 'default.short', |
<|file_name|>tex_utils.py<|end_file_name|><|fim▁begin|>import re
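# Regexes for picking LaTeX commands, \includegraphics paths and brace-delimited arguments out of source lines.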
simple_cmd_match = re.compile(r'\\([^\\]+?)\{(.*?)\}')
graphics_cmd_match = re.compile(r'\\includegraphics\[.*?\]?\{(.*?)\}')
begin_cmd_match = re.compile(r'\\begin{([^}]+?)}(?:(?:\[([^\]]+?)\])|.*)')
newcmd_match = re.compile(r'\\.+?\{(.*?)\}\{(.*)\}')
# newcmd_match_with_var = re.compile(r'\\[^\\]+?\{(.*?)\}\{(.*?)\}')
vars_match = re.compile(r'\{(.+?)\}')
<|fim▁hole|> one_var = ''
for char in line.strip():
if char == '}':
open_braces -= 1
if open_braces > 0:
one_var += char
elif open_braces == 0 and one_var:
res.append(one_var)
one_var = ''
if char == '{':
open_braces += 1
return res
class FileIter:
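    """Small helper that owns a file handle and yields its lines one at a time."""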
def __init__(self, filename):
self.fn = filename
self.f = open(self.fn, 'r')
def get_line(self):
for line in self.f:
yield line
self.f.close()<|fim▁end|> |
def get_vars(line):
res = list()
open_braces = 0 |
<|file_name|>helpers.py<|end_file_name|><|fim▁begin|># Helper functions for the Maine Legislature project
import app_config
import collections
import copytext
import re
import json
import numbers
from unicodedata import normalize
from operator import itemgetter
CACHE = {}
def get_copy():
"""
Thank you Ryan for this neat trick to avoid thrashing the disk
https://github.com/INN/maine-legislature/blob/master/helpers.py#L361-L364
"""
if not CACHE.get('copy', None):
CACHE['copy'] = copytext.Copy(app_config.COPY_PATH)
return CACHE['copy']
# NOTE: ZIPs that arrive as strings (e.g. "01234-4567" or "61234-4567") must be returned
# unchanged; only numeric spreadsheet values get their trailing ".0" stripped below.
def format_zip(zip):
if type(zip) == str:
return zip
try:
zip = str(zip)
zip = zip.replace('.0', '')
return zip
except ValueError:
return zip
def get_locations():
copy = get_copy()
locations = copy['locations']
for location in locations:
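        # NOTE: better_id is computed but never used, so this loop currently has no effect.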
better_id = location['id'].split('.')
return locations
def get_location_ids():
locations = get_locations()
ids = []
for location in locations:
ids.append(location['id'])
return ids
def get_location_by_slug(slug):
locations = get_locations()
place = None
for location in locations:
if location['id'] == slug:
place = location
break
return place
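# Raw status rows from the locations_statuses sheet; numeric ids are coerced to plain ints.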
def get_locations_statuses():
copy = get_copy()
statuses = copy['locations_statuses']
for status in statuses:
if isinstance( status['id'], numbers.Number):
status['id'] = int(float( status['id'] ))
return statuses
def get_location_history_by_slug(slug):
"""
return history, sorted by date then time -> dunno how well this will sort, but we shall see
"""
locations_statuses = get_locations_statuses()
history = []
for row in locations_statuses:
if row['id'] == slug:
history.append( row )
if len( history ) > 1 :
history = sorted( history, key=itemgetter( 'date', 'time' ), reverse=True )
return history<|fim▁hole|>def get_location_status_by_slug(slug):
history = get_location_history_by_slug(slug)
try:
return history[0]
except IndexError:
return {}
def get_location_status_color_by_slug(slug):
status = get_location_status_by_slug(slug)
try:
if status['color'] not in {'red', 'yellow', 'green', 'evacuated'}:
return u'unknown'
else:
return status['color']
except KeyError:
return u'unknown'
def get_location_status_updated_by_slug(slug):
status = get_location_status_by_slug(slug)
try:
return status['date'] + ' ' + status['time']
except KeyError:
return u''<|fim▁end|> | |
<|file_name|>build.py<|end_file_name|><|fim▁begin|>#
# This file is part of Mapnik (c++ mapping toolkit)
#
# Copyright (C) 2013 Artem Pavlenko
#
# Mapnik is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA<|fim▁hole|>
import os
import sys
import glob
from copy import copy
from subprocess import Popen, PIPE
Import('env')
lib_env = env.Clone()
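# Run a shell command and return its stripped stdout; stderr is only printed when silent=False.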
def call(cmd, silent=True):
stdin, stderr = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
if not stderr:
return stdin.strip()
elif not silent:
print stderr
def ldconfig(*args,**kwargs):
call('ldconfig')
if env['LINKING'] == 'static':
lib_env.Append(CXXFLAGS="-fPIC")
mapnik_lib_link_flag = ''
# note: .data gets the actual list to allow a true copy
# and avoids unintended pollution of other environments
libmapnik_cxxflags = copy(lib_env['CXXFLAGS'].data)
libmapnik_defines = copy(lib_env['CPPDEFINES'])
ABI_VERSION = env['ABI_VERSION']
enabled_imaging_libraries = []
filesystem = 'boost_filesystem%s' % env['BOOST_APPEND']
regex = 'boost_regex%s' % env['BOOST_APPEND']
system = 'boost_system%s' % env['BOOST_APPEND']
# clear out and re-set libs for this env
# note: order matters on linux: see lorder | tsort
lib_env['LIBS'] = [filesystem,
regex]
if env['HAS_CAIRO']:
lib_env.Append(LIBS=env['CAIRO_ALL_LIBS'])
# maybe bz2
if len(env['EXTRA_FREETYPE_LIBS']):
lib_env['LIBS'].extend(copy(env['EXTRA_FREETYPE_LIBS']))
if '-DHAVE_PNG' in env['CPPDEFINES']:
lib_env['LIBS'].append('png')
enabled_imaging_libraries.append('png_reader.cpp')
if '-DMAPNIK_USE_PROJ4' in env['CPPDEFINES']:
lib_env['LIBS'].append('proj')
if '-DHAVE_TIFF' in env['CPPDEFINES']:
lib_env['LIBS'].append('tiff')
enabled_imaging_libraries.append('tiff_reader.cpp')
if '-DHAVE_WEBP' in env['CPPDEFINES']:
lib_env['LIBS'].append('webp')
enabled_imaging_libraries.append('webp_reader.cpp')
lib_env['LIBS'].append('xml2')
if '-DBOOST_REGEX_HAS_ICU' in env['CPPDEFINES']:
lib_env['LIBS'].append('icui18n')
lib_env['LIBS'].append(system)
lib_env['LIBS'].append('harfbuzz')
if '-DHAVE_JPEG' in env['CPPDEFINES']:
lib_env['LIBS'].append('jpeg')
enabled_imaging_libraries.append('jpeg_reader.cpp')
lib_env['LIBS'].append(env['ICU_LIB_NAME'])
lib_env['LIBS'].append('freetype')
if env['RUNTIME_LINK'] == 'static':
if 'icuuc' in env['ICU_LIB_NAME']:
lib_env['LIBS'].append('icudata')
if env['PLATFORM'] == 'Linux':
lib_env['LINKFLAGS'].append('-pthread')
if env['RUNTIME_LINK'] != 'static':
lib_env['LIBS'].insert(0, 'agg')
lib_env['LIBS'].append('z')
if env['PLATFORM'] == 'Darwin':
mapnik_libname = env.subst(env['MAPNIK_LIB_NAME'])
if env['FULL_LIB_PATH']:
lib_path = '%s/%s' % (env['MAPNIK_LIB_BASE'],mapnik_libname)
else:
lib_path = '@loader_path/'+mapnik_libname
mapnik_lib_link_flag += ' -Wl,-install_name,%s' % lib_path
_d = {'version':env['MAPNIK_VERSION_STRING'].replace('-pre','')}
mapnik_lib_link_flag += ' -current_version %(version)s -compatibility_version %(version)s' % _d
else: # unix, non-macos
mapnik_libname = env.subst(env['MAPNIK_LIB_NAME'])
if env['ENABLE_SONAME']:
mapnik_libname = env.subst(env['MAPNIK_LIB_NAME']) + (".%d.%d" % (int(ABI_VERSION[0]),int(ABI_VERSION[1])))
if env['PLATFORM'] == 'SunOS':
if env['CXX'].startswith('CC'):
mapnik_lib_link_flag += ' -R. -h %s' % mapnik_libname
else:
mapnik_lib_link_flag += ' -Wl,-h,%s' % mapnik_libname
else: # Linux and others
lib_env['LIBS'].append('dl')
mapnik_lib_link_flag += ' -Wl,-rpath-link,.'
if env['ENABLE_SONAME']:
mapnik_lib_link_flag += ' -Wl,-soname,%s' % mapnik_libname
if env['FULL_LIB_PATH']:
mapnik_lib_link_flag += ' -Wl,-rpath=%s' % env['MAPNIK_LIB_BASE']
else:
mapnik_lib_link_flag += ' -Wl,-z,origin -Wl,-rpath=\$$ORIGIN'
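# Core libmapnik sources; backend-, plugin- and renderer-specific files are appended further below.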
source = Split(
"""
expression_grammar.cpp
fs.cpp
request.cpp
well_known_srs.cpp
params.cpp
image_filter_types.cpp
miniz_png.cpp
color.cpp
conversions.cpp
image_compositing.cpp
image_scaling.cpp
box2d.cpp
datasource_cache.cpp
datasource_cache_static.cpp
debug.cpp
expression_node.cpp
expression_string.cpp
expression.cpp
transform_expression.cpp
feature_kv_iterator.cpp
feature_style_processor.cpp
feature_type_style.cpp
dasharray_parser.cpp
font_engine_freetype.cpp
font_set.cpp
function_call.cpp
gradient.cpp
graphics.cpp
parse_path.cpp
image_reader.cpp
image_util.cpp
layer.cpp
map.cpp
load_map.cpp
memory.cpp
palette.cpp
marker_helpers.cpp
transform_expression_grammar.cpp
plugin.cpp
rule.cpp
save_map.cpp
wkb.cpp
projection.cpp
proj_transform.cpp
scale_denominator.cpp
simplify.cpp
parse_transform.cpp
memory_datasource.cpp
symbolizer.cpp
symbolizer_keys.cpp
symbolizer_enumerations.cpp
unicode.cpp
raster_colorizer.cpp
mapped_memory_cache.cpp
marker_cache.cpp
svg/svg_parser.cpp
svg/svg_path_parser.cpp
svg/svg_points_parser.cpp
svg/svg_transform_parser.cpp
warp.cpp
css_color_grammar.cpp
text/font_library.cpp
text/vertex_cache.cpp
text/text_layout.cpp
text/text_line.cpp
text/itemizer.cpp
text/scrptrun.cpp
text/face.cpp
text/glyph_positions.cpp
text/placement_finder.cpp
text/properties_util.cpp
text/renderer.cpp
text/symbolizer_helpers.cpp
text/text_properties.cpp
text/font_feature_settings.cpp
text/formatting/base.cpp
text/formatting/list.cpp
text/formatting/text.cpp
text/formatting/format.cpp
text/formatting/layout.cpp
text/formatting/registry.cpp
text/placements/registry.cpp
text/placements/base.cpp
text/placements/dummy.cpp
text/placements/list.cpp
text/placements/simple.cpp
group/group_layout_manager.cpp
group/group_rule.cpp
group/group_symbolizer_helper.cpp
xml_tree.cpp
config_error.cpp
color_factory.cpp
renderer_common.cpp
renderer_common/render_pattern.cpp
renderer_common/process_group_symbolizer.cpp
"""
)
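# With static plugin linking, fold each requested plugin's sources, defines and flags into libmapnik itself.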
if env['PLUGIN_LINKING'] == 'static':
hit = False
for plugin in env['REQUESTED_PLUGINS']:
details = env['PLUGINS'][plugin]
if details['lib'] in env['LIBS'] or not details['lib']:
plugin_env = SConscript('../plugins/input/%s/build.py' % plugin)
if not plugin_env:
print("Notice: no 'plugin_env' variable found for plugin: '%s'" % plugin)
else:
hit = True
DEF = '-DMAPNIK_STATIC_PLUGIN_%s' % plugin.upper()
lib_env.Append(CPPDEFINES = DEF)
if DEF not in libmapnik_defines:
libmapnik_defines.append(DEF)
if plugin_env.has_key('SOURCES') and plugin_env['SOURCES']:
source += ['../plugins/input/%s/%s' % (plugin, src) for src in plugin_env['SOURCES']]
if plugin_env.has_key('CPPDEFINES') and plugin_env['CPPDEFINES']:
lib_env.AppendUnique(CPPDEFINES=plugin_env['CPPDEFINES'])
if plugin_env.has_key('CXXFLAGS') and plugin_env['CXXFLAGS']:
lib_env.AppendUnique(CXXFLAGS=plugin_env['CXXFLAGS'])
if plugin_env.has_key('LINKFLAGS') and plugin_env['LINKFLAGS']:
lib_env.AppendUnique(LINKFLAGS=plugin_env['LINKFLAGS'])
if plugin_env.has_key('CPPPATH') and plugin_env['CPPPATH']:
lib_env.AppendUnique(CPPPATH=copy(plugin_env['CPPPATH']))
if plugin_env.has_key('LIBS') and plugin_env['LIBS']:
lib_env.AppendUnique(LIBS=plugin_env['LIBS'])
else:
print("Notice: dependencies not met for plugin '%s', not building..." % plugin)
if hit:
lib_env.Append(CPPDEFINES = '-DMAPNIK_STATIC_PLUGINS')
libmapnik_defines.append('-DMAPNIK_STATIC_PLUGINS')
if env['HAS_CAIRO']:
lib_env.AppendUnique(LIBPATH=env['CAIRO_LIBPATHS'])
lib_env.Append(CPPDEFINES = '-DHAVE_CAIRO')
libmapnik_defines.append('-DHAVE_CAIRO')
lib_env.AppendUnique(CPPPATH=copy(env['CAIRO_CPPPATHS']))
source.append('cairo/cairo_context.cpp')
source.append('cairo/cairo_renderer.cpp')
source.append('cairo/cairo_render_vector.cpp')
source.append('cairo/process_markers_symbolizer.cpp')
source.append('cairo/process_text_symbolizer.cpp')
source.append('cairo/process_group_symbolizer.cpp')
source.append('cairo/process_line_symbolizer.cpp')
source.append('cairo/process_line_pattern_symbolizer.cpp')
source.append('cairo/process_polygon_symbolizer.cpp')
source.append('cairo/process_polygon_pattern_symbolizer.cpp')
source.append('cairo/process_debug_symbolizer.cpp')
source.append('cairo/process_point_symbolizer.cpp')
source.append('cairo/process_raster_symbolizer.cpp')
source.append('cairo/process_building_symbolizer.cpp')
for cpp in enabled_imaging_libraries:
source.append(cpp)
# agg backend
source += Split(
"""
agg/agg_renderer.cpp
agg/process_building_symbolizer.cpp
agg/process_line_symbolizer.cpp
agg/process_line_pattern_symbolizer.cpp
agg/process_text_symbolizer.cpp
agg/process_point_symbolizer.cpp
agg/process_polygon_symbolizer.cpp
agg/process_polygon_pattern_symbolizer.cpp
agg/process_raster_symbolizer.cpp
agg/process_shield_symbolizer.cpp
agg/process_markers_symbolizer.cpp
agg/process_group_symbolizer.cpp
agg/process_debug_symbolizer.cpp
"""
)
# clipper
source += Split(
"""
../deps/clipper/src/clipper.cpp
""")
if env['RUNTIME_LINK'] == "static":
source += glob.glob('../deps/agg/src/' + '*.cpp')
# grid backend
if env['GRID_RENDERER']:
source += Split(
"""
grid/grid.cpp
grid/grid_renderer.cpp
grid/process_building_symbolizer.cpp
grid/process_line_pattern_symbolizer.cpp
grid/process_line_symbolizer.cpp
grid/process_markers_symbolizer.cpp
grid/process_point_symbolizer.cpp
grid/process_polygon_pattern_symbolizer.cpp
grid/process_polygon_symbolizer.cpp
grid/process_raster_symbolizer.cpp
grid/process_shield_symbolizer.cpp
grid/process_text_symbolizer.cpp
grid/process_group_symbolizer.cpp
""")
lib_env.Append(CPPDEFINES = '-DGRID_RENDERER')
libmapnik_defines.append('-DGRID_RENDERER')
# https://github.com/mapnik/mapnik/issues/1438
if env['SVG_RENDERER']: # svg backend
source += Split(
"""
svg/output/svg_output_grammars.cpp
svg/output/svg_renderer.cpp
svg/output/svg_generator.cpp
svg/output/svg_output_attributes.cpp
svg/output/process_symbolizers.cpp
svg/output/process_building_symbolizer.cpp
svg/output/process_line_pattern_symbolizer.cpp
svg/output/process_line_symbolizer.cpp
svg/output/process_markers_symbolizer.cpp
svg/output/process_point_symbolizer.cpp
svg/output/process_polygon_pattern_symbolizer.cpp
svg/output/process_polygon_symbolizer.cpp
svg/output/process_raster_symbolizer.cpp
svg/output/process_shield_symbolizer.cpp
svg/output/process_text_symbolizer.cpp
svg/output/process_group_symbolizer.cpp
""")
lib_env.Append(CPPDEFINES = '-DSVG_RENDERER')
libmapnik_defines.append('-DSVG_RENDERER')
if env['XMLPARSER'] == 'libxml2' and env['HAS_LIBXML2']:
source += Split(
"""
libxml2_loader.cpp
""")
lib_env.Append(CPPDEFINES = '-DHAVE_LIBXML2')
libmapnik_defines.append('-DHAVE_LIBXML2')
else:
source += Split(
"""
rapidxml_loader.cpp
"""
)
# clone the env one more time to isolate mapnik_lib_link_flag
lib_env_final = lib_env.Clone()
lib_env_final.Prepend(LINKFLAGS=mapnik_lib_link_flag)
# cache library values for other builds to use
env['LIBMAPNIK_LIBS'] = copy(lib_env['LIBS'])
env['LIBMAPNIK_LINKFLAGS'] = copy(lib_env['LINKFLAGS'])
env['LIBMAPNIK_CXXFLAGS'] = libmapnik_cxxflags
env['LIBMAPNIK_DEFINES'] = libmapnik_defines
mapnik = None
if env['PLATFORM'] == 'Darwin' or not env['ENABLE_SONAME']:
target_path = env['MAPNIK_LIB_BASE_DEST']
if 'uninstall' not in COMMAND_LINE_TARGETS:
if env['LINKING'] == 'static':
mapnik = lib_env_final.StaticLibrary(env['MAPNIK_NAME'], source)
else:
mapnik = lib_env_final.SharedLibrary(env['MAPNIK_NAME'], source)
result = env.Install(target_path, mapnik)
env.Alias(target='install', source=result)
env['create_uninstall_target'](env, os.path.join(target_path,env.subst(env['MAPNIK_LIB_NAME'])))
else:
# Symlink command, only works if both files are in same directory
def symlink(env, target, source):
trgt = str(target[0])
src = str(source[0])
if os.path.islink(trgt) or os.path.exists(trgt):
os.remove(trgt)
os.symlink(os.path.basename(src), trgt)
major, minor, micro = ABI_VERSION
soFile = "%s.%d.%d.%d" % (os.path.basename(env.subst(env['MAPNIK_LIB_NAME'])), int(major), int(minor), int(micro))
target = os.path.join(env['MAPNIK_LIB_BASE_DEST'], soFile)
if 'uninstall' not in COMMAND_LINE_TARGETS:
if env['LINKING'] == 'static':
mapnik = lib_env_final.StaticLibrary(env['MAPNIK_NAME'], source)
else:
mapnik = lib_env_final.SharedLibrary(env['MAPNIK_NAME'], source)
result = env.InstallAs(target=target, source=mapnik)
env.Alias(target='install', source=result)
if result:
env.AddPostAction(result, ldconfig)
# Install symlinks
target1 = os.path.join(env['MAPNIK_LIB_BASE_DEST'], "%s.%d.%d" % \
(os.path.basename(env.subst(env['MAPNIK_LIB_NAME'])),int(major), int(minor)))
target2 = os.path.join(env['MAPNIK_LIB_BASE_DEST'], os.path.basename(env.subst(env['MAPNIK_LIB_NAME'])))
if 'uninstall' not in COMMAND_LINE_TARGETS:
link1 = env.Command(target1, target, symlink)
env.Alias(target='install', source=link1)
link2 = env.Command(target2, target1, symlink)
env.Alias(target='install', source=link2)
# delete in reverse order..
env['create_uninstall_target'](env, target2)
env['create_uninstall_target'](env, target1)
env['create_uninstall_target'](env, target)
# to enable local testing
lib_major_minor = "%s.%d.%d" % (os.path.basename(env.subst(env['MAPNIK_LIB_NAME'])), int(major), int(minor))
local_lib = os.path.basename(env.subst(env['MAPNIK_LIB_NAME']))
if os.path.islink(lib_major_minor) or os.path.exists(lib_major_minor):
os.remove(lib_major_minor)
os.symlink(local_lib,lib_major_minor)
Clean(mapnik,lib_major_minor);
if not env['RUNTIME_LINK'] == 'static':
Depends(mapnik, env.subst('../deps/agg/libagg.a'))<|fim▁end|> | #
# |
<|file_name|>ModAction.java<|end_file_name|><|fim▁begin|>package stream.flarebot.flarebot.mod.modlog;
public enum ModAction {
BAN(true, ModlogEvent.USER_BANNED),
SOFTBAN(true, ModlogEvent.USER_SOFTBANNED),
FORCE_BAN(true, ModlogEvent.USER_BANNED),
TEMP_BAN(true, ModlogEvent.USER_TEMP_BANNED),
UNBAN(false, ModlogEvent.USER_UNBANNED),
KICK(true, ModlogEvent.USER_KICKED),
TEMP_MUTE(true, ModlogEvent.USER_TEMP_MUTED),
MUTE(true, ModlogEvent.USER_MUTED),
UNMUTE(false, ModlogEvent.USER_UNMUTED),
WARN(true, ModlogEvent.USER_WARNED);
private boolean infraction;
private ModlogEvent event;
ModAction(boolean infraction, ModlogEvent modlogEvent) {
this.infraction = infraction;
this.event = modlogEvent;
}
public boolean isInfraction() {
return infraction;
}
@Override
public String toString() {
return name().charAt(0) + name().substring(1).toLowerCase().replaceAll("_", " ");
}
public String getLowercaseName() {
return toString().toLowerCase();
}
<|fim▁hole|> }
}<|fim▁end|> | public ModlogEvent getEvent() {
return event; |
<|file_name|>transactionBuilderFactory.ts<|end_file_name|><|fim▁begin|>import should from 'should';
import { register } from '../../../../src/index';
import { KeyPair, TransactionBuilderFactory } from '../../../../src/coin/cspr/';
import * as testData from '../../../resources/cspr/cspr';
import { Transaction } from '../../../../src/coin/cspr/transaction';
const factory = register('tcspr', TransactionBuilderFactory);
const owner1Address = new KeyPair({ pub: testData.ACCOUNT_1.publicKey }).getAddress();
const owner2Address = new KeyPair({ pub: testData.ACCOUNT_2.publicKey }).getAddress();
const owner3Address = new KeyPair({ pub: testData.ACCOUNT_3.publicKey }).getAddress();
const sourceAddress = new KeyPair({ pub: testData.ROOT_ACCOUNT.publicKey }).getAddress();
const initTxWalletInitBuilder = () => {
const txBuilder = factory.getWalletInitializationBuilder();
txBuilder.fee({ gasLimit: testData.FEE.gasLimit, gasPrice: testData.FEE.gasPrice });
txBuilder.owner(owner1Address);
txBuilder.owner(owner2Address);
txBuilder.owner(owner3Address);
txBuilder.source({ address: sourceAddress });
return txBuilder;
};
const initTransferTxBuilder = () => {
const txBuilder = factory.getTransferBuilder();
txBuilder.fee({ gasLimit: testData.FEE.gasLimit, gasPrice: testData.FEE.gasPrice });
txBuilder.source({ address: owner1Address });
txBuilder.to(owner2Address);
txBuilder.amount(testData.MIN_MOTES_AMOUNT);
txBuilder.transferId(255);
return txBuilder;
};
describe('should build ', () => {
describe('serialized wallet initialization transactions', () => {
it('a non signed transaction from serialized', async () => {
const builder = initTxWalletInitBuilder();
const tx = (await builder.build()) as Transaction;
const txJson = tx.toJson();
const builder2 = factory.from(tx.toBroadcastFormat());
const tx2 = (await builder2.build()) as Transaction;
const tx2Json = tx2.toJson();
should.deepEqual(tx2Json, txJson, 'from implementation from factory should recreate original transaction');
});
it('a signed transaction from serialized', async () => {
const builder = initTxWalletInitBuilder();
builder.sign({ key: testData.ROOT_ACCOUNT.privateKey });
const tx = (await builder.build()) as Transaction;
const txJson = tx.toJson();
const builder2 = factory.from(tx.toBroadcastFormat());
const tx2 = (await builder2.build()) as Transaction;
const tx2Json = tx2.toJson();
should.deepEqual(tx2Json, txJson, 'from implementation from factory should recreate original transaction');
should.deepEqual(
tx2.casperTx.approvals,
tx.casperTx.approvals,
'from implementation from factory should get approvals correctly',
);
});
it('a signed transaction using extended key from serialized', async () => {
const builder = initTxWalletInitBuilder();
builder.sign({ key: testData.ROOT_ACCOUNT.xPrivateKey });
const tx = (await builder.build()) as Transaction;
const txJson = tx.toJson();
const builder2 = factory.from(tx.toBroadcastFormat());
const tx2 = (await builder2.build()) as Transaction;
const tx2Json = tx2.toJson();
should.deepEqual(tx2Json, txJson, 'from implementation from factory should recreate original transaction');
should.deepEqual(
tx2.casperTx.approvals,
tx.casperTx.approvals,
'from implementation from factory should get approvals correctly',
);
});
it('an offline multisig transaction', async () => {
const builder = initTxWalletInitBuilder();
builder.sign({ key: testData.ROOT_ACCOUNT.privateKey });
builder.sign({ key: testData.ACCOUNT_1.privateKey });
const tx = (await builder.build()) as Transaction;
const txJson = tx.toJson();
const builder2 = factory.from(tx.toBroadcastFormat());
const tx2 = (await builder2.build()) as Transaction;
const tx2Json = tx2.toJson();
should.deepEqual(tx2Json, txJson, 'from implementation from factory should recreate original transaction');
should.deepEqual(
tx2.casperTx.approvals,
tx.casperTx.approvals,
'from implementation from factory should get approvals correctly',
);
});
it('an offline multisig transaction using extended keys', async () => {
const builder = initTxWalletInitBuilder();
builder.sign({ key: testData.ROOT_ACCOUNT.xPrivateKey });
builder.sign({ key: testData.ACCOUNT_1.xPrivateKey });
const tx = (await builder.build()) as Transaction;
const txJson = tx.toJson();
const builder2 = factory.from(tx.toBroadcastFormat());
const tx2 = (await builder2.build()) as Transaction;
const tx2Json = tx2.toJson();
should.deepEqual(tx2Json, txJson, 'from implementation from factory should recreate original transaction');
should.deepEqual(
tx2.casperTx.approvals,
tx.casperTx.approvals,
'from implementation from factory should get approvals correctly',
);
});
it('an offline multisig transaction using one extended key', async () => {
const builder = initTxWalletInitBuilder();
builder.sign({ key: testData.ROOT_ACCOUNT.xPrivateKey });
builder.sign({ key: testData.ACCOUNT_1.privateKey });
const tx = (await builder.build()) as Transaction;
const txJson = tx.toJson();
const builder2 = factory.from(tx.toBroadcastFormat());
const tx2 = (await builder2.build()) as Transaction;
const tx2Json = tx2.toJson();
should.deepEqual(tx2Json, txJson, 'from implementation from factory should recreate original transaction');
should.deepEqual(
tx2.casperTx.approvals,
tx.casperTx.approvals,
'from implementation from factory should get approvals correctly',
);
});
});
describe('serialized transfer transactions', () => {
it('a non signed transaction from serialized', async () => {
const builder = initTransferTxBuilder();
const tx = (await builder.build()) as Transaction;
const txJson = tx.toJson();
const builder2 = factory.from(tx.toBroadcastFormat());
const tx2 = (await builder2.build()) as Transaction;
const tx2Json = tx2.toJson();
should.deepEqual(tx2Json, txJson, 'from implementation from factory should recreate original transaction');
});
it('a signed transaction from serialized', async () => {
const builder = initTransferTxBuilder();
builder.sign({ key: testData.ROOT_ACCOUNT.privateKey });
const tx = (await builder.build()) as Transaction;
const txJson = tx.toJson();<|fim▁hole|>
should.deepEqual(tx2Json, txJson, 'from implementation from factory should recreate original transaction');
should.deepEqual(
tx2.casperTx.approvals,
tx.casperTx.approvals,
'from implementation from factory should get approvals correctly',
);
});
it('a signed transaction with extended key from serialized', async () => {
const builder = initTransferTxBuilder();
builder.sign({ key: testData.ROOT_ACCOUNT.xPrivateKey });
const tx = (await builder.build()) as Transaction;
const txJson = tx.toJson();
const builder2 = factory.from(tx.toBroadcastFormat());
const tx2 = (await builder2.build()) as Transaction;
const tx2Json = tx2.toJson();
should.deepEqual(tx2Json, txJson, 'from implementation from factory should recreate original transaction');
should.deepEqual(
tx2.casperTx.approvals,
tx.casperTx.approvals,
'from implementation from factory should get approvals correctly',
);
});
it('an offline multisig transaction', async () => {
const builder = initTransferTxBuilder();
builder.sign({ key: testData.ROOT_ACCOUNT.privateKey });
builder.sign({ key: testData.ACCOUNT_1.privateKey });
const tx = (await builder.build()) as Transaction;
const txJson = tx.toJson();
const builder2 = factory.from(tx.toBroadcastFormat());
const tx2 = (await builder2.build()) as Transaction;
const tx2Json = tx2.toJson();
should.deepEqual(tx2Json, txJson, 'from implementation from factory should recreate original transaction');
should.deepEqual(
tx2.casperTx.approvals,
tx.casperTx.approvals,
'from implementation from factory should get approvals correctly',
);
});
it('an offline multisig transaction using extended keys', async () => {
const builder = initTransferTxBuilder();
builder.sign({ key: testData.ROOT_ACCOUNT.xPrivateKey });
builder.sign({ key: testData.ACCOUNT_1.xPrivateKey });
const tx = (await builder.build()) as Transaction;
const txJson = tx.toJson();
const builder2 = factory.from(tx.toBroadcastFormat());
const tx2 = (await builder2.build()) as Transaction;
const tx2Json = tx2.toJson();
should.deepEqual(tx2Json, txJson, 'from implementation from factory should recreate original transaction');
should.deepEqual(
tx2.casperTx.approvals,
tx.casperTx.approvals,
'from implementation from factory should get approvals correctly',
);
});
it('an offline multisig transaction using one extended key', async () => {
const builder = initTransferTxBuilder();
builder.sign({ key: testData.ROOT_ACCOUNT.xPrivateKey });
builder.sign({ key: testData.ACCOUNT_1.privateKey });
const tx = (await builder.build()) as Transaction;
const txJson = tx.toJson();
const builder2 = factory.from(tx.toBroadcastFormat());
const tx2 = (await builder2.build()) as Transaction;
const tx2Json = tx2.toJson();
should.deepEqual(tx2Json, txJson, 'from implementation from factory should recreate original transaction');
should.deepEqual(
tx2.casperTx.approvals,
tx.casperTx.approvals,
'from implementation from factory should get approvals correctly',
);
});
});
describe('should reject signing ', () => {
const factory = register('tcspr', TransactionBuilderFactory);
it('a wallet init transaction with modified signer', async () => {
const builder = initTxWalletInitBuilder();
builder.sign({ key: testData.ROOT_ACCOUNT.privateKey });
const tx = (await builder.build()) as Transaction;
const txJson = JSON.parse(tx.toBroadcastFormat());
const signer = txJson['deploy']['approvals'][0]['signer'];
txJson['deploy']['approvals'][0]['signer'] = '01' + signer.slice(2);
const builder2 = factory.from(JSON.stringify(txJson));
const tx2 = (await builder2.build()) as Transaction;
const keypair = new KeyPair({ prv: testData.ROOT_ACCOUNT.privateKey });
should.throws(
() => tx2.sign(keypair),
(e) => e.message === testData.ERROR_ALREADY_SIGNED_WITH_INVALID_KEY,
);
});
it('a wallet init transaction with modified signer using extended key', async () => {
const builder = initTxWalletInitBuilder();
builder.sign({ key: testData.ROOT_ACCOUNT.xPrivateKey });
const tx = (await builder.build()) as Transaction;
const txJson = JSON.parse(tx.toBroadcastFormat());
const signer = txJson['deploy']['approvals'][0]['signer'];
txJson['deploy']['approvals'][0]['signer'] = '01' + signer.slice(2);
const builder2 = factory.from(JSON.stringify(txJson));
const tx2 = (await builder2.build()) as Transaction;
const keypair = new KeyPair({ prv: testData.ROOT_ACCOUNT.privateKey });
should.throws(
() => tx2.sign(keypair),
(e) => e.message === testData.ERROR_ALREADY_SIGNED_WITH_INVALID_KEY,
);
});
it('a transfer transaction with modified signer', async () => {
const builder = initTransferTxBuilder();
builder.sign({ key: testData.ROOT_ACCOUNT.privateKey });
const tx = (await builder.build()) as Transaction;
const txJson = JSON.parse(tx.toBroadcastFormat());
const signer = txJson['deploy']['approvals'][0]['signer'];
txJson['deploy']['approvals'][0]['signer'] = '01' + signer.slice(2);
const builder2 = factory.from(JSON.stringify(txJson));
const tx2 = (await builder2.build()) as Transaction;
const keypair = new KeyPair({ prv: testData.ROOT_ACCOUNT.privateKey });
should.throws(
() => tx2.sign(keypair),
(e) => e.message === testData.ERROR_ALREADY_SIGNED_WITH_INVALID_KEY,
);
});
it('a transfer transaction with modified signer with extended key', async () => {
const builder = initTransferTxBuilder();
builder.sign({ key: testData.ROOT_ACCOUNT.xPrivateKey });
const tx = (await builder.build()) as Transaction;
const txJson = JSON.parse(tx.toBroadcastFormat());
const signer = txJson['deploy']['approvals'][0]['signer'];
txJson['deploy']['approvals'][0]['signer'] = '01' + signer.slice(2);
const builder2 = factory.from(JSON.stringify(txJson));
const tx2 = (await builder2.build()) as Transaction;
const keypair = new KeyPair({ prv: testData.ROOT_ACCOUNT.privateKey });
should.throws(
() => tx2.sign(keypair),
(e) => e.message === testData.ERROR_ALREADY_SIGNED_WITH_INVALID_KEY,
);
});
it('a transaction with invalid session data', async () => {
const builder = initTransferTxBuilder();
builder.sign({ key: testData.ROOT_ACCOUNT.privateKey });
const tx = (await builder.build()) as Transaction;
const txJson = JSON.parse(tx.toBroadcastFormat());
txJson['deploy']['session'] = { OtherType: '' };
should.throws(
() => {
factory.from(JSON.stringify(txJson));
},
(e) => e.message.startsWith(testData.INVALID_TRANSACTION_ERROR),
);
});
it('a transaction with invalid session data using extended key', async () => {
const builder = initTransferTxBuilder();
builder.sign({ key: testData.ROOT_ACCOUNT.xPrivateKey });
const tx = (await builder.build()) as Transaction;
const txJson = JSON.parse(tx.toBroadcastFormat());
txJson['deploy']['session'] = { OtherType: '' };
should.throws(
() => {
factory.from(JSON.stringify(txJson));
},
(e) => e.message.startsWith(testData.INVALID_TRANSACTION_ERROR),
);
});
it('a transaction with empty raw transaction', async () => {
should.throws(
() => {
factory.from('{}');
},
(e) => e.message.startsWith(testData.INVALID_TRANSACTION_ERROR),
);
});
it('a transaction with undefined as raw transaction', async () => {
should.throws(
() => {
factory.from(undefined as unknown as string);
},
(e) => e.message.startsWith(testData.INVALID_RAW_TRANSACTION_ERROR),
);
});
it('a transaction with invalid contract', async () => {
const builder = initTxWalletInitBuilder();
builder.sign({ key: testData.ROOT_ACCOUNT.privateKey });
const tx = (await builder.build()) as Transaction;
const txJson = JSON.parse(tx.toBroadcastFormat());
txJson['deploy']['session']['ModuleBytes']['module_bytes'] = testData.INVALID_WALLET_INIT_CONTRACT;
should.throws(
() => {
factory.from(JSON.stringify(txJson));
},
(e) => e.message.startsWith(testData.INVALID_TRANSACTION_ERROR),
);
});
it('a transaction with invalid contract using extended key', async () => {
const builder = initTxWalletInitBuilder();
builder.sign({ key: testData.ROOT_ACCOUNT.xPrivateKey });
const tx = (await builder.build()) as Transaction;
const txJson = JSON.parse(tx.toBroadcastFormat());
txJson['deploy']['session']['ModuleBytes']['module_bytes'] = testData.INVALID_WALLET_INIT_CONTRACT;
should.throws(
() => {
factory.from(JSON.stringify(txJson));
},
(e) => e.message.startsWith(testData.INVALID_TRANSACTION_ERROR),
);
});
it('a transaction with invalid session data', async () => {
const builder = initTxWalletInitBuilder();
builder.sign({ key: testData.ROOT_ACCOUNT.privateKey });
const tx = (await builder.build()) as Transaction;
const txJson = JSON.parse(tx.toBroadcastFormat());
txJson['deploy']['session'] = { OtherType: '' };
should.throws(
() => {
factory.from(JSON.stringify(txJson));
},
(e) => e.message.startsWith(testData.INVALID_TRANSACTION_ERROR),
);
});
it('a transaction with invalid session data using extended key', async () => {
const builder = initTxWalletInitBuilder();
builder.sign({ key: testData.ROOT_ACCOUNT.xPrivateKey });
const tx = (await builder.build()) as Transaction;
const txJson = JSON.parse(tx.toBroadcastFormat());
txJson['deploy']['session'] = { OtherType: '' };
should.throws(
() => {
factory.from(JSON.stringify(txJson));
},
(e) => e.message.startsWith(testData.INVALID_TRANSACTION_ERROR),
);
});
});
});<|fim▁end|> |
const builder2 = factory.from(tx.toBroadcastFormat());
const tx2 = (await builder2.build()) as Transaction;
const tx2Json = tx2.toJson(); |
<|file_name|>TwitterComponentConfiguration.java<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.twitter.springboot;
import org.springframework.boot.context.properties.ConfigurationProperties;
/**
* This component integrates with Twitter to send tweets or search for tweets
* and more.
*
* Generated by camel-package-maven-plugin - do not edit this file!
*/
@ConfigurationProperties(prefix = "camel.component.twitter")
public class TwitterComponentConfiguration {
/**
* The access token
*/
private String accessToken;
/**
* The access token secret
*/
private String accessTokenSecret;
/**
* The consumer key
*/
private String consumerKey;
/**
* The consumer secret
*/
private String consumerSecret;
/**
* The http proxy host which can be used for the camel-twitter.
*/
private String httpProxyHost;
/**
* The http proxy user which can be used for the camel-twitter.
*/
private String httpProxyUser;
/**
* The http proxy password which can be used for the camel-twitter.
*/
private String httpProxyPassword;
/**
* The http proxy port which can be used for the camel-twitter.
*/
private int httpProxyPort;
public String getAccessToken() {
return accessToken;
}
public void setAccessToken(String accessToken) {
this.accessToken = accessToken;
}
public String getAccessTokenSecret() {
return accessTokenSecret;
}
public void setAccessTokenSecret(String accessTokenSecret) {
this.accessTokenSecret = accessTokenSecret;
}
public String getConsumerKey() {
return consumerKey;
}
public void setConsumerKey(String consumerKey) {
this.consumerKey = consumerKey;
}
public String getConsumerSecret() {
return consumerSecret;
}<|fim▁hole|>
public void setConsumerSecret(String consumerSecret) {
this.consumerSecret = consumerSecret;
}
public String getHttpProxyHost() {
return httpProxyHost;
}
public void setHttpProxyHost(String httpProxyHost) {
this.httpProxyHost = httpProxyHost;
}
public String getHttpProxyUser() {
return httpProxyUser;
}
public void setHttpProxyUser(String httpProxyUser) {
this.httpProxyUser = httpProxyUser;
}
public String getHttpProxyPassword() {
return httpProxyPassword;
}
public void setHttpProxyPassword(String httpProxyPassword) {
this.httpProxyPassword = httpProxyPassword;
}
public int getHttpProxyPort() {
return httpProxyPort;
}
public void setHttpProxyPort(int httpProxyPort) {
this.httpProxyPort = httpProxyPort;
}
}<|fim▁end|> | |
<|file_name|>normalizeFiles.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: UTF-8 -*-
# Normalize soundfiles in a folder, and write them to a new folder
# called normalized/
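# Illustrative invocation (hypothetical paths; assumes SoX is installed,
# since normalize() shells out to the `sox --norm` command):
#   python normalizeFiles.py /data/sounds/ -6
# This would write normalized copies of /data/sounds/*.wav into
# /data/sounds/normalized/ (note the trailing slash on the folder argument).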
# Import Python modules
import contextlib
import os
import shutil
import sys
import wave
# Import user modules
def normalize():
""" Normalizes a set of sound files to norm-To dB
return -->> 1
"""
# Get the names of the files in sortFolder.
files = os.listdir(folderToSort)
# Make a directory for the renamed sorted files:
dirname = folderToSort + 'normalized/'
try:
os.makedirs(dirname)
except OSError:
if os.path.exists(dirname):
pass
else:
raise
for singleFile in files:
#Only work with .wav files
if singleFile[-4:] == '.wav':
inputFile = folderToSort + singleFile
outfile = dirname + singleFile
command = 'sox --norm={0} {1} {2}'.format(normalizeTo, inputFile,
outfile)
os.system(command)
return 1
def inputCheck(argValues):
    """ Check whether the input data is valid. If not print usage
    information.
    argValues ---> a list of the scripts command-line parameters.
    """
    if len(argValues) < 2:
        print 'Usage: normalizeFiles.py <folderToSort> [normalizeTo]'
        sys.exit(1)
    return 1
# Check that the input parameters are valid. Get the name of the folder
# that contains the sound files and the sort type from the command-line
# arguments.<|fim▁hole|>inputCheck(argValues)
folderToSort = argValues[1]
try:
normalizeTo = argValues[2]
except IndexError:
normalizeTo = -3
print 'Normalizing to -3dB'
# Exectue the script.
normalize()<|fim▁end|> | argValues = sys.argv |
<|file_name|>0004_photo_rover.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-20 05:49
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
<|fim▁hole|>
dependencies = [
('photos', '0003_rover'),
]
operations = [
migrations.AddField(
model_name='photo',
name='rover',
field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='photos', to='photos.Rover'),
preserve_default=False,
),
]<|fim▁end|> |
class Migration(migrations.Migration): |
<|file_name|>test_utils.py<|end_file_name|><|fim▁begin|>from maskgen.jpeg import utils
import unittest
from test_support import TestSupport
class TestJpegUtils(TestSupport):
def test_load(self):
self.assertEqual(91,utils.estimate_qf(self.locateFile('tests/images/test_project1.jpg')))<|fim▁hole|> unittest.main()<|fim▁end|> |
if __name__ == '__main__': |
<|file_name|>fastslim.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# This program processes an address trace generated by the Valgrind lackey tool
# to create a reduced trace according to the Fastslim-Demand algorithm
# described in "FastSlim: prefetch-safe trace reduction for I/O cache
# simulation" by Wei Jin, Xiaobai Sun, and Jeffrey S. Chase in ACM Transactions
# on Modeling and Computer Simulation, Vol. 11, No. 2 (April 2001),
# pages 125-160. http://doi.acm.org/10.1145/384169.384170
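# Illustrative pipeline (hypothetical file names; lackey's --trace-mem option
# is assumed from the Valgrind documentation):
#   valgrind --tool=lackey --trace-mem=yes ./some_prog 2> trace.txt
#   python fastslim.py --buffersize 8 trace.txt > reduced.txt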
import fileinput
import sys
import argparse
from operator import attrgetter
class TraceItem(object):
def __init__(self, reftype, pg, tstamp):
self.reftype = reftype
self.pg = pg
self.tstamp = tstamp
self.marked = False
def __eq__(self, other):
return self.pg == other.pg
def __repr__(self):
return self.reftype + " " + format(self.pg*4096,'x')
def __hash__(self):
return hash(self.pg)
ts = 0 # "timestamp" (entry number in original trace)
tracebuffer = set() # The set of entries in the buffer
toprint = [] # The list of entries waiting to be printed in order
# Emit in timestamp order may have to hold onto items until the trace buffer
# is emptied, because there may be marked items in the trace buffer with
# earlier timestamps that have to appear in the output first.
# So, we put entries into a list as they are first seen and then
# emit_marked adds all marked items to the list.
# The list is then sorted by timestamp and printed.<|fim▁hole|> for ti in tracebuffer:
if ti.marked:
toprint.append(ti)
toprint.sort(key=attrgetter('tstamp'))
for ti in toprint:
print ti
tracebuffer.clear()
del toprint[:]
# Parse command line arguments
parser = argparse.ArgumentParser(description="Reduce address trace from valgrind using fastslim-demand algorithm.")
parser.add_argument('-k', '--keepcode', action='store_true', help="include code pages in compressed trace")
parser.add_argument('-b', '--buffersize', type=int, default=4, help="number of entries in trace buffer")
parser.add_argument('tracefile', nargs='?', default="-")
args = parser.parse_args()
# Process input trace
for line in fileinput.input(args.tracefile):
if line[0] == '=':
continue
reftype = line[0:2].strip()
if reftype == "I" and args.keepcode == False:
continue
addrstr = line.split(',')[0][3:].strip()
try:
addr = int(addrstr, 16)
except ValueError:
#print "This does not appear to be valgrind output, skipping: " + line
continue
pg = addr / 4096
ti = TraceItem(reftype,pg,ts)
if ti in tracebuffer:
ti.marked = True
ti.tstamp = ts
else:
if (len(tracebuffer) == args.buffersize):
emit_marked_in_ts_order()
toprint.append(ti)
tracebuffer.add(ti)
ts = ts + 1<|fim▁end|> | def emit_marked_in_ts_order(): |
<|file_name|>non-legacy-modes.rs<|end_file_name|><|fim▁begin|>// run-pass
struct X {
repr: isize
}
fn apply<T, F>(x: T, f: F) where F: FnOnce(T) {
f(x);
}
fn check_int(x: isize) {
assert_eq!(x, 22);
}
fn check_struct(x: X) {
check_int(x.repr);
}<|fim▁hole|> apply(X {repr: 22}, check_struct);
}<|fim▁end|> |
pub fn main() {
apply(22, check_int); |
<|file_name|>TreeBlockContext.java<|end_file_name|><|fim▁begin|>package antlr;
/* ANTLR Translator Generator
* Project led by Terence Parr at http://www.cs.usfca.edu
* Software rights: http://www.antlr.org/license.html
*
* $Id: //depot/code/org.antlr/release/antlr-2.7.7/antlr/TreeBlockContext.java#2 $
*/
/**The context needed to add root,child elements to a Tree. There
* is only one alternative (i.e., a list of children). We subclass to
* specialize. MakeGrammar.addElementToCurrentAlt will work correctly
* now for either a block of alts or a Tree child list.
*
* The first time addAlternativeElement is called, it sets the root element
* rather than adding it to one of the alternative lists. Rather than have
* the grammar duplicate the rules for grammar atoms etc... we use the same
* grammar and same refToken behavior etc... We have to special case somewhere
* and here is where we do it.
*/
class TreeBlockContext extends BlockContext {
protected boolean nextElementIsRoot = true;
public void addAlternativeElement(AlternativeElement e) {
TreeElement tree = (TreeElement)block;
if (nextElementIsRoot) {
tree.root = (GrammarAtom)e;
nextElementIsRoot = false;
}
else {
super.addAlternativeElement(e);
}
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>db.cpp<|end_file_name|><|fim▁begin|>/**
 * db.cpp
*
* Database abstraction part of Disc Data Base.
*
* Copyright (c) 2010-2011 Wincent Balin
*
* Based upon ddb.pl, created years before and serving faithfully until today.
*
* Uses SQLite database version 3.
*
* Published under MIT license. See LICENSE file for further information.
*/
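// Illustrative use of this class (hypothetical caller; a Print instance and
// an existing database file are assumed):
//   DB db(&printer);
//   db.open("discs.ddb", false);
//   if(!db.is_disc_present("backup_2011"))
//       db.add_disc("backup_2011", "/mnt/cdrom");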
#include "db.hpp"
#include <sstream>
#include <utility>
#include <cassert>
#include <boost/foreach.hpp>
// Use shortcut from example
#define foreach BOOST_FOREACH
// Deprecated features not wanted
#define BOOST_FILESYSTEM_NO_DEPRECATED
#include <boost/filesystem.hpp>
// Use a shortcut
namespace fs = boost::filesystem;
DB::DB(Print* print)
{
// Store pointer to the printer
p = print;
// Perform initialization
init();
}
void
DB::init(void)
{
// Reset database pointer
db = NULL;
// Set version
version = 1;
// Define database format
format.push_back("CREATE TABLE ddb (directory TEXT NOT NULL, file TEXT, disc TEXT NOT NULL)");
format.push_back("CREATE INDEX ddb_index ON ddb (directory, file, disc)");
format.push_back("CREATE TABLE ddb_version(version INTEGER NOT NULL)");
std::ostringstream ddb_version_table_contents;
ddb_version_table_contents << "INSERT INTO ddb_version VALUES (" << version << ")";
format.push_back(ddb_version_table_contents.str());
}
DB::~DB(void) throw(DBError)
{
// Close database
close();
}
void
DB::open(const char* dbname, bool initialize) throw(DBError)
{
std::string error_message = std::string("Could not open file ") + dbname;
int result;
// Assume database is not open already
assert(db == NULL);
p->msg("Opening database...", Print::VERBOSE);
// Open database
result =
sqlite3_open_v2(dbname, &db, SQLITE_OPEN_READWRITE, NULL);
if(result != SQLITE_OK)
        throw(DBError(error_message, DBError::FILE_ERROR));
p->msg("Done.", Print::DEBUG);<|fim▁hole|>DB::close(void) throw(DBError)
{
std::string error_message = "Could not close database";
int result;
p->msg("Closing database...", Print::VERBOSE);
// Close database
result =
sqlite3_close(db);
// If something went wrong, throw an exception
if(result != SQLITE_OK)
throw(DBError(error_message, DBError::FILE_ERROR));
p->msg("Done.", Print::DEBUG);
}
bool
DB::has_correct_format(void) throw(DBError)
{
const char* version_check = "SELECT COUNT(*) AS count, version FROM ddb_version";
std::string error_message = "Could not check database correctness";
int result;
bool format_is_correct = false;
// Prepare SQL statement
sqlite3_stmt* stmt;
result =
sqlite3_prepare_v2(db, version_check, -1, &stmt, NULL);
if(result != SQLITE_OK)
throw(DBError(error_message, DBError::PREPARE_STATEMENT));
// Execute SQL statement
result =
sqlite3_step(stmt);
if(result != SQLITE_ROW)
throw(DBError(error_message, DBError::EXECUTE_STATEMENT));
// Result should have only one row
if(sqlite3_column_int(stmt, 0) == 1)
{
// Version in database should be equal to the version of this class
format_is_correct = (sqlite3_column_int(stmt, 1) == version);
}
// Finalize SQL statement
result =
sqlite3_finalize(stmt);
if(result != SQLITE_OK)
throw(DBError(error_message, DBError::FINALIZE_STATEMENT));
if(!format_is_correct)
p->msg("Database has wrong format!", Print::INFO);
// Return correctness
return format_is_correct;
}
bool
DB::is_disc_present(const char* discname) throw(DBError)
{
const char* disc_presence_check = "SELECT DISTINCT disc FROM ddb WHERE disc LIKE ?";
int result;
bool disc_present = false;
std::string error_message = "Could not check disc presence";
// Prepare SQL statement
sqlite3_stmt* stmt;
result =
sqlite3_prepare_v2(db, disc_presence_check, -1, &stmt, NULL);
if(result != SQLITE_OK)
throw(DBError(error_message, DBError::PREPARE_STATEMENT));
// Execute SQL statement
result =
sqlite3_step(stmt);
// Check whether we have at least one disc in the database
if(result == SQLITE_ROW)
{
disc_present = true;
}
else if(result == SQLITE_DONE)
{
disc_present = false;
}
else
{
// We got an error
throw(DBError(error_message, DBError::EXECUTE_STATEMENT));
}
// Finalize SQL statement
result =
sqlite3_finalize(stmt);
if(result != SQLITE_OK)
throw(DBError(error_message, DBError::FINALIZE_STATEMENT));
// Return disc presence
return disc_present;
}
void
DB::add_disc(const char* disc_name, const char* starting_path) throw(DBError)
{
const char* begin_transaction = "BEGIN";
const char* add_entry = "INSERT INTO ddb (directory, file, disc) VALUES (?, ?, ?)";
const char* end_transaction = "COMMIT";
std::string error_message = std::string("Could not add disc ") + disc_name;
int result;
// Declare disc root directory
fs::path disc_path(starting_path);
// Check, whether disc path is a directory
if(!fs::is_directory(disc_path))
throw(DBError(std::string("Path ") + starting_path + " is not a directory", DBError::FILE_ERROR));
// Container of file names
std::vector<std::pair<fs::path, bool> > filenames;
// Open directory and iterate through it recursively
fs::path current_path;
bool current_path_is_directory;
fs::recursive_directory_iterator end;
for(fs::recursive_directory_iterator dir(disc_path);
dir != end;
dir++)
{
current_path = dir->path();
current_path_is_directory = fs::is_directory(current_path);
// Put current file name into vector
filenames.push_back(std::make_pair(current_path, current_path_is_directory));
}
// Sort filenames
sort(filenames.begin(),filenames.end());
// Print file names, if verbosity is set high enough
if(p->get_verbosity() >= Print::VERBOSE_DEBUG)
{
std::pair<fs::path, bool> it;
foreach(it, filenames)
{
std::cout << (it.second ? "Directory" : "File") << " " << it.first << std::endl;
}
}
// Begin transaction
result =
sqlite3_exec(db, begin_transaction, NULL, NULL, NULL);
if(result != SQLITE_OK)
throw(DBError(error_message, DBError::BEGIN_TRANSACTION));
// Prepare SQL statement
sqlite3_stmt* stmt;
    result =
        sqlite3_prepare_v2(db, add_entry, -1, &stmt, NULL);
if(result != SQLITE_OK)
throw(DBError(error_message, DBError::PREPARE_STATEMENT));
// Bind disc name
    result =
        sqlite3_bind_text(stmt, 3, disc_name, -1, SQLITE_STATIC);
if(result != SQLITE_OK)
throw(DBError(error_message, DBError::BIND_PARAMETER));
p->msg("Inserting files into database...", Print::VERBOSE);
// Add files
bool file_is_directory;
fs::path path;
std::string d_entry;
std::string f_entry;
std::pair<fs::path, bool> it;
foreach(it, filenames)
{
path = it.first;
file_is_directory = it.second;
d_entry = file_is_directory ?
path.generic_string() :
path.parent_path().generic_string();
f_entry = file_is_directory ?
"NULL" :
path.filename().generic_string();
// Reset SQL statement
result =
sqlite3_reset(stmt);
if(result != SQLITE_OK)
throw(DBError(error_message, DBError::RESET_STATEMENT));
// Bind directory and file
result =
sqlite3_bind_text(stmt, 1, d_entry.c_str(), -1, SQLITE_STATIC);
if(result != SQLITE_OK)
throw(DBError(error_message, DBError::BIND_PARAMETER));
result =
sqlite3_bind_text(stmt, 2, f_entry.c_str(), -1, SQLITE_STATIC);
if(result != SQLITE_OK)
throw(DBError(error_message, DBError::BIND_PARAMETER));
// Execute SQL statement
result =
sqlite3_step(stmt);
// Check for errors
if(result != SQLITE_DONE)
throw(DBError(error_message, DBError::EXECUTE_STATEMENT));
}
// End transaction
result =
sqlite3_exec(db, end_transaction, NULL, NULL, NULL);
if(result != SQLITE_OK)
throw(DBError(error_message, DBError::END_TRANSACTION));
p->msg("Done.", Print::DEBUG);
}
void
DB::remove_disc(const char* disc_name) throw(DBError)
{
const char* remove_query = "DELETE FROM ddb WHERE disc=?";
int result;
std::string error_message = std::string("Could not remove disc ") + disc_name;
// Initialize and prepare SQL statement
sqlite3_stmt* stmt;
result =
sqlite3_prepare_v2(db, remove_query, -1, &stmt, NULL);
if(result != SQLITE_OK)
throw(DBError(error_message, DBError::PREPARE_STATEMENT));
// Bind disc name
result =
sqlite3_bind_text(stmt, 1, disc_name, -1, SQLITE_STATIC);
if(result != SQLITE_OK)
throw(DBError(error_message, DBError::BIND_PARAMETER));
// Execute SQL statement
result =
sqlite3_step(stmt);
if(result != SQLITE_DONE)
throw(DBError(error_message, DBError::EXECUTE_STATEMENT));
// Clean up
result =
sqlite3_finalize(stmt);
if(result != SQLITE_OK)
throw(DBError(error_message, DBError::FINALIZE_STATEMENT));
}
void
DB::list_files(const char* disc_name, bool directories_only) throw(DBError)
{
const char* list_files_query = "SELECT directory,file FROM ddb WHERE disc LIKE ?";
const char* list_directories_query = "SELECT DISTINCT directory FROM ddb WHERE disc LIKE ?";
const char* list_query = directories_only ? list_directories_query : list_files_query;
    std::string error_message = std::string("Could not list ") + (directories_only ? "directories" : "files");
int result;
// Prepare statement
sqlite3_stmt* stmt;
result =
sqlite3_prepare_v2(db, list_query, -1, &stmt, NULL);
if(result != SQLITE_OK)
throw(DBError(error_message, DBError::PREPARE_STATEMENT));
// Bind disc name
result =
sqlite3_bind_text(stmt, 1, disc_name, -1, SQLITE_STATIC);
if(result != SQLITE_OK)
throw(DBError(error_message, DBError::BIND_PARAMETER));
// Get data
const char* directory;
const char* file;
while(true)
{
// Execute SQL statement
result =
sqlite3_step(stmt);
// Check whether we have at least one disc in the database
if(result == SQLITE_ROW)
{
if(directories_only)
{
directory = reinterpret_cast<const char*>(sqlite3_column_text(stmt, 0));
p->add_directory(disc_name, directory);
}
else
{
directory = reinterpret_cast<const char*>(sqlite3_column_text(stmt, 0));
file = reinterpret_cast<const char*>(sqlite3_column_text(stmt, 1));
p->add_file(disc_name, directory, file);
}
}
else if(result == SQLITE_DONE)
{
// No more results
break;
}
else
{
// We got an error
throw(DBError(error_message, DBError::EXECUTE_STATEMENT));
}
}
// Finalize statement
result =
sqlite3_finalize(stmt);
if(result != SQLITE_OK)
throw(DBError(error_message, DBError::FINALIZE_STATEMENT));
}<|fim▁end|> | }
void |
<|file_name|>base.py<|end_file_name|><|fim▁begin|>"""
============================
Base RPC Handler for Tornado
============================
This is a basic server implementation, designed for use within the
Tornado framework. The classes in this library should not be used
directly, but rather though the XML or JSON RPC implementations.
You can use the utility functions like 'private' and 'start_server'.
"""
from tornado.web import RequestHandler
import tornado.web
import tornado.ioloop
import tornado.httpserver
from tornado.concurrent import Future, TracebackFuture
from tornado import gen
from tornado.stack_context import ExceptionStackContext, run_with_stack_context
import types
import traceback
from tornadorpc_evok.utils import getcallargs
# Configuration element
class Config(object):
verbose = True
short_errors = True
config = Config()
class BaseRPCParser(object):
"""
This class is responsible for managing the request, dispatch,
and response formatting of the system. It is tied into the
_RPC_ attribute of the BaseRPCHandler (or subclasses) and
populated as necessary throughout the request. Use the
.faults attribute to take advantage of the built-in error
codes.
"""
content_type = 'text/plain'
def __init__(self, library, encode=None, decode=None):
# Attaches the RPC library and encode / decode functions.
self.library = library
if not encode:
encode = getattr(library, 'dumps')
if not decode:
decode = getattr(library, 'loads')
self.encode = encode
self.decode = decode
self.requests_in_progress = 0
self.responses = []
@property
def faults(self):
# Grabs the fault tree on request
return Faults(self)
def response(self, handler):
"""
This is the callback for a single finished dispatch.
Once all the dispatches have been run, it calls the
parser library to parse responses and then calls the
handler's async method.
"""
handler._requests -= 1
if handler._requests > 0:
return
# We are finished with requests, send response
if handler._RPC_finished:
# We've already sent the response
raise Exception("Error trying to send response twice.")
handler._RPC_finished = True
responses = tuple(handler._results)
response_text = self.parse_responses(responses)
if type(response_text) not in types.StringTypes:
# Likely a fault, or something messed up
response_text = self.encode(response_text)
# Calling the async callback
handler.on_result(response_text)
def traceback(self, method_name='REQUEST', params=[]):
err_lines = traceback.format_exc().splitlines()
err_title = "ERROR IN %s" % method_name
if len(params) > 0:
err_title = '%s - (PARAMS: %s)' % (err_title, repr(params))
err_sep = ('-'*len(err_title))[:79]
err_lines = [err_sep, err_title, err_sep]+err_lines
if config.verbose:
if len(err_lines) >= 7 and config.short_errors:<|fim▁hole|> # Minimum number of lines to see what happened
# Plus title and separators
print '\n'.join(err_lines[0:4]+err_lines[-3:])
else:
print '\n'.join(err_lines)
# Log here
return
def parse_request(self, request_body):
"""
Extend this on the implementing protocol. If it
should error out, return the output of the
'self.faults.fault_name' response. Otherwise,
it MUST return a TUPLE of TUPLE. Each entry
tuple must have the following structure:
('method_name', params)
...where params is a list or dictionary of
arguments (positional or keyword, respectively.)
So, the result should look something like
the following:
( ('add', [5,4]), ('add', {'x':5, 'y':4}) )
"""
return ([], [])
def parse_responses(self, responses):
"""
Extend this on the implementing protocol. It must
return a response that can be returned as output to
the client.
"""
return self.encode(responses, methodresponse=True)
def check_method(self, attr_name, obj):
"""
        Checks that the given attribute is not private (by the decorator
        or by a leading underscore) and returns the attribute itself;
        private or underscored names fail the assertions below.
"""
assert(not attr_name.startswith('_'))
attr = getattr(obj, attr_name)
assert( not getattr(attr, 'private', False))
return attr
class BaseRPCHandler(RequestHandler):
"""
This is the base handler to be subclassed by the actual
implementations and by the end user.
"""
_RPC_ = None
#_requests = 1
rpcrequests = None
_error = None
_RPC_finished = False
def prepare(self):
"""
Parse request_body, prepares self.rpcrequest
On error call finish or set self._error - to be serialized by export procedure
"""
try:
requests = self._RPC_.parse_request(self.request.body)
if not isinstance(requests, types.TupleType):
# SHOULD be the result of a fault call,
# according tothe parse_request spec below.
if isinstance(requests, basestring):
# Should be the response text of a fault
# This will break in Python 3.x
self.finish(requests)
elif hasattr(requests, 'response'):
# Fault types should have a 'response' method
self.finish(requests.response())
elif hasattr(requests, 'faultCode'):
# XML-RPC fault types need to be properly dispatched. This
# should only happen if there was an error parsing the
self._error = requests
else:
# No idea, hopefully the handler knows what it is doing.
self.finish(requests)
return
self.rpcrequests = requests
except (AttributeError,Exception):
self._RPC_.traceback()
self._error = self._RPC_.faults.parse_error()
@tornado.web.asynchronous
@gen.coroutine
def post(self):
# Dispatches request methods
# rpcrequests are prepared in self.prepare()
if self._error:
responses = (self._error,)
else:
futures = [self._dispatch(method, args) for method,args in self.rpcrequests ]
if len(futures) == 1:
response = yield futures[0]
responses = (response,)
else:
responses = yield futures
responses = tuple(responses)
response_text = self._RPC_.parse_responses(responses)
self.set_header('Content-Type', self._RPC_.content_type)
self.finish(response_text)
#self._RPC_.run(self, request_body)
@gen.coroutine
def _dispatch(self, method_name, params):
"""
This method walks the attribute tree in the method
and passes the parameters, either in positional or
keyword form, into the appropriate method on the
Handler class. Currently supports only positional
or keyword arguments, not mixed.
"""
try:
assert(not hasattr(RequestHandler, method_name))
print method_name
method = self
method_list = dir(method)
method_list.sort()
attr_tree = method_name.split('.')
for attr_name in attr_tree:
method = self._RPC_.check_method(attr_name, method)
assert(callable(method))
assert(not method_name.startswith('_'))
assert(not getattr(method, 'private', False))
except Exception,e :
raise gen.Return(self._RPC_.faults.method_not_found())
args = []
kwargs = {}
try:
if isinstance(params, dict):
# The parameters are keyword-based
kwargs = params
elif type(params) in (list, tuple):
# The parameters are positional
args = params
else:
# Bad argument formatting?
raise Exception()
# Validating call arguments
final_kwargs, extra_args = getcallargs(method, *args, **kwargs)
except Exception:
raise gen.Return(self._RPC_.faults.invalid_params())
try:
if getattr(method, 'coroutine', False):
method=tornado.gen.coroutine(method)
response = yield method(*extra_args, **final_kwargs)
else:
response = method(*extra_args, **final_kwargs)
except Exception:
self._RPC_.traceback(method_name, params)
raise gen.Return(self._RPC_.faults.internal_error())
raise gen.Return(response)
class FaultMethod(object):
"""
This is the 'dynamic' fault method so that the message can
be changed on request from the parser.faults call.
"""
def __init__(self, fault, code, message):
self.fault = fault
self.code = code
self.message = message
def __call__(self, message=None):
if message:
self.message = message
return self.fault(self.code, self.message)
class Faults(object):
"""
This holds the codes and messages for the RPC implementation.
It is attached (dynamically) to the Parser when called via the
parser.faults query, and returns a FaultMethod to be called so
that the message can be changed. If the 'dynamic' attribute is
not a key in the codes list, then it will error.
USAGE:
parser.fault.parse_error('Error parsing content.')
If no message is passed in, it will check the messages dictionary
for the same key as the codes dict. Otherwise, it just prettifies
the code 'key' from the codes dict.
"""
codes = {
'parse_error': -32700,
'method_not_found': -32601,
'invalid_request': -32600,
'invalid_params': -32602,
'internal_error': -32603
}
messages = {}
def __init__(self, parser, fault=None):
self.library = parser.library
self.fault = fault
if not self.fault:
self.fault = getattr(self.library, 'Fault')
def __getattr__(self, attr):
message = 'Error'
if attr in self.messages.keys():
message = self.messages[attr]
else:
message = ' '.join(map(str.capitalize, attr.split('_')))
fault = FaultMethod(self.fault, self.codes[attr], message)
return fault
"""
Utility Functions
"""
def private(func):
"""
Use this to make a method private.
It is intended to be used as a decorator.
If you wish to make a method tree private, just
create and set the 'private' variable to True
on the tree object itself.
"""
func.private = True
return func
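# A minimal sketch of the decorator in use (hypothetical handler class):
#
#     class ExampleHandler(BaseRPCHandler):
#         def add(self, x, y):
#             return x + y
#
#         @private
#         def helper(self):
#             return 'not callable over RPC'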
#def async(func):
# """
# Use this to make a method asynchronous
# It is intended to be used as a decorator.
# Make sure you call "self.result" on any
# async method. Also, trees do not currently
# support async methods.
# """
# func.async = True
# return func
def coroutine(func):
func.coroutine = True
return func
def start_server(handlers, route=r'/', port=8080):
"""
This is just a friendly wrapper around the default
Tornado instantiation calls. It simplifies the imports
and setup calls you'd make otherwise.
USAGE:
start_server(handler_class, route=r'/', port=8181)
"""
if type(handlers) not in (types.ListType, types.TupleType):
handler = handlers
handlers = [(route, handler)]
if route != '/RPC2':
# friendly addition for /RPC2 if it's the only one
handlers.append(('/RPC2', handler))
application = tornado.web.Application(handlers)
http_server = tornado.httpserver.HTTPServer(application)
http_server.listen(port)
loop_instance = tornado.ioloop.IOLoop.instance()
""" Setting the '_server' attribute if not set """
for (route, handler) in handlers:
try:
setattr(handler, '_server', loop_instance)
except AttributeError:
handler._server = loop_instance
loop_instance.start()
return loop_instance
"""
The following is a test implementation which should work
for both the XMLRPC and the JSONRPC clients.
"""
class TestMethodTree(object):
def power(self, x, y=2):
return pow(x, y)
@private
def private(self):
# Shouldn't be called
return False
class TestRPCHandler(BaseRPCHandler):
_RPC_ = None
def add(self, x, y):
return x+y
def ping(self, x):
return x
def noargs(self):
return 'Works!'
tree = TestMethodTree()
def _private(self):
# Shouldn't be called
return False
@private
def private(self):
# Also shouldn't be called
return False<|fim▁end|> | |
<|file_name|>pythonutil.py<|end_file_name|><|fim▁begin|>from socket import inet_ntoa
from struct import pack
def calcDottedNetmask(mask):
bits = 0
for i in xrange(32 - mask, 32):
bits |= (1 << i)<|fim▁hole|><|fim▁end|> | packed_value = pack('!I', bits)
addr = inet_ntoa(packed_value)
return addr |
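# Quick illustrative check (not part of the original module):
#   calcDottedNetmask(24) -> '255.255.255.0'
#   calcDottedNetmask(16) -> '255.255.0.0'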
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from .xanim import Anim
from .sanim import SiegeAnim
version = (0, 3, 0) # Version specifier for PyCoD<|fim▁end|> | # <pep8 compliant>
from .xmodel import Model
|
<|file_name|>TrackedEntityRegistrationSMSListener.java<|end_file_name|><|fim▁begin|>package org.hisp.dhis.sms.listener;
/*
* Copyright (c) 2004-2018, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.util.*;
import org.apache.commons.lang3.StringUtils;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.program.Program;
import org.hisp.dhis.program.ProgramInstanceService;
import org.hisp.dhis.sms.command.SMSCommand;
import org.hisp.dhis.sms.command.SMSCommandService;
import org.hisp.dhis.sms.command.code.SMSCode;
import org.hisp.dhis.sms.incoming.IncomingSms;
import org.hisp.dhis.sms.incoming.SmsMessageStatus;
import org.hisp.dhis.sms.parse.ParserType;
import org.hisp.dhis.sms.parse.SMSParserException;
import org.hisp.dhis.system.util.SmsUtils;
import org.hisp.dhis.trackedentity.TrackedEntityAttribute;
import org.hisp.dhis.trackedentity.TrackedEntityInstance;
import org.hisp.dhis.trackedentity.TrackedEntityInstanceService;
import org.hisp.dhis.trackedentity.TrackedEntityTypeService;
import org.hisp.dhis.trackedentityattributevalue.TrackedEntityAttributeValue;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
@Transactional
public class TrackedEntityRegistrationSMSListener
extends BaseSMSListener
{
private static final String SUCCESS_MESSAGE = "Tracked Entity Registered Successfully with uid. ";
// -------------------------------------------------------------------------
// Dependencies
// -------------------------------------------------------------------------
@Autowired
private SMSCommandService smsCommandService;
@Autowired
private TrackedEntityTypeService trackedEntityTypeService;
@Autowired
private TrackedEntityInstanceService trackedEntityInstanceService;
@Autowired
private ProgramInstanceService programInstanceService;
// -------------------------------------------------------------------------
// IncomingSmsListener implementation
// -------------------------------------------------------------------------
@Override
protected void postProcess( IncomingSms sms, SMSCommand smsCommand, Map<String, String> parsedMessage )
{
String message = sms.getText();
<|fim▁hole|> Collection<OrganisationUnit> orgUnits = getOrganisationUnits( sms );
Program program = smsCommand.getProgram();
OrganisationUnit orgUnit = SmsUtils.selectOrganisationUnit( orgUnits, parsedMessage, smsCommand );
if ( !program.hasOrganisationUnit( orgUnit ) )
{
sendFeedback( SMSCommand.NO_OU_FOR_PROGRAM, senderPhoneNumber, WARNING );
throw new SMSParserException( SMSCommand.NO_OU_FOR_PROGRAM );
}
TrackedEntityInstance trackedEntityInstance = new TrackedEntityInstance();
trackedEntityInstance.setOrganisationUnit( orgUnit );
trackedEntityInstance.setTrackedEntityType( trackedEntityTypeService.getTrackedEntityByName( smsCommand.getProgram().getTrackedEntityType().getName() ) );
Set<TrackedEntityAttributeValue> patientAttributeValues = new HashSet<>();
smsCommand.getCodes().stream()
.filter( code -> parsedMessage.containsKey( code.getCode() ) )
.forEach( code ->
{
TrackedEntityAttributeValue trackedEntityAttributeValue = this.createTrackedEntityAttributeValue( parsedMessage, code, trackedEntityInstance) ;
patientAttributeValues.add( trackedEntityAttributeValue );
});
int trackedEntityInstanceId = 0;
if ( patientAttributeValues.size() > 0 )
{
trackedEntityInstanceId = trackedEntityInstanceService.createTrackedEntityInstance( trackedEntityInstance,
null, null, patientAttributeValues );
}
else
{
sendFeedback( "No TrackedEntityAttribute found", senderPhoneNumber, WARNING );
}
TrackedEntityInstance tei = trackedEntityInstanceService.getTrackedEntityInstance( trackedEntityInstanceId );
programInstanceService.enrollTrackedEntityInstance( tei, smsCommand.getProgram(), new Date(), date, orgUnit );
sendFeedback( StringUtils.defaultIfBlank( smsCommand.getSuccessMessage(), SUCCESS_MESSAGE + tei.getUid() ), senderPhoneNumber, INFO );
update( sms, SmsMessageStatus.PROCESSED, true );
}
@Override
protected SMSCommand getSMSCommand( IncomingSms sms )
{
return smsCommandService.getSMSCommand( SmsUtils.getCommandString( sms ),
ParserType.TRACKED_ENTITY_REGISTRATION_PARSER );
}
private TrackedEntityAttributeValue createTrackedEntityAttributeValue( Map<String, String> parsedMessage,
SMSCode code, TrackedEntityInstance trackedEntityInstance )
{
String value = parsedMessage.get( code.getCode() );
TrackedEntityAttribute trackedEntityAttribute = code.getTrackedEntityAttribute();
TrackedEntityAttributeValue trackedEntityAttributeValue = new TrackedEntityAttributeValue();
trackedEntityAttributeValue.setAttribute( trackedEntityAttribute );
trackedEntityAttributeValue.setEntityInstance( trackedEntityInstance );
trackedEntityAttributeValue.setValue( value );
return trackedEntityAttributeValue;
}
}<|fim▁end|> | Date date = SmsUtils.lookForDate( message );
String senderPhoneNumber = StringUtils.replace( sms.getOriginator(), "+", "" );
|
<|file_name|>scrape_mbz_instruments.py<|end_file_name|><|fim▁begin|># !/usr/local/bin/python3.4.2
# ----Copyright (c) 2017 Carnegie Hall | The MIT License (MIT)----
# ----For the full license terms, please visit https://github.com/CarnegieHall/linked-data/blob/master/LICENSE----
##needs further refinement to eliminate non-instrument link results
## Argument[0] is script to run
import csv
import httplib2
import json
import os<|fim▁hole|>
h = httplib2.Http()
link = 'https://musicbrainz.org/instruments'
uri_root = 'https://musicbrainz.org'
resp, html_doc = h.request(link, "GET")
soup = BeautifulSoup(html_doc, "lxml")
for result in soup.body.select(
        'a[href^="/instrument/"]'):
label = result.contents[0].string
uri = ''.join([uri_root, result.get('href')])
mbz_instDict[str(uri)] = label
mbz_instDict_path = os.path.join(
os.path.dirname(__file__), os.pardir, 'source-files', 'mbz_instDict.json')
mbz_instList_path = os.path.join(
os.path.dirname(__file__), os.pardir, 'source-files', 'mbz_instList.csv')
with open(mbz_instDict_path, 'w') as f1:
json.dump(mbz_instDict, f1)
with open(mbz_instList_path, 'w', newline='') as csvfile:
w = csv.writer(csvfile, dialect='excel', delimiter=',')
for k,v in mbz_instDict.items():
w.writerow([k,v])
print("Finished gathering MusicBrainz instrument URIs and labels")<|fim▁end|> | import sys
from bs4 import BeautifulSoup
mbz_instDict = {} |
<|file_name|>test_swe_lun_eclipse.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import swisseph as swe
import unittest
class TestSweLunEclipse(unittest.TestCase):
@classmethod
def setUpClass(cls):
swe.set_ephe_path()
def test_01(self):
jd = 2454466.5
flags = swe.FLG_SWIEPH
geopos = (12.1, 49.0, 330)
rflags, tret = swe.lun_eclipse_when(jd, flags, 0)
self.assertEqual(rflags, 4)
self.assertEqual(len(tret), 10)
t1 = (2454517.6430690456, 0.0, 2454517.57172334, 2454517.7144189165,
2454517.6258038115, 2454517.6603509136, 2454517.525389122,
2454517.7608554545, 0.0, 0.0)
for i in range(10):
self.assertAlmostEqual(tret[i], t1[i])
tjdut = tret[0]
rflags, tret, attr = swe.lun_eclipse_when_loc(tjdut, geopos, flags)
self.assertEqual(rflags, 29584)
self.assertEqual(len(tret), 10)<|fim▁hole|>
self.assertEqual(len(attr), 20)
t1 = (0.8076127691060245, 1.8366497324296667, 0.0, 0.0,
326.9885866287668, 21.362590458352507, 21.402251051495636,
0.5301609960196174, 0.8076127691060245, 138.0, 28.0, 28.0,
28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0, 28.0)
for i in range(20):
self.assertAlmostEqual(attr[i], t1[i])
rflags, attr = swe.lun_eclipse_how(tjdut, geopos, flags)
self.assertEqual(rflags, 4)
self.assertEqual(len(attr), 20)
t1 = (1.1061093373639495, 2.145134309769692, 0.0, 0.0,
73.8203145568749, 26.299290272560974, 26.330700027276947,
0.3801625589840114, 1.1061093373639495, 133.0, 26.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0)
for i in range(20):
self.assertAlmostEqual(attr[i], t1[i])
if __name__ == '__main__':
unittest.main()
# vi: sw=4 ts=4 et<|fim▁end|> | t1 = (2454695.3820517384, 0.0, 2454695.316710297, 2454695.447390333,
0.0, 0.0, 2454695.2672055247, 2454695.496797575, 0.0, 0.0)
for i in range(10):
self.assertAlmostEqual(tret[i], t1[i]) |
<|file_name|>CompositeShortNamesCache.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiField;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiMethod;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.PsiShortNamesCache;
import com.intellij.util.ArrayUtil;
import com.intellij.util.CommonProcessors;
import com.intellij.util.Processor;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.indexing.IdFilter;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Arrays;
import java.util.Set;
public class CompositeShortNamesCache extends PsiShortNamesCache {
private final PsiShortNamesCache[] myCaches;
public CompositeShortNamesCache(Project project) {
myCaches = project.isDefault() ? new PsiShortNamesCache[0] : project.getExtensions(PsiShortNamesCache.EP_NAME);
}
@Override
@NotNull
public PsiFile[] getFilesByName(@NotNull String name) {
Merger<PsiFile> merger = null;
for (PsiShortNamesCache cache : myCaches) {
PsiFile[] classes = cache.getFilesByName(name);
if (classes.length != 0) {
if (merger == null) merger = new Merger<>();
merger.add(classes);
}
}
PsiFile[] result = merger == null ? null : merger.getResult();
return result != null ? result : PsiFile.EMPTY_ARRAY;
}
@Override
@NotNull
public String[] getAllFileNames() {
Merger<String> merger = new Merger<>();
for (PsiShortNamesCache cache : myCaches) {
merger.add(cache.getAllFileNames());
}
String[] result = merger.getResult();
return result != null ? result : ArrayUtil.EMPTY_STRING_ARRAY;
}
@Override
@NotNull
public PsiClass[] getClassesByName(@NotNull String name, @NotNull GlobalSearchScope scope) {
Merger<PsiClass> merger = null;
for (PsiShortNamesCache cache : myCaches) {
PsiClass[] classes = cache.getClassesByName(name, scope);
if (classes.length != 0) {
if (merger == null) merger = new Merger<>();
merger.add(classes);
}
}
PsiClass[] result = merger == null ? null : merger.getResult();
return result != null ? result : PsiClass.EMPTY_ARRAY;
}
@Override
@NotNull
public String[] getAllClassNames() {
Merger<String> merger = new Merger<>();
for (PsiShortNamesCache cache : myCaches) {
String[] names = cache.getAllClassNames();
merger.add(names);
}
String[] result = merger.getResult();
return result != null ? result : ArrayUtil.EMPTY_STRING_ARRAY;
}
@Override
public boolean processAllClassNames(@NotNull Processor<String> processor) {
CommonProcessors.UniqueProcessor<String> uniqueProcessor = new CommonProcessors.UniqueProcessor<>(processor);
for (PsiShortNamesCache cache : myCaches) {
if (!cache.processAllClassNames(uniqueProcessor)) {
return false;
}
}
return true;
}
@Override
public boolean processAllClassNames(@NotNull Processor<String> processor, @NotNull GlobalSearchScope scope, IdFilter filter) {
for (PsiShortNamesCache cache : myCaches) {
if (!cache.processAllClassNames(processor, scope, filter)) {
return false;
}
}
return true;
}
@Override
public boolean processAllMethodNames(@NotNull Processor<String> processor, @NotNull GlobalSearchScope scope, IdFilter filter) {
for (PsiShortNamesCache cache : myCaches) {
if (!cache.processAllMethodNames(processor, scope, filter)) {
return false;
}
}
return true;
}
@Override
public boolean processAllFieldNames(@NotNull Processor<String> processor, @NotNull GlobalSearchScope scope, IdFilter filter) {
for (PsiShortNamesCache cache : myCaches) {
if (!cache.processAllFieldNames(processor, scope, filter)) {
return false;
}
}
return true;
}
@Override
@NotNull
public PsiMethod[] getMethodsByName(@NotNull String name, @NotNull GlobalSearchScope scope) {
Merger<PsiMethod> merger = null;
for (PsiShortNamesCache cache : myCaches) {
PsiMethod[] methods = cache.getMethodsByName(name, scope);
if (methods.length != 0) {
if (merger == null) merger = new Merger<>();
merger.add(methods);
}
}
PsiMethod[] result = merger == null ? null : merger.getResult();
return result == null ? PsiMethod.EMPTY_ARRAY : result;
}
@Override
@NotNull
public PsiMethod[] getMethodsByNameIfNotMoreThan(@NonNls @NotNull final String name, @NotNull final GlobalSearchScope scope, final int maxCount) {
Merger<PsiMethod> merger = null;
for (PsiShortNamesCache cache : myCaches) {
PsiMethod[] methods = cache.getMethodsByNameIfNotMoreThan(name, scope, maxCount);
if (methods.length == maxCount) return methods;
if (methods.length != 0) {
if (merger == null) merger = new Merger<>();
merger.add(methods);
}
}
PsiMethod[] result = merger == null ? null : merger.getResult();
return result == null ? PsiMethod.EMPTY_ARRAY : result;
}
@NotNull
@Override
public PsiField[] getFieldsByNameIfNotMoreThan(@NonNls @NotNull String name, @NotNull GlobalSearchScope scope, int maxCount) {
Merger<PsiField> merger = null;
for (PsiShortNamesCache cache : myCaches) {
PsiField[] fields = cache.getFieldsByNameIfNotMoreThan(name, scope, maxCount);
if (fields.length == maxCount) return fields;
if (fields.length != 0) {
if (merger == null) merger = new Merger<>();
merger.add(fields);
}
}
PsiField[] result = merger == null ? null : merger.getResult();
return result == null ? PsiField.EMPTY_ARRAY : result;
}
@Override
public boolean processMethodsWithName(@NonNls @NotNull String name,
@NotNull GlobalSearchScope scope,
@NotNull Processor<PsiMethod> processor) {
return processMethodsWithName(name, processor, scope, null);
}
@Override
public boolean processMethodsWithName(@NonNls @NotNull String name,
@NotNull Processor<? super PsiMethod> processor,
@NotNull GlobalSearchScope scope,
@Nullable IdFilter idFilter) {
for (PsiShortNamesCache cache : myCaches) {
if (!cache.processMethodsWithName(name, processor, scope, idFilter)) return false;
}
return true;
}
@Override
@NotNull
public String[] getAllMethodNames() {
Merger<String> merger = new Merger<>();
for (PsiShortNamesCache cache : myCaches) {<|fim▁hole|> }
@Override
@NotNull
public PsiField[] getFieldsByName(@NotNull String name, @NotNull GlobalSearchScope scope) {
Merger<PsiField> merger = null;
for (PsiShortNamesCache cache : myCaches) {
PsiField[] classes = cache.getFieldsByName(name, scope);
if (classes.length != 0) {
if (merger == null) merger = new Merger<>();
merger.add(classes);
}
}
PsiField[] result = merger == null ? null : merger.getResult();
return result == null ? PsiField.EMPTY_ARRAY : result;
}
@Override
@NotNull
public String[] getAllFieldNames() {
Merger<String> merger = null;
for (PsiShortNamesCache cache : myCaches) {
String[] classes = cache.getAllFieldNames();
if (classes.length != 0) {
if (merger == null) merger = new Merger<>();
merger.add(classes);
}
}
String[] result = merger == null ? null : merger.getResult();
return result == null ? ArrayUtil.EMPTY_STRING_ARRAY : result;
}
@Override
public boolean processFieldsWithName(@NotNull String key,
@NotNull Processor<? super PsiField> processor,
@NotNull GlobalSearchScope scope,
@Nullable IdFilter filter) {
for (PsiShortNamesCache cache : myCaches) {
if (!cache.processFieldsWithName(key, processor, scope, filter)) return false;
}
return true;
}
@Override
public boolean processClassesWithName(@NotNull String key,
@NotNull Processor<? super PsiClass> processor,
@NotNull GlobalSearchScope scope,
@Nullable IdFilter filter) {
for (PsiShortNamesCache cache : myCaches) {
if (!cache.processClassesWithName(key, processor, scope, filter)) return false;
}
return true;
}
private static class Merger<T> {
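    // Keeps the first batch of results as the bare array; only switches to a
    // de-duplicating set once a second cache contributes, so the common
    // single-cache case avoids extra allocations.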
private T[] mySingleItem;
private Set<T> myAllItems;
public void add(@NotNull T[] items) {
if (items.length == 0) return;
if (mySingleItem == null) {
mySingleItem = items;
return;
}
if (myAllItems == null) {
T[] elements = mySingleItem;
myAllItems = ContainerUtil.addAll(new THashSet<>(elements.length), elements);
}
ContainerUtil.addAll(myAllItems, items);
}
public T[] getResult() {
if (myAllItems == null) return mySingleItem;
return myAllItems.toArray(mySingleItem);
}
}
@SuppressWarnings({"HardCodedStringLiteral"})
@Override
public String toString() {
return "Composite cache: " + Arrays.asList(myCaches);
}
}<|fim▁end|> | merger.add(cache.getAllMethodNames());
}
String[] result = merger.getResult();
return result != null ? result : ArrayUtil.EMPTY_STRING_ARRAY; |
<|file_name|>bam_conversion.py<|end_file_name|><|fim▁begin|>"""Converting BAM to BEDPE and normalized BigWig files."""
import os
from resolwe.process import (
Cmd,
DataField,
FileField,
FloatField,
Process,
SchedulingClass,
StringField,
)
class BamToBedpe(Process):
"""Takes in a BAM file and calculates a normalization factor in BEDPE format.
    Done by sorting with Samtools and converting with Bedtools.
"""
slug = "bedtools-bamtobed"
name = "Bedtools bamtobed"
requirements = {
"expression-engine": "jinja",
"executor": {
"docker": {"image": "public.ecr.aws/genialis/resolwebio/rnaseq:6.0.0"}
},
"resources": {"cores": 1, "memory": 8192},
}
data_name = "Bedtools bamtobed ({{alignment|sample_name|default('?')}})"
version = "1.2.0"
process_type = "data:bedpe"
category = "Other"
entity = {"type": "sample"}
scheduling_class = SchedulingClass.BATCH
class Input:
"""Input fields."""
alignment = DataField("alignment:bam", label="Alignment BAM file")
class Output:
"""Output fields."""
bedpe = FileField(label="BEDPE file")
species = StringField(label="Species")
build = StringField(label="Build")
def run(self, inputs, outputs):
"""Run the analysis."""
path = inputs.alignment.output.bam.path
basename = os.path.basename(path)
assert basename.endswith(".bam")
name = basename[:-4]
bedpe_file = f"{name}.bedpe"
samtools_param = ["-n", path]
bedtools_param = ["-bedpe", "-i"]
(
Cmd["samtools"]["sort"][samtools_param]
| Cmd["bedtools"]["bamtobed"][bedtools_param]
> bedpe_file
)()
if not os.path.exists(bedpe_file):
self.error("Converting BAM to BEDPE with Bedtools bamtobed failed.")
outputs.bedpe = bedpe_file
outputs.species = inputs.alignment.output.species
outputs.build = inputs.alignment.output.build
class ScaleBigWig(Process):
"""Creates a scaled BigWig file."""
slug = "scale-bigwig"
name = "Deeptools bamCoverage"
requirements = {
"expression-engine": "jinja",
"executor": {
"docker": {"image": "public.ecr.aws/genialis/resolwebio/rnaseq:6.0.0"}
},
"resources": {"cores": 1, "memory": 16384},
}
data_name = "Scale BigWig ({{alignment|sample_name|default('?')}})"
version = "1.2.0"
process_type = "data:coverage:bigwig"
category = "Other"
entity = {"type": "sample"}
scheduling_class = SchedulingClass.BATCH
class Input:
"""Input fields."""
alignment = DataField("alignment:bam", label="Alignment BAM file")
bedpe = DataField(
"bedpe",
label="BEDPE Normalization factor",
description="The BEDPE file describes disjoint genome features, "
"such as structural variations or paired-end sequence alignments. "
"It is used to estimate the scale factor.",
)
scale = FloatField(
label="Scale for the normalization factor",
description="Magnitude of the scale factor. "
"The scaling factor is calculated by dividing the scale "
"with the number of features in BEDPE "
"(scale/(number of features)).",
default=10000,
)
class Output:
"""Output fields."""
bigwig = FileField(label="bigwig file")
species = StringField(label="Species")
build = StringField(label="Build")
def run(self, inputs, outputs):
"""Run the analysis."""
path = inputs.alignment.output.bam.path
basename = os.path.basename(path)
assert basename.endswith(".bam")
name = basename[:-4]<|fim▁hole|> out_index = f"{name}.bai"
with open(inputs.bedpe.output.bedpe.path) as f:
spike_count = f.readlines()
spike_count = len(spike_count)
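        # Normalization factor: the requested scale divided by the number of spike-in fragments (lines in the BEDPE file).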
scale_factor = inputs.scale / spike_count
bam_coverage_param = [
"--bam",
path,
"--scaleFactor",
scale_factor,
"--outFileName",
out_file,
"--numberOfProcessors",
self.requirements.resources.cores,
"--outFileFormat",
"bigwig",
]
(Cmd["samtools"]["index"][path][out_index])()
self.progress(0.5)
(Cmd["bamCoverage"][bam_coverage_param])()
if not os.path.exists(out_file):
self.error("Generation of a scaled BigWig file with bamCoverage failed.")
outputs.bigwig = out_file
outputs.species = inputs.alignment.output.species
outputs.build = inputs.alignment.output.build<|fim▁end|> | out_file = f"{name}.SInorm.bigwig" |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from .dual_structured_quad import (
DualRectilinearGraph,
DualStructuredQuadGraph,
DualUniformRectilinearGraph,
)
from .structured_quad import (
RectilinearGraph,
StructuredQuadGraph,
UniformRectilinearGraph,
)<|fim▁hole|> "RectilinearGraph",
"UniformRectilinearGraph",
"DualUniformRectilinearGraph",
"DualRectilinearGraph",
"DualStructuredQuadGraph",
]<|fim▁end|> |
__all__ = [
"StructuredQuadGraph", |
<|file_name|>classes_a.js<|end_file_name|><|fim▁begin|>var searchData=
[
['screeningsolver',['ScreeningSolver',['../classhdim_1_1internal_1_1_screening_solver.html',1,'hdim::internal']]],
['sgd',['SGD',['../classhdim_1_1hdim_1_1_s_g_d.html',1,'hdim::hdim']]],
['sgd_5fsr',['SGD_SR',['../classhdim_1_1hdim_1_1_s_g_d___s_r.html',1,'hdim::hdim']]],
['softthres',['SoftThres',['../structhdim_1_1_soft_thres.html',1,'hdim']]],
['solver',['Solver',['../classhdim_1_1internal_1_1_solver.html',1,'hdim::internal']]],
['solver',['Solver',['../classocl_1_1internal_1_1_solver.html',1,'ocl::internal']]],
['solver',['Solver',['../classhdim_1_1vcl_1_1internal_1_1_solver.html',1,'hdim::vcl::internal']]],
['solver_5fd',['Solver_d',['../classhdim_1_1hdim_1_1_solver__d.html',1,'hdim::hdim']]],
['solver_5ff',['Solver_f',['../classhdim_1_1hdim_1_1_solver__f.html',1,'hdim::hdim']]],
['srsolver_5fd',['SRSolver_d',['../classhdim_1_1hdim_1_1_s_r_solver__d.html',1,'hdim::hdim']]],
['srsolver_5ff',['SRSolver_f',['../classhdim_1_1hdim_1_1_s_r_solver__f.html',1,'hdim::hdim']]],
['subgradientsolver',['SubGradientSolver',['../classhdim_1_1vcl_1_1internal_1_1_sub_gradient_solver.html',1,'hdim::vcl::internal']]],
['subgradientsolver',['SubGradientSolver',['../classhdim_1_1internal_1_1_sub_gradient_solver.html',1,'hdim::internal']]],<|fim▁hole|> ['swig_5fcast_5finfo',['swig_cast_info',['../structswig__cast__info.html',1,'']]],
['swig_5fconst_5finfo',['swig_const_info',['../structswig__const__info.html',1,'']]],
['swig_5fglobalvar',['swig_globalvar',['../structswig__globalvar.html',1,'']]],
['swig_5fmodule_5finfo',['swig_module_info',['../structswig__module__info.html',1,'']]],
['swig_5ftype_5finfo',['swig_type_info',['../structswig__type__info.html',1,'']]],
['swig_5fvarlinkobject',['swig_varlinkobject',['../structswig__varlinkobject.html',1,'']]],
['swigptr_5fpyobject',['SwigPtr_PyObject',['../classswig_1_1_swig_ptr___py_object.html',1,'swig']]],
['swigpyclientdata',['SwigPyClientData',['../struct_swig_py_client_data.html',1,'']]],
['swigpyobject',['SwigPyObject',['../struct_swig_py_object.html',1,'']]],
['swigpypacked',['SwigPyPacked',['../struct_swig_py_packed.html',1,'']]],
['swigvar_5fpyobject',['SwigVar_PyObject',['../structswig_1_1_swig_var___py_object.html',1,'swig']]]
];<|fim▁end|> | ['subgradientsolver',['SubGradientSolver',['../classhdim_1_1ocl_1_1internal_1_1_sub_gradient_solver.html',1,'hdim::ocl::internal']]],
['supportsift',['SupportSift',['../structhdim_1_1_support_sift.html',1,'hdim']]], |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![feature(plugin)]
#![feature(test)]
#![plugin(peg_syntax_ext)]
#[macro_use]
extern crate nom;
extern crate clap;
extern crate termion;
extern crate byteorder;
extern crate env_logger;
extern crate vec_map;
extern crate tempdir;
#[macro_use]
extern crate log;
mod db;
use db::server::Server;
use db::database::{Database, QueryResult};
use clap::{Arg, App, SubCommand};
use std::io::{self, Read};
use std::io::{Write, stdout, stdin};
use std::fs;
use termion::input::TermRead;
use termion::{color, style};
use std::path::PathBuf;
fn main() {
println!("Starting the worst database ever created!! (exit to exit)");
let server = Server::new();
let app = App::new("TotalRecallDB")
.version("v1.0")
.author("Jon Haddad, <[email protected]>")
.subcommand(SubCommand::with_name("test"))
.get_matches();
if let Some(matches) = app.subcommand_matches("test") {
run_test_repl();
}
}
fn run_test_repl() {
let _ = env_logger::init();
println!("Running test repl");
// use local dir "dbs"
let dbdir = "trdb";<|fim▁hole|> warn!("Error creating directory {}", x);
}
let mut db = Database::new(PathBuf::from(dbdir));
let mut stdin = stdin();
let mut stdout = stdout();
let prompt = "embedded>";
loop {
write!(stdout, "{}[?] {}{} ", color::Fg(color::Green), style::Reset, prompt).unwrap();
stdout.lock().flush().unwrap();
match TermRead::read_line(&mut stdin) {
Ok(Some(buffer)) => {
if buffer == "exit" {
write!(stdout, "Exiting\r\n");
stdout.lock().flush().unwrap();
return;
}
let x= match db.execute(&buffer) {
Ok(QueryResult::StreamCreated) =>
String::from("Stream Created.\n"),
Ok(QueryResult::Insert(id)) =>
format!("Inserted {}", id),
_ => String::from("Fail?")
};
println!("{}", x);
},
Ok(None) => {},
Err(e) => {}
}
}
}<|fim▁end|> | if let Err(x) = fs::create_dir(dbdir) { |
<|file_name|>sim_state.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import functools
import itertools
import contextlib
import weakref
import logging
l = logging.getLogger("angr.sim_state")
import claripy
import ana
from archinfo import arch_from_id
from .misc.ux import deprecated
def arch_overrideable(f):
@functools.wraps(f)
def wrapped_f(self, *args, **kwargs):
if hasattr(self.arch, f.__name__):
arch_f = getattr(self.arch, f.__name__)
return arch_f(self, *args, **kwargs)
else:
return f(self, *args, **kwargs)
return wrapped_f
from .state_plugins import default_plugins
# This is a counter for the state-merging symbolic variables
merge_counter = itertools.count()
class SimState(ana.Storable): # pylint: disable=R0904
"""
The SimState represents the state of a program, including its memory, registers, and so forth.
:ivar regs: A convenient view of the state's registers, where each register is a property
:ivar mem: A convenient view of the state's memory, a :class:`angr.state_plugins.view.SimMemView`
:ivar registers: The state's register file as a flat memory region
:ivar memory: The state's memory as a flat memory region
:ivar solver: The symbolic solver and variable manager for this state
:ivar inspect: The breakpoint manager, a :class:`angr.state_plugins.inspect.SimInspector`
:ivar log: Information about the state's history
:ivar scratch: Information about the current execution step
:ivar posix: MISNOMER: information about the operating system or environment model
:ivar libc: Information about the standard library we are emulating
:ivar cgc: Information about the cgc environment
:ivar uc_manager: Control of under-constrained symbolic execution
:ivar unicorn: Control of the Unicorn Engine
"""
def __init__(self, project=None, arch=None, plugins=None, memory_backer=None, permissions_backer=None, mode=None, options=None,
add_options=None, remove_options=None, special_memory_filler=None, os_name=None):
self.project = project
self.arch = arch if arch is not None else project.arch.copy() if project is not None else None
if type(self.arch) is str:
self.arch = arch_from_id(self.arch)
# the options
if options is None:
if mode is None:
l.warning("SimState defaulting to symbolic mode.")
mode = "symbolic"
options = o.modes[mode]
options = set(options)
if add_options is not None:
options |= add_options
if remove_options is not None:
options -= remove_options
self.options = options
self.mode = mode
# plugins
self.plugins = { }
if plugins is not None:
for n,p in plugins.iteritems():
self.register_plugin(n, p)
if not self.has_plugin('memory'):
# we don't set the memory endness because, unlike registers, it's hard to understand
# which endness the data should be read
if o.ABSTRACT_MEMORY in self.options:
# We use SimAbstractMemory in static mode
# Convert memory_backer into 'global' region
if memory_backer is not None:
memory_backer = {'global': memory_backer}
# TODO: support permissions backer in SimAbstractMemory
self.register_plugin('memory', SimAbstractMemory(memory_backer=memory_backer, memory_id="mem"))
elif o.FAST_MEMORY in self.options:
self.register_plugin('memory', SimFastMemory(memory_backer=memory_backer, memory_id="mem"))
else:
self.register_plugin('memory', SimSymbolicMemory(memory_backer=memory_backer, permissions_backer=permissions_backer, memory_id="mem"))
if not self.has_plugin('registers'):
if o.FAST_REGISTERS in self.options:
self.register_plugin('registers', SimFastMemory(memory_id="reg", endness=self.arch.register_endness))
else:
self.register_plugin('registers', SimSymbolicMemory(memory_id="reg", endness=self.arch.register_endness))
# OS name
self.os_name = os_name
# This is used in static mode as we don't have any constraints there
self._satisfiable = True
# states are big, so let's give them UUIDs for ANA right away to avoid
# extra pickling
self.make_uuid()
self.uninitialized_access_handler = None
self._special_memory_filler = special_memory_filler
# this is a global condition, applied to all added constraints, memory reads, etc
self._global_condition = None
self.ip_constraints = []
def _ana_getstate(self):
s = dict(ana.Storable._ana_getstate(self))
s['plugins'] = { k:v for k,v in s['plugins'].iteritems() if k not in ('inspector', 'regs', 'mem') }
return s
def _ana_setstate(self, s):
ana.Storable._ana_setstate(self, s)
for p in self.plugins.values():
p.set_state(self._get_weakref() if not isinstance(p, SimAbstractMemory) else self)
if p.STRONGREF_STATE:
p.set_strongref_state(self)
def _get_weakref(self):
return weakref.proxy(self)
def _get_strongref(self):
return self
def __repr__(self):
try:
ip_str = "%#x" % self.addr
except (SimValueError, SimSolverModeError):
ip_str = repr(self.regs.ip)
return "<SimState @ %s>" % ip_str
#
# Easier access to some properties
#
@property
def ip(self):
"""
Get the instruction pointer expression, trigger SimInspect breakpoints, and generate SimActions.
Use ``_ip`` to not trigger breakpoints or generate actions.
:return: an expression
"""
return self.regs.ip
@ip.setter
def ip(self, val):
self.regs.ip = val
@property
def _ip(self):
"""
Get the instruction pointer expression without triggering SimInspect breakpoints or generating SimActions.
:return: an expression
"""
return self.regs._ip
@_ip.setter
def _ip(self, val):
"""
Set the instruction pointer without triggering SimInspect breakpoints or generating SimActions.
:param val: The new instruction pointer.
:return: None
"""
self.regs._ip = val
@property
def addr(self):
"""
Get the concrete address of the instruction pointer, without triggering SimInspect breakpoints or generating
SimActions. An integer is returned, or an exception is raised if the instruction pointer is symbolic.
:return: an int
"""
return self.se.eval_one(self.regs._ip)
#
# Plugin accessors
#
def __getattr__(self, v):
try:
return self.get_plugin(v)
except KeyError:
raise AttributeError(v)
@property
def memory(self):
return self.get_plugin('memory')
@property
def registers(self):
return self.get_plugin('registers')
@property
def se(self):
return self.get_plugin('solver_engine')
@property
def solver(self):
return self.get_plugin('solver_engine')
@property
def inspect(self):
return self.get_plugin('inspector')
@property
def log(self):
return self.get_plugin('log')
@property
def scratch(self):
return self.get_plugin('scratch')
@property
def history(self):
return self.get_plugin('history')
@property
def posix(self):
return self.get_plugin('posix')
@property
def libc(self):
return self.get_plugin('libc')
@property
def cgc(self):
return self.get_plugin('cgc')
@property
def regs(self):
return self.get_plugin('regs')
@property
def mem(self):
return self.get_plugin('mem')
@property
def gdb(self):
return self.get_plugin('gdb')
@property
def globals(self):
return self.get_plugin('globals')
@property
def uc_manager(self):
return self.get_plugin('uc_manager')
@property
def unicorn(self):
return self.get_plugin('unicorn')
@property
def preconstrainer(self):
return self.get_plugin('preconstrainer')
@property
def callstack(self):
return self.get_plugin('callstack')
def _inspect(self, *args, **kwargs):
if self.has_plugin('inspector'):
self.inspect.action(*args, **kwargs)
def _inspect_getattr(self, attr, default_value):
if self.has_plugin('inspector'):
if hasattr(self.inspect, attr):
return getattr(self.inspect, attr)
return default_value
#
# Plugins
#
def has_plugin(self, name):
return name in self.plugins
def get_plugin(self, name):
if name not in self.plugins:
p = default_plugins[name]()
self.register_plugin(name, p)
return p
return self.plugins[name]
def register_plugin(self, name, plugin):
#l.debug("Adding plugin %s of type %s", name, plugin.__class__.__name__)
plugin.set_state(self._get_weakref() if not isinstance(plugin, SimAbstractMemory) else self)
if plugin.STRONGREF_STATE:
plugin.set_strongref_state(self)
self.plugins[name] = plugin
plugin.init_state()
return plugin
def release_plugin(self, name):
if name in self.plugins:
del self.plugins[name]
#
# Constraint pass-throughs
#
def simplify(self, *args):
"""
Simplify this state's constraints.
"""
return self.se.simplify(*args)
def add_constraints(self, *args, **kwargs):
"""
Add some constraints to the state.
You may pass in any number of symbolic booleans as variadic positional arguments.
"""
if len(args) > 0 and isinstance(args[0], (list, tuple)):<|fim▁hole|> raise Exception("Tuple or list passed to add_constraints!")
if o.TRACK_CONSTRAINTS in self.options and len(args) > 0:
if o.SIMPLIFY_CONSTRAINTS in self.options:
constraints = [ self.simplify(a) for a in args ]
else:
constraints = args
self._inspect('constraints', BP_BEFORE, added_constraints=constraints)
constraints = self._inspect_getattr("added_constraints", constraints)
added = self.se.add(*constraints)
self._inspect('constraints', BP_AFTER)
# add actions for the added constraints
if o.TRACK_CONSTRAINT_ACTIONS in self.options:
for c in added:
sac = SimActionConstraint(self, c)
self.history.add_action(sac)
else:
# preserve the old action logic for when we don't track constraints (why?)
if (
'action' in kwargs and kwargs['action'] and
o.TRACK_CONSTRAINT_ACTIONS in self.options and len(args) > 0
):
for arg in args:
if self.se.symbolic(arg):
sac = SimActionConstraint(self, arg)
self.history.add_action(sac)
if o.ABSTRACT_SOLVER in self.options and len(args) > 0:
for arg in args:
if self.se.is_false(arg):
self._satisfiable = False
return
if self.se.is_true(arg):
continue
# `is_true` and `is_false` does not use VSABackend currently (see commits 97a75366 and 2dfba73e in
# claripy). There is a chance that VSA backend can in fact handle it.
# Therefore we try to resolve it with VSABackend again
if claripy.backends.vsa.is_false(arg):
self._satisfiable = False
return
if claripy.backends.vsa.is_true(arg):
continue
# It's neither True or False. Let's try to apply the condition
# We take the argument, extract a list of constrained SIs out of it (if we could, of course), and
# then replace each original SI the intersection of original SI and the constrained one.
_, converted = self.se.constraint_to_si(arg)
for original_expr, constrained_si in converted:
if not original_expr.variables:
l.error('Incorrect original_expression to replace in add_constraints(). ' +
'This is due to defects in VSA logics inside claripy. Please report ' +
'to Fish and he will fix it if he\'s free.')
continue
new_expr = constrained_si
self.registers.replace_all(original_expr, new_expr)
for _, region in self.memory.regions.items():
region.memory.replace_all(original_expr, new_expr)
l.debug("SimState.add_constraints: Applied to final state.")
elif o.SYMBOLIC not in self.options and len(args) > 0:
for arg in args:
if self.se.is_false(arg):
self._satisfiable = False
return
def satisfiable(self, **kwargs):
"""
Whether the state's constraints are satisfiable
"""
if o.ABSTRACT_SOLVER in self.options or o.SYMBOLIC not in self.options:
extra_constraints = kwargs.pop('extra_constraints', ())
for e in extra_constraints:
if self.se.is_false(e):
return False
return self._satisfiable
else:
return self.se.satisfiable(**kwargs)
def downsize(self):
"""
Clean up after the solver engine. Calling this when a state no longer needs to be solved on will reduce memory
usage.
"""
if 'solver_engine' in self.plugins:
self.se.downsize()
#
# State branching operations
#
def step(self, **kwargs):
"""
Perform a step of symbolic execution using this state.
Any arguments to `AngrObjectFactory.successors` can be passed to this.
:return: A SimSuccessors object categorizing the results of the step.
"""
return self.project.factory.successors(self, **kwargs)
def block(self, *args, **kwargs):
"""
Represent the basic block at this state's instruction pointer.
Any arguments to `AngrObjectFactory.block` can ba passed to this.
:return: A Block object describing the basic block of code at this point.
"""
if not args and 'addr' not in kwargs:
kwargs['addr'] = self.addr
return self.project.factory.block(*args, backup_state=self, **kwargs)
# Returns a dict that is a copy of all the state's plugins
def _copy_plugins(self):
memo = {}
out = {}
for n, p in self.plugins.iteritems():
if id(p) in memo:
out[n] = memo[id(p)]
else:
out[n] = p.copy()
memo[id(p)] = out[n]
return out
def copy(self):
"""
Returns a copy of the state.
"""
if self._global_condition is not None:
raise SimStateError("global condition was not cleared before state.copy().")
c_plugins = self._copy_plugins()
state = SimState(project=self.project, arch=self.arch, plugins=c_plugins, options=self.options, mode=self.mode, os_name=self.os_name)
state.uninitialized_access_handler = self.uninitialized_access_handler
state._special_memory_filler = self._special_memory_filler
state.ip_constraints = self.ip_constraints
return state
def merge(self, *others, **kwargs):
"""
Merges this state with the other states. Returns the merging result, merged state, and the merge flag.
:param states: the states to merge
:param merge_conditions: a tuple of the conditions under which each state holds
:param common_ancestor: a state that represents the common history between the states being merged. Usually it
is only available when EFFICIENT_STATE_MERGING is enabled, otherwise weak-refed states
might be dropped from state history instances.
:param plugin_whitelist: a list of plugin names that will be merged. If this option is given and is not None,
any plugin that is not inside this list will not be merged, and will be created as a
fresh instance in the new state.
:param common_ancestor_history:
a SimStateHistory instance that represents the common history between the states being
merged. This is to allow optimal state merging when EFFICIENT_STATE_MERGING is
disabled.
:return: (merged state, merge flag, a bool indicating if any merging occured)
"""
merge_conditions = kwargs.pop('merge_conditions', None)
common_ancestor = kwargs.pop('common_ancestor', None)
plugin_whitelist = kwargs.pop('plugin_whitelist', None)
common_ancestor_history = kwargs.pop('common_ancestor_history', None)
if len(kwargs) != 0:
raise ValueError("invalid arguments: %s" % kwargs.keys())
if merge_conditions is None:
# TODO: maybe make the length of this smaller? Maybe: math.ceil(math.log(len(others)+1, 2))
merge_flag = self.se.BVS("state_merge_%d" % merge_counter.next(), 16)
merge_values = range(len(others)+1)
merge_conditions = [ merge_flag == b for b in merge_values ]
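            # Each incoming state i is guarded by "merge_flag == i"; the flag is a fresh
            # 16-bit symbolic variable, so any concrete model selects exactly one source state.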
else:
merge_conditions = [
(self.se.true if len(mc) == 0 else self.se.And(*mc)) for mc in merge_conditions
]
if len(set(o.arch.name for o in others)) != 1:
raise SimMergeError("Unable to merge due to different architectures.")
all_plugins = set(self.plugins.keys()) | set.union(*(set(o.plugins.keys()) for o in others))
if plugin_whitelist is not None:
all_plugins = all_plugins.intersection(set(plugin_whitelist))
merged = self.copy()
merging_occurred = False
# fix parent
merged.history.parent = self.history
# plugins
for p in all_plugins:
our_plugin = merged.plugins[p] if p in merged.plugins else None
their_plugins = [ (pl.plugins[p] if p in pl.plugins else None) for pl in others ]
plugin_classes = (
set([our_plugin.__class__]) | set(pl.__class__ for pl in their_plugins)
) - set([None.__class__])
if len(plugin_classes) != 1:
raise SimMergeError(
"There are differing plugin classes (%s) for plugin %s" % (plugin_classes, p)
)
plugin_class = plugin_classes.pop()
our_filled_plugin = our_plugin if our_plugin is not None else merged.register_plugin(
p, plugin_class()
)
their_filled_plugins = [
(tp if tp is not None else t.register_plugin(p, plugin_class()))
for t,tp in zip(others, their_plugins)
]
plugin_common_ancestor = (
common_ancestor.plugins[p] if
(common_ancestor is not None and p in common_ancestor.plugins) else
None
)
if plugin_common_ancestor is None and \
plugin_class is SimStateHistory and \
common_ancestor_history is not None:
plugin_common_ancestor = common_ancestor_history
plugin_state_merged = our_filled_plugin.merge(
their_filled_plugins, merge_conditions, common_ancestor=plugin_common_ancestor,
)
if plugin_state_merged:
l.debug('Merging occurred in %s', p)
merging_occurred = True
merged.add_constraints(merged.se.Or(*merge_conditions))
return merged, merge_conditions, merging_occurred
def widen(self, *others):
"""
Perform a widening between self and other states
:param others:
:return:
"""
if len(set(frozenset(o.plugins.keys()) for o in others)) != 1:
raise SimMergeError("Unable to widen due to different sets of plugins.")
if len(set(o.arch.name for o in others)) != 1:
raise SimMergeError("Unable to widen due to different architectures.")
widened = self.copy()
widening_occurred = False
# plugins
for p in self.plugins:
if p in ('solver_engine', 'unicorn'):
continue
plugin_state_widened = widened.plugins[p].widen([_.plugins[p] for _ in others])
if plugin_state_widened:
                l.debug('Widening occurred in %s', p)
widening_occurred = True
return widened, widening_occurred
#############################################
### Accessors for tmps, registers, memory ###
#############################################
def reg_concrete(self, *args, **kwargs):
"""
Returns the contents of a register but, if that register is symbolic,
raises a SimValueError.
"""
e = self.registers.load(*args, **kwargs)
if self.se.symbolic(e):
raise SimValueError("target of reg_concrete is symbolic!")
return self.se.eval(e)
def mem_concrete(self, *args, **kwargs):
"""
Returns the contents of a memory but, if the contents are symbolic,
raises a SimValueError.
"""
e = self.memory.load(*args, **kwargs)
if self.se.symbolic(e):
raise SimValueError("target of mem_concrete is symbolic!")
return self.se.eval(e)
###############################
### Stack operation helpers ###
###############################
@arch_overrideable
def stack_push(self, thing):
"""
Push 'thing' to the stack, writing the thing to memory and adjusting the stack pointer.
"""
        # adjust sp by the architecture's stack_change (negative on a descending stack)
sp = self.regs.sp + self.arch.stack_change
self.regs.sp = sp
return self.memory.store(sp, thing, endness=self.arch.memory_endness)
@arch_overrideable
def stack_pop(self):
"""
Pops from the stack and returns the popped thing. The length will be the architecture word size.
"""
sp = self.regs.sp
self.regs.sp = sp - self.arch.stack_change
return self.memory.load(sp, self.arch.bits / 8, endness=self.arch.memory_endness)
@arch_overrideable
def stack_read(self, offset, length, bp=False):
"""
Reads length bytes, at an offset into the stack.
:param offset: The offset from the stack pointer.
:param length: The number of bytes to read.
:param bp: If True, offset from the BP instead of the SP. Default: False.
"""
sp = self.regs.bp if bp else self.regs.sp
return self.memory.load(sp+offset, length, endness=self.arch.memory_endness)
###############################
### Other helpful functions ###
###############################
def make_concrete_int(self, expr):
if isinstance(expr, (int, long)):
return expr
if not self.se.symbolic(expr):
return self.se.eval(expr)
v = self.se.eval(expr)
self.add_constraints(expr == v)
return v
# This handles the preparation of concrete function launches from abstract functions.
@arch_overrideable
def prepare_callsite(self, retval, args, cc='wtf'):
#TODO
pass
def _stack_values_to_string(self, stack_values):
"""
Convert each stack value to a string
:param stack_values: A list of values
:return: The converted string
"""
strings = [ ]
for stack_value in stack_values:
if self.se.symbolic(stack_value):
concretized_value = "SYMBOLIC - %s" % repr(stack_value)
else:
if len(self.se.eval_upto(stack_value, 2)) == 2:
concretized_value = repr(stack_value)
else:
concretized_value = repr(stack_value)
strings.append(concretized_value)
return " .. ".join(strings)
def dbg_print_stack(self, depth=None, sp=None):
"""
Only used for debugging purposes.
Return the current stack info in formatted string. If depth is None, the
current stack frame (from sp to bp) will be printed out.
"""
var_size = self.arch.bits / 8
sp_sim = self.regs._sp
bp_sim = self.regs._bp
if self.se.symbolic(sp_sim) and sp is None:
result = "SP is SYMBOLIC"
elif self.se.symbolic(bp_sim) and depth is None:
result = "BP is SYMBOLIC"
else:
sp_value = sp if sp is not None else self.se.eval(sp_sim)
if self.se.symbolic(bp_sim):
result = "SP = 0x%08x, BP is symbolic\n" % (sp_value)
bp_value = None
else:
bp_value = self.se.eval(bp_sim)
result = "SP = 0x%08x, BP = 0x%08x\n" % (sp_value, bp_value)
if depth is None:
# bp_value cannot be None here
depth = (bp_value - sp_value) / var_size + 1 # Print one more value
pointer_value = sp_value
for i in xrange(depth):
# For AbstractMemory, we wanna utilize more information from VSA
stack_values = [ ]
if o.ABSTRACT_MEMORY in self.options:
sp = self.regs._sp
segment_sizes = self.memory.get_segments(sp + i * var_size, var_size)
pos = i * var_size
for segment_size in segment_sizes:
stack_values.append(self.stack_read(pos, segment_size, bp=False))
pos += segment_size
else:
stack_values.append(self.stack_read(i * var_size, var_size, bp=False))
# Convert it into a big string!
val = self._stack_values_to_string(stack_values)
if pointer_value == sp_value:
line = "(sp)% 16x | %s" % (pointer_value, val)
elif pointer_value == bp_value:
line = "(bp)% 16x | %s" % (pointer_value, val)
else:
line = "% 20x | %s" % (pointer_value, val)
pointer_value += var_size
result += line + "\n"
return result
#
# Other helper methods
#
def set_mode(self, mode):
self.mode = mode
self.options = set(o.modes[mode])
@property
def thumb(self):
if not self.arch.name.startswith('ARM'):
return False
if self.regs.ip.symbolic:
# return True when IP can *only* be odd
new_state = self.copy()
new_state.add_constraints(new_state.regs.ip % 2 == 1, new_state.regs.ip % 2 != 0)
return new_state.satisfiable()
else:
concrete_ip = self.se.eval(self.regs.ip)
return concrete_ip % 2 == 1
#
# Some pretty fancy global condition stuff!
#
@property
def with_condition(self):
@contextlib.contextmanager
def ctx(c):
old_condition = self._global_condition
try:
new_condition = c if old_condition is None else self.se.And(old_condition, c)
self._global_condition = new_condition
yield
finally:
self._global_condition = old_condition
return ctx
def _adjust_condition(self, c):
if self._global_condition is None:
return c
elif c is None:
return self._global_condition
else:
return self.se.And(self._global_condition, c)
def _adjust_condition_list(self, conditions):
if self._global_condition is None:
return conditions
elif len(conditions) == 0:
return conditions.__class__((self._global_condition,))
else:
return conditions.__class__((self._adjust_condition(self.se.And(*conditions)),))
#
# Compatibility layer
#
@property
def state(self):
return self
@property
def length(self):
return self.history.block_count
@property
def jumpkind(self):
return self.scratch.jumpkind
@property
def last_actions(self):
return self.history.recent_actions
@property
def history_iterator(self):
return self.history.lineage
@property
def addr_trace(self):
return self.history.addr_trace
@property
def trace(self):
return self.history.trace
@property
def targets(self):
return self.history.jump_targets
@property
def guards(self):
return self.history.jump_guards
@property
def jumpkinds(self):
return self.history.jumpkinds
@property
def events(self):
return self.history.events
@property
def actions(self):
return self.history.actions
@property
def reachable(self):
return self.history.reachable()
@deprecated
def trim_history(self):
self.history.trim()
from .state_plugins.symbolic_memory import SimSymbolicMemory
from .state_plugins.fast_memory import SimFastMemory
from .state_plugins.abstract_memory import SimAbstractMemory
from .state_plugins.history import SimStateHistory
from .errors import SimMergeError, SimValueError, SimStateError, SimSolverModeError
from .state_plugins.inspect import BP_AFTER, BP_BEFORE
from .state_plugins.sim_action import SimActionConstraint
from . import sim_options as o<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Interface to random number generators in Rust.
//!
//! This is an experimental library which lives underneath the standard library
//! in its dependency chain. This library is intended to define the interface
//! for random number generation and also provide utilities around doing so. It
//! is not recommended to use this library directly, but rather the official
//! interface through `std::rand`.
#![crate_name = "rand"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/nightly/",
html_playground_url = "http://play.rust-lang.org/")]
#![feature(macro_rules, phase, globs)]
#![no_std]
#![experimental]
#[phase(plugin, link)]
extern crate core;
#[cfg(test)] #[phase(plugin, link)] extern crate std;
#[cfg(test)] #[phase(plugin, link)] extern crate log;
use core::prelude::*;
pub use isaac::{IsaacRng, Isaac64Rng};
pub use chacha::ChaChaRng;
use distributions::{Range, IndependentSample};
use distributions::range::SampleRange;
#[cfg(test)]
static RAND_BENCH_N: u64 = 100;
pub mod distributions;
pub mod isaac;
pub mod chacha;
pub mod reseeding;
mod rand_impls;
/// A type that can be randomly generated using an `Rng`.
pub trait Rand {
/// Generates a random instance of this type using the specified source of
/// randomness.
fn rand<R: Rng>(rng: &mut R) -> Self;
}
/// A random number generator.
pub trait Rng {
/// Return the next random u32.
///
/// This rarely needs to be called directly, prefer `r.gen()` to
/// `r.next_u32()`.
// FIXME #7771: Should be implemented in terms of next_u64
fn next_u32(&mut self) -> u32;
/// Return the next random u64.
///
/// By default this is implemented in terms of `next_u32`. An
/// implementation of this trait must provide at least one of
/// these two methods. Similarly to `next_u32`, this rarely needs
/// to be called directly, prefer `r.gen()` to `r.next_u64()`.
fn next_u64(&mut self) -> u64 {
(self.next_u32() as u64 << 32) | (self.next_u32() as u64)
}
/// Return the next random f32 selected from the half-open
/// interval `[0, 1)`.
///
/// By default this is implemented in terms of `next_u32`, but a
/// random number generator which can generate numbers satisfying
/// the requirements directly can overload this for performance.
/// It is required that the return value lies in `[0, 1)`.
///
/// See `Closed01` for the closed interval `[0,1]`, and
/// `Open01` for the open interval `(0,1)`.
fn next_f32(&mut self) -> f32 {
const MANTISSA_BITS: uint = 24;
const IGNORED_BITS: uint = 8;
const SCALE: f32 = (1u64 << MANTISSA_BITS) as f32;
// using any more than `MANTISSA_BITS` bits will
// cause (e.g.) 0xffff_ffff to correspond to 1
// exactly, so we need to drop some (8 for f32, 11
// for f64) to guarantee the open end.
(self.next_u32() >> IGNORED_BITS) as f32 / SCALE
}
/// Return the next random f64 selected from the half-open
/// interval `[0, 1)`.
///
/// By default this is implemented in terms of `next_u64`, but a
/// random number generator which can generate numbers satisfying
/// the requirements directly can overload this for performance.
/// It is required that the return value lies in `[0, 1)`.
///
/// See `Closed01` for the closed interval `[0,1]`, and
/// `Open01` for the open interval `(0,1)`.
fn next_f64(&mut self) -> f64 {
const MANTISSA_BITS: uint = 53;
const IGNORED_BITS: uint = 11;
const SCALE: f64 = (1u64 << MANTISSA_BITS) as f64;
(self.next_u64() >> IGNORED_BITS) as f64 / SCALE
}
/// Fill `dest` with random data.
///
/// This has a default implementation in terms of `next_u64` and
/// `next_u32`, but should be overridden by implementations that
/// offer a more efficient solution than just calling those
/// methods repeatedly.
///
/// This method does *not* have a requirement to bear any fixed
/// relationship to the other methods, for example, it does *not*
/// have to result in the same output as progressively filling
/// `dest` with `self.gen::<u8>()`, and any such behaviour should
/// not be relied upon.
///
/// This method should guarantee that `dest` is entirely filled
/// with new data, and may panic if this is impossible
/// (e.g. reading past the end of a file that is being used as the
/// source of randomness).
///
/// # Example
///
/// ```rust
/// use std::rand::{task_rng, Rng};
///
/// let mut v = [0u8, .. 13579];
/// task_rng().fill_bytes(&mut v);
/// println!("{}", v.as_slice());
/// ```
fn fill_bytes(&mut self, dest: &mut [u8]) {
// this could, in theory, be done by transmuting dest to a
// [u64], but this is (1) likely to be undefined behaviour for
// LLVM, (2) has to be very careful about alignment concerns,
// (3) adds more `unsafe` that needs to be checked, (4)
// probably doesn't give much performance gain if
// optimisations are on.
let mut count = 0i;
let mut num = 0;
for byte in dest.iter_mut() {
if count == 0 {
// we could micro-optimise here by generating a u32 if
// we only need a few more bytes to fill the vector
// (i.e. at most 4).
num = self.next_u64();
count = 8;
}
*byte = (num & 0xff) as u8;
num >>= 8;
count -= 1;
}
}
/// Return a random value of a `Rand` type.
///
/// # Example
///
/// ```rust
/// use std::rand::{task_rng, Rng};
///
/// let mut rng = task_rng();
/// let x: uint = rng.gen();
/// println!("{}", x);
/// println!("{}", rng.gen::<(f64, bool)>());
/// ```
#[inline(always)]
fn gen<T: Rand>(&mut self) -> T {
Rand::rand(self)
}
/// Return an iterator which will yield an infinite number of randomly
/// generated items.
///
/// # Example
///
/// ```
/// use std::rand::{task_rng, Rng};
///
/// let mut rng = task_rng();
/// let x = rng.gen_iter::<uint>().take(10).collect::<Vec<uint>>();
/// println!("{}", x);
/// println!("{}", rng.gen_iter::<(f64, bool)>().take(5)
/// .collect::<Vec<(f64, bool)>>());
/// ```
fn gen_iter<'a, T: Rand>(&'a mut self) -> Generator<'a, T, Self> {
Generator { rng: self }
}
/// Generate a random value in the range [`low`, `high`).
///
/// This is a convenience wrapper around
/// `distributions::Range`. If this function will be called
/// repeatedly with the same arguments, one should use `Range`, as
/// that will amortize the computations that allow for perfect
/// uniformity, as they only happen on initialization.
///
/// # Panics
///
/// Panics if `low >= high`.
///
/// # Example
///
/// ```rust
/// use std::rand::{task_rng, Rng};<|fim▁hole|> ///
/// let mut rng = task_rng();
/// let n: uint = rng.gen_range(0u, 10);
/// println!("{}", n);
/// let m: f64 = rng.gen_range(-40.0f64, 1.3e5f64);
/// println!("{}", m);
/// ```
fn gen_range<T: PartialOrd + SampleRange>(&mut self, low: T, high: T) -> T {
assert!(low < high, "Rng.gen_range called with low >= high");
Range::new(low, high).ind_sample(self)
}
/// Return a bool with a 1 in n chance of true
///
/// # Example
///
/// ```rust
/// use std::rand::{task_rng, Rng};
///
/// let mut rng = task_rng();
/// println!("{}", rng.gen_weighted_bool(3));
/// ```
fn gen_weighted_bool(&mut self, n: uint) -> bool {
n == 0 || self.gen_range(0, n) == 0
}
/// Return an iterator of random characters from the set A-Z,a-z,0-9.
///
/// # Example
///
/// ```rust
/// use std::rand::{task_rng, Rng};
///
/// let s: String = task_rng().gen_ascii_chars().take(10).collect();
/// println!("{}", s);
/// ```
fn gen_ascii_chars<'a>(&'a mut self) -> AsciiGenerator<'a, Self> {
AsciiGenerator { rng: self }
}
/// Return a random element from `values`.
///
/// Return `None` if `values` is empty.
///
/// # Example
///
/// ```
/// use std::rand::{task_rng, Rng};
///
/// let choices = [1i, 2, 4, 8, 16, 32];
/// let mut rng = task_rng();
/// println!("{}", rng.choose(&choices));
/// assert_eq!(rng.choose(choices[..0]), None);
/// ```
fn choose<'a, T>(&mut self, values: &'a [T]) -> Option<&'a T> {
if values.is_empty() {
None
} else {
Some(&values[self.gen_range(0u, values.len())])
}
}
/// Shuffle a mutable slice in place.
///
/// # Example
///
/// ```rust
/// use std::rand::{task_rng, Rng};
///
/// let mut rng = task_rng();
/// let mut y = [1i, 2, 3];
/// rng.shuffle(&mut y);
/// println!("{}", y.as_slice());
/// rng.shuffle(&mut y);
/// println!("{}", y.as_slice());
/// ```
fn shuffle<T>(&mut self, values: &mut [T]) {
let mut i = values.len();
while i >= 2u {
// invariant: elements with index >= i have been locked in place.
i -= 1u;
// lock element i in place.
values.swap(i, self.gen_range(0u, i + 1u));
}
}
}
/// Iterator which will generate a stream of random items.
///
/// This iterator is created via the `gen_iter` method on `Rng`.
pub struct Generator<'a, T, R:'a> {
rng: &'a mut R,
}
impl<'a, T: Rand, R: Rng> Iterator<T> for Generator<'a, T, R> {
fn next(&mut self) -> Option<T> {
Some(self.rng.gen())
}
}
/// Iterator which will continuously generate random ascii characters.
///
/// This iterator is created via the `gen_ascii_chars` method on `Rng`.
pub struct AsciiGenerator<'a, R:'a> {
rng: &'a mut R,
}
impl<'a, R: Rng> Iterator<char> for AsciiGenerator<'a, R> {
fn next(&mut self) -> Option<char> {
static GEN_ASCII_STR_CHARSET: &'static [u8] =
b"ABCDEFGHIJKLMNOPQRSTUVWXYZ\
abcdefghijklmnopqrstuvwxyz\
0123456789";
Some(*self.rng.choose(GEN_ASCII_STR_CHARSET).unwrap() as char)
}
}
/// A random number generator that can be explicitly seeded to produce
/// the same stream of randomness multiple times.
pub trait SeedableRng<Seed>: Rng {
/// Reseed an RNG with the given seed.
///
/// # Example
///
/// ```rust
/// use std::rand::{Rng, SeedableRng, StdRng};
///
/// let seed: &[_] = &[1, 2, 3, 4];
/// let mut rng: StdRng = SeedableRng::from_seed(seed);
/// println!("{}", rng.gen::<f64>());
/// rng.reseed(&[5, 6, 7, 8]);
/// println!("{}", rng.gen::<f64>());
/// ```
fn reseed(&mut self, Seed);
/// Create a new RNG with the given seed.
///
/// # Example
///
/// ```rust
/// use std::rand::{Rng, SeedableRng, StdRng};
///
/// let seed: &[_] = &[1, 2, 3, 4];
/// let mut rng: StdRng = SeedableRng::from_seed(seed);
/// println!("{}", rng.gen::<f64>());
/// ```
fn from_seed(seed: Seed) -> Self;
}
/// An Xorshift[1] random number
/// generator.
///
/// The Xorshift algorithm is not suitable for cryptographic purposes
/// but is very fast. If you do not know for sure that it fits your
/// requirements, use a more secure one such as `IsaacRng` or `OsRng`.
///
/// [1]: Marsaglia, George (July 2003). ["Xorshift
/// RNGs"](http://www.jstatsoft.org/v08/i14/paper). *Journal of
/// Statistical Software*. Vol. 8 (Issue 14).
#[allow(missing_copy_implementations)]
pub struct XorShiftRng {
x: u32,
y: u32,
z: u32,
w: u32,
}
impl Clone for XorShiftRng {
fn clone(&self) -> XorShiftRng {
XorShiftRng {
x: self.x,
y: self.y,
z: self.z,
w: self.w,
}
}
}
impl XorShiftRng {
/// Creates a new XorShiftRng instance which is not seeded.
///
/// The initial values of this RNG are constants, so all generators created
/// by this function will yield the same stream of random numbers. It is
/// highly recommended that this is created through `SeedableRng` instead of
/// this function
pub fn new_unseeded() -> XorShiftRng {
XorShiftRng {
x: 0x193a6754,
y: 0xa8a7d469,
z: 0x97830e05,
w: 0x113ba7bb,
}
}
}
impl Rng for XorShiftRng {
#[inline]
fn next_u32(&mut self) -> u32 {
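        // One step of Marsaglia's xorshift128: the state words rotate (x <- y <- z <- w)
        // and the outgoing word is folded into the new w via shifted XORs.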
let x = self.x;
let t = x ^ (x << 11);
self.x = self.y;
self.y = self.z;
self.z = self.w;
let w = self.w;
self.w = w ^ (w >> 19) ^ (t ^ (t >> 8));
self.w
}
}
impl SeedableRng<[u32, .. 4]> for XorShiftRng {
/// Reseed an XorShiftRng. This will panic if `seed` is entirely 0.
fn reseed(&mut self, seed: [u32, .. 4]) {
assert!(!seed.iter().all(|&x| x == 0),
"XorShiftRng.reseed called with an all zero seed.");
self.x = seed[0];
self.y = seed[1];
self.z = seed[2];
self.w = seed[3];
}
/// Create a new XorShiftRng. This will panic if `seed` is entirely 0.
fn from_seed(seed: [u32, .. 4]) -> XorShiftRng {
assert!(!seed.iter().all(|&x| x == 0),
"XorShiftRng::from_seed called with an all zero seed.");
XorShiftRng {
x: seed[0],
y: seed[1],
z: seed[2],
w: seed[3]
}
}
}
impl Rand for XorShiftRng {
fn rand<R: Rng>(rng: &mut R) -> XorShiftRng {
let mut tuple: (u32, u32, u32, u32) = rng.gen();
while tuple == (0, 0, 0, 0) {
tuple = rng.gen();
}
let (x, y, z, w) = tuple;
XorShiftRng { x: x, y: y, z: z, w: w }
}
}
/// A wrapper for generating floating point numbers uniformly in the
/// open interval `(0,1)` (not including either endpoint).
///
/// Use `Closed01` for the closed interval `[0,1]`, and the default
/// `Rand` implementation for `f32` and `f64` for the half-open
/// `[0,1)`.
///
/// # Example
/// ```rust
/// use std::rand::{random, Open01};
///
/// let Open01(val) = random::<Open01<f32>>();
/// println!("f32 from (0,1): {}", val);
/// ```
pub struct Open01<F>(pub F);
/// A wrapper for generating floating point numbers uniformly in the
/// closed interval `[0,1]` (including both endpoints).
///
/// Use `Open01` for the open interval `(0,1)`, and the default
/// `Rand` implementation of `f32` and `f64` for the half-open
/// `[0,1)`.
///
/// # Example
///
/// ```rust
/// use std::rand::{random, Closed01};
///
/// let Closed01(val) = random::<Closed01<f32>>();
/// println!("f32 from [0,1]: {}", val);
/// ```
pub struct Closed01<F>(pub F);
#[cfg(not(test))]
mod std {
pub use core::{option, fmt}; // panic!()
}
#[cfg(test)]
mod test {
use std::rand;
pub struct MyRng<R> { inner: R }
impl<R: rand::Rng> ::Rng for MyRng<R> {
fn next_u32(&mut self) -> u32 {
fn next<T: rand::Rng>(t: &mut T) -> u32 {
use std::rand::Rng;
t.next_u32()
}
next(&mut self.inner)
}
}
pub fn rng() -> MyRng<rand::TaskRng> {
MyRng { inner: rand::task_rng() }
}
pub fn weak_rng() -> MyRng<rand::XorShiftRng> {
MyRng { inner: rand::weak_rng() }
}
}<|fim▁end|> | |
<|file_name|>DetuneStereoParser.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2012 Carl Green
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package info.carlwithak.mpxg2.sysex.effects.algorithms;
import info.carlwithak.mpxg2.model.effects.algorithms.DetuneStereo;
/**
* Class to parse parameter data for Detune (S) effect.
*
* @author Carl Green<|fim▁hole|>
public static DetuneStereo parse(byte[] effectParameters) {
DetuneStereo detuneStereo = new DetuneStereo();
int mix = effectParameters[0] + effectParameters[1] * 16;
detuneStereo.mix.setValue(mix);
int level = effectParameters[2] + effectParameters[3] * 16;
detuneStereo.level.setValue(level);
int tune = effectParameters[4] + effectParameters[5] * 16;
detuneStereo.tune.setValue(tune);
int optimize = effectParameters[6] + effectParameters[7] * 16;
detuneStereo.optimize.setValue(optimize);
int preDelay = effectParameters[8] + effectParameters[9] * 16;
detuneStereo.preDelay.setValue(preDelay);
return detuneStereo;
}
}<|fim▁end|> | */
public class DetuneStereoParser { |
<|file_name|>enter.rs<|end_file_name|><|fim▁begin|>use crate::errors::Result;
use crate::kernel::execve;
use crate::kernel::groups::syscall_group_from_sysnum;
use crate::kernel::groups::SyscallGroup::*;
use crate::kernel::heap::*;
use crate::kernel::ptrace::*;
use crate::kernel::socket::*;
use crate::kernel::standard::*;
use crate::process::proot::InfoBag;
use crate::process::tracee::Tracee;
use crate::register::Original;
pub fn translate(info_bag: &InfoBag, tracee: &mut Tracee) -> Result<()> {
let sys_num = tracee.regs.get_sys_num(Original);
let sys_type = syscall_group_from_sysnum(sys_num);
match sys_type {
Accept => accept::enter(),
BindConnect => bind_connect::enter(),
Brk => brk::enter(),
Chdir => chdir::enter(tracee),
ChmodAccessMkNodAt => chmod_access_mknod_at::enter(tracee),
DirLinkAttr => dir_link_attr::enter(tracee),
Execve => execve::enter(tracee, &info_bag.loader),
GetCwd => getcwd::enter(tracee),
GetSockOrPeerName => get_sockorpeer_name::enter(),
InotifyAddWatch => inotify_add_watch::enter(),
Link => link_rename::enter(tracee),
LinkAt => link_at::enter(tracee),
Mount => mount::enter(),
Open => open::enter(tracee),
OpenAt => open_at::enter(tracee),<|fim▁hole|> Rename => link_rename::enter(tracee),
RenameAt => rename_at::enter(tracee),
SocketCall => socketcall::enter(),
StandardSyscall => standard_syscall::enter(tracee),
StatAt => stat_at::enter(tracee),
SymLink => sym_link::enter(tracee),
SymLinkAt => sym_link_at::enter(tracee),
Wait => wait::enter(),
UnlinkMkdirAt => unlink_mkdir_at::enter(tracee),
_ => Ok(()),
}
}<|fim▁end|> | PivotRoot => pivot_root::enter(),
Ptrace => ptrace::enter(),
ReadLink => dir_link_attr::enter(tracee),
ReadLinkAt => unlink_mkdir_at::enter(tracee), |
<|file_name|>TLDList.cpp<|end_file_name|><|fim▁begin|>// $Id$
// QtLobby released under the GPLv3, see COPYING for details.
#include "TLDList.h"
QMap<QString, QString>* TLDList::TLDMap;
TLDList::TLDList( QObject* parent) : QObject(parent){
if ( TLDMap == NULL ) {
TLDMap = new QMap<QString, QString>;
QString TLDString = tr("AC:Ascension Island\n"
"AD:Andorra\n"
"AE:United Arab Emirates\n"
"AERO:Aircraft-related\n"
"AF:Afghanistan\n"
"AG:Antigua and Barbuda\n"
"AI:Anguilla\n"
"AL:Albania\n"
"AM:Armenia\n"
"AN:Netherland Antilles\n"
"AO:Angola\n"
"AQ:Antarctica\n"
"AR:Argentina\n"
"ARPA:Address and Routing Parameter Area\n"
"AS:American Samoa\n"
"AT:Austria\n"
"AU:Australia\n"
"AW:Aruba\n"
"AZ:Azerbaijan\n"
"BA:Bosnia-Herzegovina\n"
"BB:Barbados\n"
"BE:Belgium\n"
"BF:Burkina Faso\n"
"BG:Bulgaria\n"
"BH:Bahrain\n"
"BI:Burundi\n"
"BIz:Business\n"
"BJ:Benin\n"
"BM:Bermuda\n"
"BN:Brunei Darussalam\n"
"BO:Bolivia\n"
"BR:Brazil\n"
"BS:Bahamas\n"
"BT:Bhutan\n"
"BW:Botswana\n"
"BY:Belarus\n"
"BZ:Belize\n"
"CA:Canada\n"
"CC:Cocos (Keeling) Islands\n"
"CD:Democratic Republic of Congo\n"
"CF:Central African Republic\n"
"CG:Congo\n"
"CH:Switzerland\n"
"CI:Ivory Coast\n"
"CK:Cook Islands\n"
"CL:Chile\n"
"CM:Cameroon\n"
"CN:China\n"
"CO:Colombia\n"
"COM:Commercial\n"
"COOP:Cooperative-related\n"
"CR:Costa Rica\n"
"CU:Cuba\n"
"CV:Cape Verde\n"
"CX:Christmas Island\n"
"CY:Cyprus\n"
"CZ:Czech Republic\n"
"DE:Germany\n"
"DJ:Djibouti\n"
"DK:Denmark\n"
"DM:Dominica\n"
"DO:Dominican Republic\n"
"DZ:Algeria\n"
"EC:Ecuador\n"
"EDU:Educational\n"
"EE:Estonia\n"
"EG:Egypt\n"
"ES:Spain\n"
"ET:Ethiopia\n"
"FI:Finland\n"
"FJ:Fiji\n"
"FK:Falkland Islands (Malvinas)\n"
"FM:Micronesia\n"
"FO:Faroe Islands\n"
"FR:France\n"
"GB:Great Britan\n"
"GA:Gabon\n"
"GD:Grenada\n"
"GE:Georgia\n"
"GF:French Guyana\n"
"GH:Ghana\n"
"GI:Gibraltar\n"
"GL:Greenland\n"
"GM:Gambia\n"<|fim▁hole|> "GR:Greece\n"
"GT:Guatemala\n"
"GU:Guam (US)\n"
"GY:Guyana\n"
"HK:Hong Kong\n"
"HM:Heard and McDonald Islands\n"
"HN:Honduras\n"
"HR:Croatia (Hrvatska)\n"
"HU:Hungary\n"
"ID:Indonesia\n"
"IE:Ireland\n"
"IL:Israel\n"
"IN:India\n"
"INFO:General-purpose TLD\n"
"INT:International\n"
"IO:British Indian Ocean Territory\n"
"IR:Islamic Republic of Iran\n"
"IS:Iceland\n"
"IT:Italy\n"
"JM:Jamaica\n"
"JO:Jordan\n"
"JP:Japan\n"
"KE:Kenya\n"
"KG:Kyrgyzstan\n"
"KH:Cambodia\n"
"KI:Kiribati\n"
"KM:Comoros\n"
"KN:Saint Kitts Nevis Anguilla\n"
"KR:South Korea\n"
"KW:Kuwait\n"
"KY:Cayman Islands\n"
"KZ:Kazakhstan\n"
"LA:Laos (People's Democratic Republic)\n"
"LB:Lebanon\n"
"LC:Saint Lucia\n"
"LI:Liechtenstein\n"
"LK:Sri Lanka\n"
"LR:Liberia\n"
"LS:Lesotho\n"
"LT:Lithuania\n"
"LU:Luxembourg\n"
"LV:Latvia\n"
"LY:Libya (Libyan Arab Jamahiriya)\n"
"MA:Morocco\n"
"MC:Monaco\n"
"MD:Moldavia\n"
"MG:Madagascar\n"
"MH:Marshall Islands\n"
"MIL:US Military\n"
"MK:Macedonia\n"
"ML:Mali\n"
"MM:Myanmar\n"
"MN:Mongolia\n"
"MO:Macau\n"
"MP:Northern Mariana Islands\n"
"MQ:Martinique (French)\n"
"MR:Mauritania\n"
"MS:Montserrat\n"
"MT:Malta\n"
"MU:Mauritius\n"
"MUseum:Museum-related\n"
"MV:Maldives\n"
"MW:Malawi\n"
"MX:Mexico\n"
"MY:Malaysia\n"
"MZ:Mozambique\n"
"NA:Namibia\n"
"NAME:Personal name\n"
"NC:New Caledonia (French)\n"
"NE:Niger\n"
"NET:Network Infrastructure\n"
"NF:Norfolk Island\n"
"NG:Nigeria\n"
"NI:Nicaragua\n"
"NL:Netherlands\n"
"NO:Norway\n"
"NP:Nepal\n"
"NR:Nauru\n"
"NU:Niue\n"
"NZ:New Zealand\n"
"OM:Oman\n"
"ORG:Nonprofit\n"
"PA:Panama\n"
"PE:Peru\n"
"PF:French Polynesia\n"
"PF:Polynesia (French)\n"
"PG:Papua New Guinea\n"
"PH:Philippines\n"
"PK:Pakistan\n"
"PL:Poland\n"
"PM:Saint Pierre and Miquelon\n"
"PN:Pitcairn\n"
"PR:Puerto Rico (US)\n"
"PRo:Professional domain\n"
"PS:Palestina\n"
"PT:Portugal\n"
"PW:Palau\n"
"PY:Paraguay\n"
"QA:Qatar\n"
"RE:Reunion (French)\n"
"RO:Romania\n"
"RU:Russian Federation\n"
"RW:Rwanda\n"
"SA:Saudi Arabia\n"
"SB:Solomon Islands\n"
"SC:Seychelles\n"
"SE:Sweden\n"
"SG:Singapore\n"
"SH:Saint Helena\n"
"SI:Slovenia\n"
"SK:Slovak Republic (Slovakia)\n"
"SL:Sierra Leone\n"
"SM:San Marino\n"
"SN:Senegal\n"
"SO:Somalia\n"
"SR:Surinam\n"
"ST:Saint Tome and Principe\n"
"SU:Soviet Union\n"
"SV:El Salvador\n"
"SZ:Swaziland\n"
"TC:Turks and Caicos Islands\n"
"TD:Chad\n"
"TF:French Southern Territories\n"
"TG:Togo\n"
"TH:Thailand\n"
"TJ:Tajikistan\n"
"TK:Tokelau\n"
"TM:Turkmenistan\n"
"TN:Tunisia\n"
"TO:Tonga\n"
"TP:East Timor\n"
"TR:Turkey\n"
"TT:Trinidad and Tobago\n"
"TV:Tuvalu\n"
"TW:Taiwan\n"
"TZ:Tanzania\n"
"UA:Ukraine\n"
"UG:Uganda\n"
"UK:United Kingdom\n"
"US:United States of America\n"
"UY:Uruguay\n"
"UZ:Uzbekistan\n"
"VA:Vatican City State\n"
"VC:Saint Vincent and the Grenadines\n"
"VE:Venezuela\n"
"VG:Virgin Islands (British)\n"
"VI:Virgin Islands (US)\n"
"VN:Vietnam\n"
"VU:Vanuatu\n"
"WS:Samoa\n"
"YE:Yemen\n"
"YU:Yugoslavia\n"
"ZA:South Africa\n"
"ZM:Zambia\n"
"ZR:Zaire\n"
"ZW:Zimbabwe\n"
"XX:?\n"
);
QStringList list = TLDString.split( "\n" );
foreach( QString s, list ) {
TLDMap->insert( s.section( ":", 0, 0 ), s.section( ":", 1, 1 ) );
}
}
}
TLDList::~TLDList() {
}<|fim▁end|> | "GN:Guinea\n"
"GOV:Government\n"
"GP:Guadeloupe (French)\n"
"GQ:Equatorial Guinea\n" |
<|file_name|>Account.cpp<|end_file_name|><|fim▁begin|>/*
* Account.cpp
*
* Created on: 20 March 2015
* Author: cyosp
*/
#include <com/cyosp/mpa/core/Account.hpp>
namespace mpa
{
bool Account::isAccountAlreadyExisting( string accountName )
{
bool ret = false;
// TODO : try to use BDD facilities
// Get accounts list
vector<mpapo::Account> accounts = all();
// Get accounts iterator
vector<mpapo::Account>::iterator it = accounts.begin();
while (it != accounts.end() && ! ret )
{
if ((*it).name == accountName)
ret = true;
else
it++;
}
return ret;
}
mpapo::Account & Account::add( string accountName )
{
mpapo::Account * ret = NULL;
ret = new mpapo::Account( MPA::getInstance()->getMPAPO() );
ret->setName( accountName );
//sleep(6);
ret->balance = 0;
ret->initializeVersion();
ret->store();
MPA_LOG_TRIVIAL(trace,"Account added, id=" + (* ret).id.value());
return * ret;
}
vector<mpapo::Account> Account::all()
{
return select<mpapo::Account>( MPA::getInstance()->getMPAPO() ).all(); //.orderBy(mpapo::Account::Name).all();
}
bool Account::del(int id , int version )
{
bool ret = false;
MPA_LOG_TRIVIAL(trace,"Account to delete:" + StrUtil::int2string( id )+" with version: " + StrUtil::int2string(version ));
try
{
mpapo::Account accountToDel = get( id );
if( accountToDel.isCorrectVersion( version ) )
{
MPA_LOG_TRIVIAL(trace,"Account found");
if( accountToDel.operations().get().all().size() > 0 ) throw mpa_exception::MsgNotTranslated( IMPOSSIBLE_REMOVE_THERE_ARE_AGAIN_OPERATIONS );
if( accountToDel.providers().get().all().size() > 0 ) throw mpa_exception::MsgNotTranslated( IMPOSSIBLE_REMOVE_THERE_ARE_AGAIN_PROVIDERS );
if( accountToDel.categories().get().all().size() > 0 ) throw mpa_exception::MsgNotTranslated( IMPOSSIBLE_REMOVE_THERE_ARE_AGAIN_CATEGORIES );
accountToDel.del();
}
else throw mpa_exception::MsgNotTranslated( OPERATION_IMPOSSIBLE_BECAUSE_DATA_HAVE_CHANGED );
}
catch (NotFound & e)
{
throw mpa_exception::MsgNotTranslated( ACCOUNT_DOESNT_EXIST );
}
return ret;
}
// Get acount by ID
mpapo::Account Account::get( int id )
{
// BOOST_LOG_TRIVIAL(trace) << "Account retrieved" << std::endl;
return select<mpapo::Account>( MPA::getInstance()->getMPAPO() , mpapo::Account::Id == id ).one();
}
// Rename account
mpapo::Account Account::renameAccount( int accountId , int accountVersionToRename , string newAccountName )
{
//MPA_LOG_TRIVIAL( trace , "Start" );
try
{
mpapo::Account account = get( accountId );
if( account.isCorrectVersion( accountVersionToRename ) )
{
account.setName( newAccountName );
account.store();
//MPA_LOG_TRIVIAL( trace , "End" );
// Return is here because there is no empty constructor for mpapo::Account::Account()
return account;
}
else throw mpa_exception::MsgNotTranslated( OPERATION_IMPOSSIBLE_BECAUSE_DATA_HAVE_CHANGED );
}
catch (NotFound & e)
{
throw mpa_exception::MsgNotTranslated( ACCOUNT_DOESNT_EXIST );
}
}
Account::~Account()<|fim▁hole|>} /* namespace mpa */<|fim▁end|> | {
}
|
<|file_name|>cell_renderer_toggle.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2015, The Rust-GNOME Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
//! Renders a toggle button in a cell
use ffi;<|fim▁hole|>struct_Widget!(CellRendererToggle);
impl CellRendererToggle {
pub fn new() -> Option<CellRendererToggle> {
let tmp_pointer = unsafe { ffi::gtk_cell_renderer_toggle_new() as *mut ffi::C_GtkWidget };
check_pointer!(tmp_pointer, CellRendererToggle)
}
pub fn get_radio(&self) -> bool {
unsafe {
to_bool(ffi::gtk_cell_renderer_toggle_get_radio(
self.pointer as *mut ffi::C_GtkCellRendererToggle))
}
}
pub fn set_radio(&self, radio: bool) -> () {
unsafe {
ffi::gtk_cell_renderer_toggle_set_radio(
self.pointer as *mut ffi::C_GtkCellRendererToggle, to_gboolean(radio));
}
}
pub fn get_active(&self) -> bool {
unsafe {
to_bool(ffi::gtk_cell_renderer_toggle_get_active(
self.pointer as *mut ffi::C_GtkCellRendererToggle))
}
}
pub fn set_active(&self, active: bool) -> () {
unsafe {
ffi::gtk_cell_renderer_toggle_set_active(
self.pointer as *mut ffi::C_GtkCellRendererToggle, to_gboolean(active));
}
}
}
impl_drop!(CellRendererToggle);
impl_TraitWidget!(CellRendererToggle);
impl ::CellRendererTrait for CellRendererToggle {}<|fim▁end|> | use glib::{to_bool, to_gboolean};
|
<|file_name|>playvideo.py<|end_file_name|><|fim▁begin|>import urlparse
import sys,urllib
import xbmc, xbmcgui, xbmcaddon, xbmcplugin
import urlresolver
base_url = sys.argv[0]
addon_handle = int(sys.argv[1])
args = urlparse.parse_qs(sys.argv[2][1:])
_addon = xbmcaddon.Addon()
_icon = _addon.getAddonInfo('icon')
def build_url(query):
return base_url + '?' + urllib.urlencode(query)
def resolve_url(url):
duration=7500 #in milliseconds
message = "Cannot Play URL"
stream_url = urlresolver.HostedMediaFile(url=url).resolve()
# If urlresolver returns false then the video url was not resolved.
if not stream_url:
dialog = xbmcgui.Dialog()
dialog.notification("URL Resolver Error", message, xbmcgui.NOTIFICATION_INFO, duration)
return False
else:
return stream_url
def play_video(path):
"""
Play a video by the provided path.
:param path: str
"""
# Create a playable item with a path to play.
play_item = xbmcgui.ListItem(path=path)
vid_url = play_item.getfilename()
stream_url = resolve_url(vid_url)
if stream_url:
play_item.setPath(stream_url)
# Pass the item to the Kodi player.
xbmcplugin.setResolvedUrl(addon_handle, True, listitem=play_item)
<|fim▁hole|>mode = args.get('mode', None)
if mode is None:
video_play_url = "http://www.vidsplay.com/wp-content/uploads/2017/04/alligator.mp4"
url = build_url({'mode' :'play', 'playlink' : video_play_url})
li = xbmcgui.ListItem('Play Video 1', iconImage='DefaultVideo.png')
li.setProperty('IsPlayable' , 'true')
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url, listitem=li)
video_play_url = "https://www.youtube.com/watch?v=J9d9UrK0Jsw"
url = build_url({'mode' :'play', 'playlink' : video_play_url})
li = xbmcgui.ListItem('Play Video 2', iconImage='DefaultVideo.png')
li.setProperty('IsPlayable' , 'true')
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url, listitem=li)
video_play_url = "www.reddit.com"
url = build_url({'mode' :'play', 'playlink' : video_play_url})
li = xbmcgui.ListItem('Play Video 3', iconImage='DefaultVideo.png')
li.setProperty('IsPlayable' , 'true')
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url, listitem=li)
xbmcplugin.endOfDirectory(addon_handle)
elif mode[0] == 'play':
final_link = args['playlink'][0]
play_video(final_link)<|fim▁end|> | # addon kicks in
|
<|file_name|>test_comment.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import unittest
import pykintone
from pykintone.model import kintoneModel
import tests.envs as envs
class TestAppModelSimple(kintoneModel):
def __init__(self):
super(TestAppModelSimple, self).__init__()
self.my_key = ""
self.stringField = ""
class TestComment(unittest.TestCase):
def test_comment(self):
app = pykintone.load(envs.FILE_PATH).app()
model = TestAppModelSimple()
model.my_key = "comment_test"
model.stringField = "comment_test_now"
result = app.create(model)
        self.assertTrue(result.ok)  # confirm the record used for the comment test was created
_record_id = result.record_id
# create comment
r_created = app.comment(_record_id).create("コメントのテスト")
self.assertTrue(r_created.ok)
        # requires that an Administrator user is registered in kintone
r_created_m = app.comment(_record_id).create("メンションのテスト", [("Administrator", "USER")])
self.assertTrue(r_created_m.ok)
# select comment
r_selected = app.comment(_record_id).select(True, 0, 10)
self.assertTrue(r_selected.ok)
        self.assertEqual(2, len(r_selected.raw_comments))
comments = r_selected.comments()
        self.assertEqual(1, len(comments[-1].mentions))
# delete comment<|fim▁hole|> r_deleted = app.comment(_record_id).delete(c.comment_id)
self.assertTrue(r_deleted.ok)
r_selected = app.comment(_record_id).select()
self.assertEqual(0, len(r_selected.raw_comments))
# done test
app.delete(_record_id)<|fim▁end|> | for c in comments: |
<|file_name|>appNotifications.ts<|end_file_name|><|fim▁begin|>export interface AppNotification {
id: string;
severity: AppNotificationSeverity;
icon: string;
title: string;
text: string;
component?: React.ReactElement;
timeout: AppNotificationTimeout;
}
export enum AppNotificationSeverity {<|fim▁hole|> Warning = 'warning',
Error = 'error',
Info = 'info',
}
export enum AppNotificationTimeout {
Warning = 5000,
Success = 3000,
Error = 7000,
}
export interface AppNotificationsState {
byId: Record<string, AppNotification>;
}<|fim▁end|> | Success = 'success', |
<|file_name|>groups.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# author: @netmanchris
# This section imports required libraries
import requests
import json
from pyhpeimc.plat.device import *
HEADERS = {'Accept': 'application/json', 'Content-Type':
'application/json', 'Accept-encoding': 'application/json'}
"""
This section deals with HPE IMC Custom View functions
"""
def get_custom_views(auth: object, url: object, name: object = None, headers: object = HEADERS) -> object:
"""
    function takes auth and url and returns a list of dictionaries of custom views from an HPE IMC. The optional
    name argument will return only the specified view.
    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
    :param name: (optional) str containing the name of a specific custom view
    :return: list of dictionaries containing attributes of the custom views
:rtype: list
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.plat.groups import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
>>> all_views = get_custom_views(auth.creds, auth.url)
>>> assert type(all_views) is list
>>> assert 'name' in all_views[0]
>>> non_existant_view = get_custom_views(auth.creds, auth.url, name = '''Doesn't Exist''')
>>> assert non_existant_view == None
"""
if name is None:
get_custom_view_url = '/imcrs/plat/res/view/custom?resPrivilegeFilter=false&desc=false&total=false'
elif name is not None:
get_custom_view_url = '/imcrs/plat/res/view/custom?resPrivilegeFilter=false&name='+name+'&desc=false&total=false'
f_url = url + get_custom_view_url
r = requests.get(f_url, auth=auth, headers=headers)
try:
if r.status_code == 200:
custom_view_list = (json.loads(r.text))
if 'customView' in custom_view_list:
custom_view_list = custom_view_list['customView']
if type(custom_view_list) == dict:
custom_view_list = [custom_view_list]
return custom_view_list
else:
return custom_view_list
except requests.exceptions.RequestException as e:
return "Error:\n" + str(e) + ' get_custom_views: An Error has occured'
def get_custom_view_details(name, auth, url):
"""
    function takes the name of an existing custom view and issues a RESTFUL call to HPE IMC, returning the
    devices contained in that view.
    :param name: str containing the name of the desired custom view
    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
    :return: list of dictionaries containing attributes of the devices in the custom view
:rtype: list
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.plat.groups import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
>>> view_details = get_custom_view_details('My Network View', auth.creds, auth.url)
>>> assert type(view_details) is list
>>> assert 'label' in view_details[0]
"""
view_id = get_custom_views(auth, url, name=name)[0]['symbolId']
get_custom_view_details_url = '/imcrs/plat/res/view/custom/' + str(view_id)
f_url = url + get_custom_view_details_url
r = requests.get(f_url, auth=auth,
headers=HEADERS) # creates the URL using the payload variable as the contents
try:
if r.status_code == 200:
current_devices = (json.loads(r.text))
if 'device' in current_devices:
return current_devices['device']
else:
return []
except requests.exceptions.RequestException as e:
return "Error:\n" + str(e) + ' get_custom_views: An Error has occured'
def create_custom_views(auth, url,name=None, upperview=None):
"""
    function takes the name of a new custom view and issues a RESTFUL call to create it in HPE IMC. The optional
    upperview input nests the new view beneath an existing custom view.
    :param name: string containing the name of the custom view to be created
    :param upperview: (optional) string containing the name of the parent custom view
    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
    :return: str of creation results, e.g. 'View L1 View created successfully'
:rtype: str
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.plat.groups import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
#Create L1 custom view
>>> create_custom_views(auth.creds, auth.url, name='L1 View')
'View L1 View created successfully'
>>> view_1 =get_custom_views( auth.creds, auth.url, name = 'L1 View')
>>> assert type(view_1) is list
>>> assert view_1[0]['name'] == 'L1 View'
    #Create nested custom view
>>> create_custom_views(auth.creds, auth.url, name='L2 View', upperview='L1 View')
'View L2 View created successfully'
>>> view_2 = get_custom_views( auth.creds, auth.url, name = 'L2 View')
>>> assert type(view_2) is list
>>> assert view_2[0]['name'] == 'L2 View'
"""
create_custom_views_url = '/imcrs/plat/res/view/custom?resPrivilegeFilter=false&desc=false&total=false'
f_url = url + create_custom_views_url
if upperview is None:
payload = '''{ "name": "''' + name + '''",
"upLevelSymbolId" : ""}'''
#print (payload)
else:
parentviewid = get_custom_views(auth, url, upperview)[0]['symbolId']
payload = '''{ "name": "'''+name+ '''",
"upLevelSymbolId" : "'''+str(parentviewid)+'''"}'''
#print (payload)
r = requests.post(f_url, data = payload, auth=auth, headers=HEADERS) # creates the URL using the payload variable as the contents
try:
if r.status_code == 201:
return 'View ' + name +' created successfully'
except requests.exceptions.RequestException as e:
return "Error:\n" + str(e) + ' get_custom_views: An Error has occured'
#TODO Need to add tests and examples for add_devs_custom_views
def add_devs_custom_views(custom_view_name, dev_list, auth, url):
"""
    function takes a list of devIDs from devices discovered in the HPE IMC platform and issues a RESTFUL call to
    add those devices to a specific custom view in HPE IMC.
    :param custom_view_name: str containing the name of the target custom view
    :param dev_list: list containing the devID of all devices to be contained in this custom view.
    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
    :return: str describing the result of the operation
:rtype: str
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.plat.groups import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
"""
view_id = get_custom_views(auth, url, name=custom_view_name)[0]['symbolId']
add_devs_custom_views_url = '/imcrs/plat/res/view/custom/'+str(view_id)
payload = '''{"device" : '''+ json.dumps(dev_list) + '''}'''
f_url = url + add_devs_custom_views_url
r = requests.put(f_url, data = payload, auth=auth, headers=HEADERS) # creates the URL using the payload variable as the contents
try:
if r.status_code == 204:
print ('View ' + custom_view_name +' : Devices Successfully Added')<|fim▁hole|> return "Error:\n" + str(e) + ' get_custom_views: An Error has occured'
def delete_custom_view(auth, url, name):
"""
    function takes input of auth, url, and name and issues a RESTFUL call to delete a specific custom view from HPE
    IMC.
    :param name: string containing the name of the custom view to delete
    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
    :return: str of deletion results, e.g. 'View L1 View deleted successfully'
:rtype: str
>>> from pyhpeimc.auth import *
>>> from pyhpeimc.plat.groups import *
>>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
>>> delete_custom_view(auth.creds, auth.url, name = "L1 View")
'View L1 View deleted successfully'
>>> view_1 =get_custom_views( auth.creds, auth.url, name = 'L1 View')
>>> assert view_1 == None
>>> delete_custom_view(auth.creds, auth.url, name = "L2 View")
'View L2 View deleted successfully'
>>> view_2 =get_custom_views( auth.creds, auth.url, name = 'L2 View')
>>> assert view_2 == None
"""
view_id = get_custom_views(auth, url,name )[0]['symbolId']
delete_custom_view_url = '/imcrs/plat/res/view/custom/'+str(view_id)
f_url = url + delete_custom_view_url
r = requests.delete(f_url, auth=auth, headers=HEADERS) # creates the URL using the payload variable as the contents
try:
if r.status_code == 204:
return 'View ' + name +' deleted successfully'
except requests.exceptions.RequestException as e:
return "Error:\n" + str(e) + ' delete_custom_view: An Error has occured'<|fim▁end|> | return r.status_code
except requests.exceptions.RequestException as e: |
<|file_name|>DCHLog.java<|end_file_name|><|fim▁begin|><|fim▁hole|>
import org.apache.logging.log4j.Level;
import cpw.mods.fml.relauncher.FMLRelaunchLog;
/**
* Created by christopher on 02/08/15.
*/
public class DCHLog {
public static final FMLRelaunchLog INSTANCE = FMLRelaunchLog.log;
private DCHLog(){
}
public static void warning( String format, Object... data )
{
log( Level.WARN, format, data );
}
private static void log( Level level, String format, Object... data )
{
FMLRelaunchLog.log( "DCH:", level, format, data );
}
public static void error( Throwable e )
{
severe( "Error: " + e.getClass().getName() + " : " + e.getMessage() );
e.printStackTrace();
}
public static void severe( String format, Object... data )
{
log( Level.ERROR, format, data );
}
public static void blockUpdate( int xCoord, int yCoord, int zCoord, String title)
{
info( title + " @ " + xCoord + ", " + yCoord + ", " + zCoord );
}
public static void info( String format, Object... data )
{
log( Level.INFO, format, data );
}
public static void crafting( String format, Object... data )
{
log( Level.INFO, format, data );
}
}<|fim▁end|> | package com.digitalcraftinghabitat.forgemod.util;
|
<|file_name|>linechart_module.py<|end_file_name|><|fim▁begin|>from modules.chart_module import ChartModule
import tornado.web
import logging
class LineChartModule(ChartModule):
def render(self, raw_data, keys, chart_id="linechart"):
self.chart_id = chart_id
self.chart_data = self.overtime_linechart_data(raw_data, keys)
return self.render_string('modules/linechart.html', chart_id=self.chart_id)
def overtime_linechart_data(self, raw_data, keys,
yearterms_key='fcqs_yearterms',
overtime_key='fcqs_overtime'):
def _overtime_builder(overtime_data, key):
def _transform_overtime_data(yearterm):
value = overtime_data[str(yearterm)][key]
roundto = {
'percent_a': 3,
'percent_b': 3,
'percent_c': 3,
'percent_d': 3,
'percent_f': 3,
'percent_incomplete': 3,
'average_grade': 3
}.get(key, 1)
if value is not None:
return round(value, roundto)
else:
return None
return _transform_overtime_data
def _overtime_dataset_builder(key):
color = {
'course_howmuchlearned_average': (247, 92, 3),
'course_challenge_average': (217, 3, 104),
'courseoverall_average': (130, 2, 99),
'course_priorinterest_average': (4, 167, 119),
'instructor_effectiveness_average': (247, 92, 3),
'instructor_respect_average': (217, 3, 104),
'instructoroverall_average': (130, 2, 99),
'instructor_availability_average': (4, 167, 119),
'TTT_instructoroverall_average': (197, 27, 125),
'OTH_instructoroverall_average': (233, 163, 201),
'TA_instructoroverall_average': (253, 224, 239),
'GR_courseoverall_average': (77, 146, 33),
'UD_courseoverall_average': (161, 215, 106),
'LD_courseoverall_average': (230, 245, 106),
'percent_a': (44, 123, 182),
'percent_b': (171, 217, 233),
'percent_c': (255, 255, 191),
'percent_d': (253, 174, 97),
'percent_f': (215, 25, 28),
'percent_incomplete': (48, 48, 48),
'average_grade': (48, 48, 48),
}.get(key, (48, 48, 48))
yaxis_id = {
'percent_a': 'y-axis-3',
'percent_b': 'y-axis-3',
'percent_c': 'y-axis-3',
'percent_d': 'y-axis-3',
'percent_f': 'y-axis-3',
'percent_incomplete': 'y-axis-3',
'average_grade': 'y-axis-2',
}.get(key, 'y-axis-1')
fill = {
'percent_a': True,
'percent_b': True,<|fim▁hole|> 'percent_d': True,
'percent_f': True,
'percent_incomplete': True,
}.get(key, False)
label = {
'course_howmuchlearned_average': 'Amount Learned',
'course_challenge_average': 'Challenge',
'courseoverall_average': 'Course Overall',
'course_priorinterest_average': 'Prior Interest',
'instructor_effectiveness_average': 'Effectiveness',
'instructor_respect_average': 'Respect',
'instructoroverall_average': 'Instructor Overall',
'instructor_availability_average': 'Availability',
'TTT_instructoroverall_average': 'TTT instructors',
'OTH_instructoroverall_average': 'OTH instructors',
'TA_instructoroverall_average': 'TA instructors',
'GR_courseoverall_average': 'GR Course Overall',
'UD_courseoverall_average': 'UD Course Overall',
'LD_courseoverall_average': 'LD Course Overall',
'percent_a': 'A Grade',
'percent_b': 'B Grade',
'percent_c': 'C Grade',
'percent_d': 'D Grade',
'percent_f': 'F Grade',
'percent_incomplete': 'Incomplete',
'average_grade': 'Average GPA'
}.get(key, '???')
background_alpha = 1.0 if fill else 0.2
return {
'label': label,
'fill': fill,
'yAxisID': yaxis_id,
'backgroundColor': "rgba({0},{1},{2},{background_alpha})".format(*color, background_alpha=background_alpha),
'borderColor': "rgba({0},{1},{2},1)".format(*color),
'pointBackgroundColor': "rgba({0},{1},{2},1)".format(*color),
'pointHoverBackgroundColor': "rgba({0},{1},{2},1)".format(*color),
'pointHoverBorderColor': "#fff",
'pointHoverBorderWidth': 2,
'pointHoverRadius': 5,
'data': list(map(_overtime_builder(overtime_data, key), yearterms))
}
yearterms = raw_data[yearterms_key]
overtime_data = raw_data[overtime_key]
labels = list(map(self.convert_date, yearterms))
datasets = list(map(_overtime_dataset_builder, keys))
return tornado.escape.json_encode({
'labels': labels,
'datasets': datasets,
})
def embedded_javascript(self):
options = tornado.escape.json_encode(self.chart_options())
foo = '''
new Chart(document.getElementById("{2}").getContext("2d"),{{
type:'line',
data:{1},
options:{0}
}});
'''.format(options, self.chart_data, self.chart_id)
return foo<|fim▁end|> | 'percent_c': True, |
<|file_name|>ppmatlab.py<|end_file_name|><|fim▁begin|>## Automatically adapted for numpy.oldnumeric Apr 14, 2008 by -c
from builtins import range
def writeMeshMatlabFormat(mesh,meshFileBase):
"""
build array data structures for matlab finite element mesh representation
    and write to a file to view and play with in matlab
in matlab can then print mesh with
pdemesh(p,e,t)
where
p is the vertex or point matrix
e is the edge matrix, and
t is the element matrix
points matrix is [2 x num vertices]
format :
row 1 = x coord,
row 2 = y coord for nodes in mesh
edge matrix is [7 x num edges]
format:
row 1 = start vertex number
row 2 = end vertex number
row 3 = start value in edge parameterization, should be 0
row 4 = end value in edge parameterization, should be 1
row 5 = global edge id, base 1
row 6 = subdomain on left? always 1 for now
row 7 = subdomain on right? always 0 for now
element matrix is [4 x num elements]
row 1 = vertex 1 global number
row 2 = vertex 2 global number
row 3 = vertex 3 global number
row 4 = triangle subdomain number
where 1,2,3 is a local counter clockwise numbering of vertices in
triangle
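    illustrative sketch (added for clarity, not from the original notes): a mesh holding a single
    triangle with vertices (0,0), (1,0), (0,1) would give
        p = [0 1 0; 0 0 1]    (x coords row, y coords row)
        t = [1; 2; 3; 1]      (three vertex numbers, base 1, plus the subdomain id)
    with one column of e per boundary edge laid out as described above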
"""
import numpy as numpy
matlabBase = 1
p = numpy.zeros((2,mesh['nNodes_global']),numpy.float_)
e = numpy.zeros((7,mesh['nElementBoundaries_global']),numpy.float_)
t = numpy.zeros((4,mesh['nElements_global']),numpy.float_)
#load p,e,t and write file
mfile = open(meshFileBase+'.m','w')
mfile.write('p = [ ... \n')
for nN in range(mesh['nNodes_global']):
p[0,nN]=mesh['nodeArray'][nN,0]
p[1,nN]=mesh['nodeArray'][nN,1]
mfile.write('%g %g \n' % tuple(p[:,nN]))
mfile.write(']; \n')
mfile.write("p = p\';\n") #need transpose for matlab
mfile.write('e = [ ... \n')
for ebN in range(mesh['nElementBoundaries_global']):
e[0,ebN]=mesh['elementBoundaryNodesArray'][ebN,0] + matlabBase #global node number of start node base 1
e[1,ebN]=mesh['elementBoundaryNodesArray'][ebN,1] + matlabBase #global node number of end node base 1
e[2,ebN]=0.0 #edge param. is 0 to 1
e[3,ebN]=1.0
e[4,ebN]=ebN + matlabBase #global edge number base 1
e[5,ebN]=0 #subdomain to left
e[6,ebN]=1 #subdomain to right
mfile.write('%g %g %g %g %g %g %g \n' % tuple(e[:,ebN]))
mfile.write(']; \n')
mfile.write("e = e\';\n") #need transpose for matlab
#write triangles last
mfile.write('t = [ ... \n')
for eN in range(mesh['nElements_global']):
t[0,eN]=mesh['elementNodesArray'][eN,0]+matlabBase #global node number for vertex 0
        t[1,eN]=mesh['elementNodesArray'][eN,1]+matlabBase #global node number for vertex 1
        t[2,eN]=mesh['elementNodesArray'][eN,2]+matlabBase #global node number for vertex 2
t[3,eN]=1 #subdomain id
mfile.write('%g %g %g %g \n' % tuple(t[:,eN]))
mfile.write(']; \n');
mfile.write("t = t\';\n") #need transpose for matlab
mfile.close()
return p,e,t
<|fim▁hole|> import ppmatlab,numpy.oldnumeric as numpy
os.listdir('./results')
filename = './results/re_forsyth2_ss_2d_pre_forsyth2_ss_2d_c0p1_n_mesh_results.dat'
res = shelve.open(filename)
mesh = res['mesh']
mmfile = 'forsyth2MeshMatlab'
p,e,t = ppmatlab.writeMeshMatlabFormat(mesh,mmfile)<|fim▁end|> |
########################################################################
if __name__ == '__main__':
import os,shelve |
<|file_name|>ex20.py<|end_file_name|><|fim▁begin|>from sys import argv
script, input_file = argv
def print_all(f):
print f.read()
def rewind(f):
f.seek(0)
def print_a_line(line_count, f):
print line_count, f.readline()
current_file = open(input_file)
print "First let's print the whole file:\n"
print_all(current_file)
print "Now let's rewind, kind of like a tape."
rewind(current_file)
print "Let's print three lines:"
current_line = 1<|fim▁hole|>print_a_line(current_line, current_file)
current_line = current_line + 1
print_a_line(current_line, current_file)<|fim▁end|> | print_a_line(current_line, current_file)
current_line = current_line + 1 |
<|file_name|>normalization_test.py<|end_file_name|><|fim▁begin|># Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for normalization layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
from tensorflow.python import keras
from tensorflow.python.eager import def_function
from tensorflow.python.eager import wrap_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.keras import combinations
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.keras.layers import normalization
from tensorflow.python.keras.layers import normalization_v2
from tensorflow.python.keras.mixed_precision.experimental import policy
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradient_checker_v2
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
from tensorflow.python.training import gradient_descent
class BatchNormalizationTest(keras_parameterized.TestCase):
@keras_parameterized.run_all_keras_modes
def test_basic_batchnorm(self):
testing_utils.layer_test(
keras.layers.BatchNormalization,
kwargs={
'momentum': 0.9,
'epsilon': 0.1,
'gamma_regularizer': keras.regularizers.l2(0.01),
'beta_regularizer': keras.regularizers.l2(0.01)
},
input_shape=(3, 4, 2))
testing_utils.layer_test(
keras.layers.BatchNormalization,
kwargs={
'gamma_initializer': 'ones',
'beta_initializer': 'ones',
'moving_mean_initializer': 'zeros',
'moving_variance_initializer': 'ones'
},
input_shape=(3, 4, 2))
testing_utils.layer_test(
keras.layers.BatchNormalization,
kwargs={'scale': False,
'center': False},
input_shape=(3, 3))
testing_utils.layer_test(
keras.layers.BatchNormalization,
kwargs={
'gamma_initializer': 'ones',
'beta_initializer': 'ones',
'moving_mean_initializer': 'zeros',
'moving_variance_initializer': 'ones'
},
input_shape=(3, 2, 4, 2))
@combinations.generate(combinations.combine(mode=['graph', 'eager']))
def test_batchnorm_weights(self):
layer = keras.layers.BatchNormalization(scale=False, center=False)
layer.build((None, 3, 4))
self.assertEqual(len(layer.trainable_weights), 0)
self.assertEqual(len(layer.weights), 2)
layer = keras.layers.BatchNormalization()
layer.build((None, 3, 4))
self.assertEqual(len(layer.trainable_weights), 2)
self.assertEqual(len(layer.weights), 4)
@combinations.generate(combinations.combine(mode=['graph', 'eager']))
def test_batchnorm_regularization(self):
layer = keras.layers.BatchNormalization(
gamma_regularizer='l1', beta_regularizer='l1')
layer.build((None, 3, 4))
self.assertEqual(len(layer.losses), 2)
max_norm = keras.constraints.max_norm
layer = keras.layers.BatchNormalization(
gamma_constraint=max_norm, beta_constraint=max_norm)
layer.build((None, 3, 4))
self.assertEqual(layer.gamma.constraint, max_norm)
self.assertEqual(layer.beta.constraint, max_norm)
@keras_parameterized.run_all_keras_modes
def test_batchnorm_convnet(self):
if test.is_gpu_available(cuda_only=True):
with self.session(use_gpu=True):
model = keras.models.Sequential()
norm = keras.layers.BatchNormalization(
axis=1, input_shape=(3, 4, 4), momentum=0.8)
model.add(norm)
model.compile(
loss='mse',
optimizer=gradient_descent.GradientDescentOptimizer(0.01),
run_eagerly=testing_utils.should_run_eagerly())
# centered on 5.0, variance 10.0
x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 3, 4, 4))
model.fit(x, x, epochs=4, verbose=0)
out = model.predict(x)
out -= np.reshape(keras.backend.eval(norm.beta), (1, 3, 1, 1))
out /= np.reshape(keras.backend.eval(norm.gamma), (1, 3, 1, 1))
np.testing.assert_allclose(np.mean(out, axis=(0, 2, 3)), 0.0, atol=1e-1)
np.testing.assert_allclose(np.std(out, axis=(0, 2, 3)), 1.0, atol=1e-1)
@keras_parameterized.run_all_keras_modes
def test_batchnorm_convnet_channel_last(self):
model = keras.models.Sequential()
norm = keras.layers.BatchNormalization(
axis=-1, input_shape=(4, 4, 3), momentum=0.8)
model.add(norm)
model.compile(
loss='mse',
optimizer=gradient_descent.GradientDescentOptimizer(0.01),
run_eagerly=testing_utils.should_run_eagerly())
# centered on 5.0, variance 10.0
x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 4, 4, 3))
model.fit(x, x, epochs=4, verbose=0)
out = model.predict(x)
out -= np.reshape(keras.backend.eval(norm.beta), (1, 1, 1, 3))
out /= np.reshape(keras.backend.eval(norm.gamma), (1, 1, 1, 3))
np.testing.assert_allclose(np.mean(out, axis=(0, 1, 2)), 0.0, atol=1e-1)
np.testing.assert_allclose(np.std(out, axis=(0, 1, 2)), 1.0, atol=1e-1)
@keras_parameterized.run_all_keras_modes
def test_batchnorm_correctness(self):
_run_batchnorm_correctness_test(
normalization.BatchNormalization, dtype='float32')
_run_batchnorm_correctness_test(
normalization_v2.BatchNormalization, dtype='float32')
@keras_parameterized.run_all_keras_modes
def test_batchnorm_float16(self):
_run_batchnorm_correctness_test(
normalization.BatchNormalization, dtype='float16')
_run_batchnorm_correctness_test(
normalization_v2.BatchNormalization, dtype='float16')
@combinations.generate(combinations.combine(mode=['graph', 'eager']))
@testing_utils.enable_v2_dtype_behavior
def test_batchnorm_mixed_precision(self):
norm = keras.layers.BatchNormalization(
axis=-1,
input_shape=(4, 4, 3),
momentum=0.8,
dtype=policy.Policy('mixed_float16'))
x = np.random.normal(size=(10, 4, 4, 3))
y = norm(x)
self.assertEqual(y.dtype, 'float16')
self.assertEqual(norm.beta.dtype.base_dtype, 'float32')
self.assertEqual(norm.gamma.dtype.base_dtype, 'float32')
@combinations.generate(combinations.combine(mode=['graph', 'eager'],
fused=[True, False]))
@testing_utils.enable_v2_dtype_behavior
def test_batchnorm_mixed_precision_does_not_overflow(self, fused):
norm = keras.layers.BatchNormalization(
axis=-1,
input_shape=(1, 1, 1),
fused=fused,
dtype=policy.Policy('mixed_float16'))
x = np.array([-1000., 1000.]).reshape((2, 1, 1, 1))
y = norm(x, training=True)
expected_y = np.array([-1.0, 1.0]).reshape((2, 1, 1, 1))
self.assertAllClose(keras.backend.eval(y), expected_y)
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def test_batchnorm_non_trainable_with_fit(self):
# We use the same data shape for all the data we use in this test.
# This will prevent any used tf.functions from retracing.
# This helps us verify that changing trainable and recompiling really
# does update the training loop, rather than a different data shape
# triggering a retrace.
data_shape = (100, 3)
inputs = keras.Input((3,))
bn = normalization_v2.BatchNormalization()
outputs = bn(inputs)
model = keras.Model(inputs, outputs)
model.compile(
'rmsprop',
'mse',
run_eagerly=testing_utils.should_run_eagerly())
model.fit(np.random.random(data_shape), np.random.random(data_shape))
test_data = np.random.random(data_shape)
test_targets = np.random.random(data_shape)
test_loss = model.evaluate(test_data, test_targets)
bn.trainable = False
model.compile(
'rmsprop',
'mse',
run_eagerly=testing_utils.should_run_eagerly())
train_loss = model.train_on_batch(test_data, test_targets)
self.assertAlmostEqual(test_loss, train_loss)
@keras_parameterized.run_all_keras_modes(always_skip_v1=True)
def test_eager_batchnorm_in_custom_model_call_with_tf_function(self):
class MyModel(keras.Model):
def __init__(self):
super(MyModel, self).__init__()
self.bn = keras.layers.BatchNormalization()
@def_function.function()
def call(self, x, training):
return self.bn(x, training=training)
model = MyModel()
for _ in range(10):
x = constant_op.constant(0.5, shape=[1, 1])
model(x, training=True)
# Make sure the moving mean and variance have been updated
self.assertAllClose(model.bn.moving_mean.numpy(), [0.047], atol=3e-3)
self.assertAllClose(model.bn.moving_variance.numpy(), [0.9], atol=3e-2)
class BatchNormalizationV1Test(keras_parameterized.TestCase):
@combinations.generate(combinations.combine(mode=['graph', 'eager']))
def test_v1_fused_attribute(self):
norm = normalization.BatchNormalization()
inp = keras.layers.Input((4, 4, 4))
norm(inp)
self.assertEqual(norm.fused, True)
norm = normalization.BatchNormalization(fused=False)
self.assertEqual(norm.fused, False)
inp = keras.layers.Input(shape=(4, 4, 4))
norm(inp)
self.assertEqual(norm.fused, False)
norm = normalization.BatchNormalization(virtual_batch_size=2)
self.assertEqual(norm.fused, True)
inp = keras.layers.Input(shape=(2, 2, 2))
norm(inp)
self.assertEqual(norm.fused, False)
class BatchNormalizationV2Test(keras_parameterized.TestCase):
@keras_parameterized.run_all_keras_modes
def test_basic_batchnorm_v2(self):
testing_utils.layer_test(
normalization_v2.BatchNormalization,
kwargs={'fused': True},
input_shape=(3, 3, 3, 3))
testing_utils.layer_test(
normalization_v2.BatchNormalization,
kwargs={'fused': None},
input_shape=(3, 3, 3))
@combinations.generate(combinations.combine(mode=['graph', 'eager']))
def test_v2_fused_attribute(self):
norm = normalization_v2.BatchNormalization()
self.assertEqual(norm.fused, None)
inp = keras.layers.Input(shape=(4, 4, 4))
norm(inp)
self.assertEqual(norm.fused, True)
norm = normalization_v2.BatchNormalization()
self.assertEqual(norm.fused, None)
inp = keras.layers.Input(shape=(4, 4))
norm(inp)
self.assertEqual(norm.fused, False)
norm = normalization_v2.BatchNormalization(virtual_batch_size=2)
self.assertEqual(norm.fused, False)
inp = keras.layers.Input(shape=(4, 4, 4))
norm(inp)
self.assertEqual(norm.fused, False)
norm = normalization_v2.BatchNormalization(fused=False)
self.assertEqual(norm.fused, False)
inp = keras.layers.Input(shape=(4, 4, 4))
norm(inp)
self.assertEqual(norm.fused, False)
norm = normalization_v2.BatchNormalization(fused=True, axis=[3])
self.assertEqual(norm.fused, True)
inp = keras.layers.Input(shape=(4, 4, 4))
norm(inp)
self.assertEqual(norm.fused, True)
with self.assertRaisesRegex(ValueError, 'fused.*renorm'):
normalization_v2.BatchNormalization(fused=True, renorm=True)
with self.assertRaisesRegex(ValueError, 'fused.*when axis is 1 or 3'):
normalization_v2.BatchNormalization(fused=True, axis=2)
with self.assertRaisesRegex(ValueError, 'fused.*when axis is 1 or 3'):
normalization_v2.BatchNormalization(fused=True, axis=[1, 3])
with self.assertRaisesRegex(ValueError, 'fused.*virtual_batch_size'):
normalization_v2.BatchNormalization(fused=True, virtual_batch_size=2)
with self.assertRaisesRegex(ValueError, 'fused.*adjustment'):
normalization_v2.BatchNormalization(fused=True,
adjustment=lambda _: (1, 0))
norm = normalization_v2.BatchNormalization(fused=True)
self.assertEqual(norm.fused, True)
inp = keras.layers.Input(shape=(4, 4))
with self.assertRaisesRegex(ValueError, '4D or 5D input tensors'):
norm(inp)
def test_updates_in_wrap_function(self):
def my_func():
layer = normalization.BatchNormalization()
x = array_ops.ones((10, 1))
y = layer(x, training=True)
# Updates should be tracked in a `wrap_function`.
self.assertLen(layer.updates, 2)
return y
wrapped_fn = wrap_function.wrap_function(my_func, [])
wrapped_fn()
@keras_parameterized.run_all_keras_modes
def test_basic_batchnorm_v2_none_shape_and_virtual_batch_size(self):
# Test case for GitHub issue for 32380
norm = normalization_v2.BatchNormalization(virtual_batch_size=8)
inp = keras.layers.Input(shape=(None, None, 3))
_ = norm(inp)
def _run_batchnorm_correctness_test(layer, dtype='float32', fused=False):
model = keras.models.Sequential()
model.add(keras.Input(shape=(2, 2, 2), dtype=dtype))
norm = layer(momentum=0.8, fused=fused)
model.add(norm)
if dtype == 'float16':
# Keras models require float32 losses.
model.add(keras.layers.Lambda(lambda x: keras.backend.cast(x, 'float32')))
model.compile(
loss='mse',
optimizer=gradient_descent.GradientDescentOptimizer(0.01),
run_eagerly=testing_utils.should_run_eagerly())
# centered on 5.0, variance 10.0
x = (np.random.normal(loc=5.0, scale=10.0, size=(1000, 2, 2, 2))
.astype(dtype))
model.fit(x, x, epochs=4, verbose=0)
out = model.predict(x)
out -= keras.backend.eval(norm.beta)
out /= keras.backend.eval(norm.gamma)
np.testing.assert_allclose(out.mean(), 0.0, atol=2e-1)
np.testing.assert_allclose(out.std(), 1.0, atol=2e-1)
@parameterized.parameters(
[normalization.BatchNormalization, normalization_v2.BatchNormalization])
class NormalizationLayersGraphModeOnlyTest(
test.TestCase, parameterized.TestCase):
def test_shared_batchnorm(self, layer):
"""Test that a BN layer can be shared across different data streams."""
with self.cached_session():
# Test single layer reuse
bn = layer()
x1 = keras.layers.Input(shape=(10,))
_ = bn(x1)
x2 = keras.layers.Input(shape=(10,))
y2 = bn(x2)
x = np.random.normal(loc=5.0, scale=10.0, size=(2, 10))
model = keras.models.Model(x2, y2)
model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
model.train_on_batch(x, x)
# Test model-level reuse
x3 = keras.layers.Input(shape=(10,))
y3 = model(x3)
new_model = keras.models.Model(x3, y3, name='new_model')
new_model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
new_model.train_on_batch(x, x)
def test_that_trainable_disables_updates(self, layer):
with self.cached_session():
val_a = np.random.random((10, 4))
val_out = np.random.random((10, 4))
a = keras.layers.Input(shape=(4,))
layer = layer(input_shape=(4,))
b = layer(a)
model = keras.models.Model(a, b)
model.trainable = False
model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
x1 = model.predict(val_a)
model.train_on_batch(val_a, val_out)
x2 = model.predict(val_a)
self.assertAllClose(x1, x2, atol=1e-7)
model.trainable = True
model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
model.train_on_batch(val_a, val_out)
x2 = model.predict(val_a)
assert np.abs(np.sum(x1 - x2)) > 1e-5
layer.trainable = False
model.compile(gradient_descent.GradientDescentOptimizer(0.01), 'mse')
x1 = model.predict(val_a)
model.train_on_batch(val_a, val_out)
x2 = model.predict(val_a)
self.assertAllClose(x1, x2, atol=1e-7)
def test_batchnorm_trainable(self, layer):
"""Tests that batchnorm layer is trainable when learning phase is enabled.
Computes mean and std for current inputs then
applies batch normalization using them.
Args:
layer: Either V1 or V2 of BatchNormalization layer.
"""
# TODO(fchollet): enable in all execution modes when issue with
# learning phase setting is resolved.
with ops.Graph().as_default(), self.cached_session():
bn_mean = 0.5
bn_std = 10.
val_a = np.expand_dims(np.arange(10.), axis=1)
def get_model(bn_mean, bn_std):
inp = keras.layers.Input(shape=(1,))
x = layer()(inp)
model1 = keras.models.Model(inp, x)
model1.set_weights([
np.array([1.]),
np.array([0.]),
np.array([bn_mean]),
np.array([bn_std**2])
])
return model1
# Simulates training-mode with trainable layer.
# Should use mini-batch statistics.
with keras.backend.learning_phase_scope(1):
model = get_model(bn_mean, bn_std)
model.compile(loss='mse', optimizer='rmsprop')
out = model.predict(val_a)
self.assertAllClose(
(val_a - np.mean(val_a)) / np.std(val_a), out, atol=1e-3)
def _run_layernorm_correctness_test(layer, dtype='float32'):
model = keras.models.Sequential()
model.add(keras.layers.Lambda(lambda x: math_ops.cast(x, dtype='float16')))
norm = layer(input_shape=(2, 2, 2), dtype=dtype)
model.add(norm)
model.compile(
loss='mse',
optimizer=gradient_descent.GradientDescentOptimizer(0.01),
run_eagerly=testing_utils.should_run_eagerly())
# centered on 5.0, variance 10.0
x = (np.random.normal(loc=5.0, scale=10.0, size=(1000, 2, 2, 2))
.astype(dtype))
model.fit(x, x, epochs=4, verbose=0)
out = model.predict(x)
out -= keras.backend.eval(norm.beta)
out /= keras.backend.eval(norm.gamma)
np.testing.assert_allclose(out.mean(), 0.0, atol=1e-1)
np.testing.assert_allclose(out.std(), 1.0, atol=1e-1)
class LayerNormalizationTest(keras_parameterized.TestCase):
@keras_parameterized.run_all_keras_modes
def test_basic_layernorm(self):
testing_utils.layer_test(
keras.layers.LayerNormalization,
kwargs={
'gamma_regularizer': keras.regularizers.l2(0.01),
'beta_regularizer': keras.regularizers.l2(0.01)
},
input_shape=(3, 4, 2))
testing_utils.layer_test(
keras.layers.LayerNormalization,
kwargs={
'gamma_initializer': 'ones',
'beta_initializer': 'ones',
},
input_shape=(3, 4, 2))
testing_utils.layer_test(
keras.layers.LayerNormalization,
kwargs={'scale': False,
'center': False},
input_shape=(3, 3))
testing_utils.layer_test(
keras.layers.LayerNormalization,
kwargs={'axis': (-3, -2, -1)},
input_shape=(2, 8, 8, 3))
@keras_parameterized.run_all_keras_modes
def test_non_fused_layernorm(self):
testing_utils.layer_test(
keras.layers.LayerNormalization,
kwargs={'axis': -2},
input_shape=(3, 4, 2))
testing_utils.layer_test(
keras.layers.LayerNormalization,
kwargs={'axis': (-3, -2)},
input_shape=(2, 8, 8, 3))
testing_utils.layer_test(
keras.layers.LayerNormalization,
kwargs={'axis': (-3, -1)},
input_shape=(2, 8, 8, 3))<|fim▁hole|> layer = keras.layers.LayerNormalization(scale=False, center=False)
layer.build((None, 3, 4))
self.assertEqual(len(layer.trainable_weights), 0)
self.assertEqual(len(layer.weights), 0)
layer = keras.layers.LayerNormalization()
layer.build((None, 3, 4))
self.assertEqual(len(layer.trainable_weights), 2)
self.assertEqual(len(layer.weights), 2)
@combinations.generate(combinations.combine(mode=['graph', 'eager']))
def test_layernorm_regularization(self):
layer = keras.layers.LayerNormalization(
gamma_regularizer='l1', beta_regularizer='l1')
layer.build((None, 3, 4))
self.assertEqual(len(layer.losses), 2)
max_norm = keras.constraints.max_norm
layer = keras.layers.LayerNormalization(
gamma_constraint=max_norm, beta_constraint=max_norm)
layer.build((None, 3, 4))
self.assertEqual(layer.gamma.constraint, max_norm)
self.assertEqual(layer.beta.constraint, max_norm)
@keras_parameterized.run_all_keras_modes
def test_layernorm_convnet_channel_last(self):
model = keras.models.Sequential()
norm = keras.layers.LayerNormalization(input_shape=(4, 4, 3))
model.add(norm)
model.compile(
loss='mse',
optimizer=gradient_descent.GradientDescentOptimizer(0.01),
run_eagerly=testing_utils.should_run_eagerly())
# centered on 5.0, variance 10.0
x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 4, 4, 3))
model.fit(x, x, epochs=4, verbose=0)
out = model.predict(x)
out -= np.reshape(keras.backend.eval(norm.beta), (1, 1, 1, 3))
out /= np.reshape(keras.backend.eval(norm.gamma), (1, 1, 1, 3))
np.testing.assert_allclose(np.mean(out, axis=(0, 1, 2)), 0.0, atol=1e-1)
np.testing.assert_allclose(np.std(out, axis=(0, 1, 2)), 1.0, atol=1e-1)
@keras_parameterized.run_all_keras_modes
def test_layernorm_correctness(self):
_run_layernorm_correctness_test(
normalization.LayerNormalization, dtype='float32')
@keras_parameterized.run_all_keras_modes
def test_layernorm_mixed_precision(self):
_run_layernorm_correctness_test(
normalization.LayerNormalization, dtype='float16')
@combinations.generate(combinations.combine(mode=['graph', 'eager']))
def testIncorrectAxisType(self):
with self.assertRaisesRegex(TypeError,
r'Expected an int or a list/tuple of ints'):
_ = normalization.LayerNormalization(axis={'axis': -1})
@combinations.generate(combinations.combine(mode=['graph', 'eager']))
def testInvalidAxis(self):
with self.assertRaisesRegex(ValueError, r'Invalid axis: 3'):
layer_norm = normalization.LayerNormalization(axis=3)
layer_norm.build(input_shape=(2, 2, 2))
@combinations.generate(combinations.combine(mode=['graph', 'eager']))
def testDuplicateAxis(self):
with self.assertRaisesRegex(ValueError, r'Duplicate axis:'):
layer_norm = normalization.LayerNormalization(axis=[-1, -1])
layer_norm.build(input_shape=(2, 2, 2))
@combinations.generate(combinations.combine(mode=['graph', 'eager']))
def testFusedAttr(self):
layer_norm = normalization.LayerNormalization(axis=[-2, -1])
layer_norm.build(input_shape=(2, 2, 2))
self.assertEqual(layer_norm._fused, True)
class LayerNormalizationNumericsTest(keras_parameterized.TestCase):
"""Tests LayerNormalization has correct and numerically stable outputs."""
def _expected_layer_norm(self, x, beta, gamma, batch_input_shape, axis,
epsilon):
"""Returns the layer norm, which is computed using NumPy."""
broadcast_shape = [batch_input_shape[i] if i in axis else 1
for i in range(len(batch_input_shape))]
mean = np.mean(x, axis=axis, keepdims=True)
var = np.var(x, axis=axis, keepdims=True)
expected = (x - mean) / np.sqrt(var + epsilon)
expected *= np.reshape(gamma, broadcast_shape)
expected += np.reshape(beta, broadcast_shape)
return expected
def _test_forward_pass(self, batch_input_shape, axis, fp64_tol=1e-14,
fp32_tol=1e-6, fp16_tol=1e-2):
"""Tests the forward pass of layer normalization.
Args:
batch_input_shape: The input shape that will be used to test, including
the batch dimension.
      axis: A list of axes to normalize. Will be passed to the `axis` argument
of LayerNormalization.
fp64_tol: The relative and absolute tolerance for float64.
fp32_tol: The relative and absolute tolerance for float32.
fp16_tol: The relative and absolute tolerance for float16.
"""
param_shape = [batch_input_shape[i] for i in axis]
param_elems = 1
for dim in param_shape:
param_elems *= dim
beta = np.arange(param_elems, dtype='float64').reshape(param_shape)
gamma = np.arange(1, param_elems + 1, dtype='float64').reshape(param_shape)
x = np.random.normal(size=batch_input_shape)
for epsilon in 1e-12, 1e-3:
expected = self._expected_layer_norm(x, beta, gamma, batch_input_shape,
axis, epsilon)
for dtype in 'float64', 'float32', 'float16':
norm = normalization.LayerNormalization(
axis=axis, dtype=dtype, batch_input_shape=batch_input_shape,
epsilon=epsilon, beta_initializer=keras.initializers.constant(beta),
gamma_initializer=keras.initializers.constant(gamma))
y = norm(keras.backend.cast(x, dtype))
actual = keras.backend.eval(y)
if dtype == 'float64':
tol = fp64_tol
elif dtype == 'float32':
tol = fp32_tol
else:
assert dtype == 'float16'
tol = fp16_tol
# We use absolute tolerances in addition to relative tolerances, because
# some of the values are very close to zero.
self.assertAllClose(expected, actual, rtol=tol, atol=tol)
@combinations.generate(combinations.combine(mode=['graph', 'eager']))
def test_forward(self):
# For numeric stability, we ensure the axis's dimension(s) have at least 4
# elements.
self._test_forward_pass((4, 3), (0,))
self._test_forward_pass((3, 4), (1,))
self._test_forward_pass((4, 3, 2), (0,))
self._test_forward_pass((2, 4, 2), (1,))
self._test_forward_pass((2, 3, 4), (2,), fp16_tol=5e-2)
self._test_forward_pass((2, 3, 2), (0, 2))
self._test_forward_pass((2, 2, 2, 2), (1, 3))
self._test_forward_pass((2, 2, 2, 2), (2, 3))
self._test_forward_pass((2, 3, 4, 5), (3,))
def _test_backward_pass(self, batch_input_shape, axis, fp64_tol=1e-5,
fp32_tol=1e-5, fp16_tol=2e-2):
"""Tests the backwards pass of layer normalization.
Args:
batch_input_shape: The input shape that will be used to test, including
the batch dimension.
      axis: A list of axes to normalize. Will be passed to the `axis` argument
of LayerNormalization.
fp64_tol: The relative and absolute tolerance for float64.
fp32_tol: The relative and absolute tolerance for float32.
fp16_tol: The relative and absolute tolerance for float16.
"""
param_shape = [batch_input_shape[i] for i in axis]
param_elems = 1
for dim in param_shape:
param_elems *= dim
beta = np.arange(param_elems, dtype='float64').reshape(param_shape)
gamma = np.arange(1, param_elems + 1, dtype='float64').reshape(param_shape)
x = np.random.normal(size=batch_input_shape)
for epsilon in 1e-12, 1e-3:
# Float64 must come first in this list, as we use the float64 numerical
# gradients to compare to the float32 and float16 symbolic gradients as
# well. Computing float32/float16 numerical gradients is too numerically
# unstable.
for dtype in 'float64', 'float32', 'float16':
norm = normalization.LayerNormalization(
axis=axis, dtype=dtype, batch_input_shape=batch_input_shape,
epsilon=epsilon, beta_initializer=keras.initializers.constant(beta),
gamma_initializer=keras.initializers.constant(gamma))
norm.build(x.shape)
# pylint: disable=cell-var-from-loop
def forward_fn(x, beta, gamma):
# We must monkey-patch the attributes of `norm` with the function
# arguments, so that the gradient checker will properly compute their
# gradients. The gradient checker computes gradients with respect to
# the input arguments of `f`.
with test.mock.patch.object(norm, 'beta', beta):
with test.mock.patch.object(norm, 'gamma', gamma):
return norm(x)
# pylint: enable=cell-var-from-loop
results = gradient_checker_v2.compute_gradient(
forward_fn, [keras.backend.cast(x, dtype), norm.beta, norm.gamma])
([x_grad_t, beta_grad_t, gamma_grad_t],
[x_grad_n, beta_grad_n, gamma_grad_n]) = results
if dtype == 'float64':
# We use the float64 numeric gradients as the reference, to compare
# against the symbolic gradients for all dtypes.
x_grad_ref = x_grad_n
beta_grad_ref = beta_grad_n
gamma_grad_ref = gamma_grad_n
tol = fp64_tol
elif dtype == 'float32':
tol = fp32_tol
else:
assert dtype == 'float16'
tol = fp16_tol
# We use absolute tolerances in addition to relative tolerances, because
# some of the values are very close to zero.
self.assertAllClose(x_grad_t, x_grad_ref, rtol=tol, atol=tol)
self.assertAllClose(beta_grad_t, beta_grad_ref, rtol=tol, atol=tol)
self.assertAllClose(gamma_grad_t, gamma_grad_ref, rtol=tol, atol=tol)
# The gradient_checker_v2 does not work properly with LayerNorm in graph mode.
@testing_utils.run_v2_only
def test_backward(self):
# For numeric stability, we ensure the axis's dimension(s) have at least 4
# elements.
self._test_backward_pass((4, 3), (0,))
self._test_backward_pass((2, 4, 2), (1,))
self._test_backward_pass((2, 3, 4), (2,))
self._test_backward_pass((2, 3, 2), (0, 2), fp64_tol=5e-4, fp32_tol=5e-4)
self._test_backward_pass((2, 2, 2, 2), (1, 3))
self._test_backward_pass((2, 2, 2, 2), (2, 3))
if __name__ == '__main__':
test.main()<|fim▁end|> |
@combinations.generate(combinations.combine(mode=['graph', 'eager']))
def test_layernorm_weights(self): |
<|file_name|>MiningDurationRoleBonus.java<|end_file_name|><|fim▁begin|>package fr.guiguilechat.jcelechat.model.sde.attributes;
import fr.guiguilechat.jcelechat.model.sde.IntAttribute;
/**
*
*/
public class MiningDurationRoleBonus
extends IntAttribute
{
public static final MiningDurationRoleBonus INSTANCE = new MiningDurationRoleBonus();
@Override
public int getId() {
return 2458;
}
@Override
public int getCatId() {
return 7;
}
@Override
public boolean getHighIsGood() {
return false;
}
@Override
public double getDefaultValue() {
return 0.0;
}
<|fim▁hole|> return true;
}
@Override
public boolean getStackable() {
return true;
}
@Override
public String toString() {
return "MiningDurationRoleBonus";
}
}<|fim▁end|> | @Override
public boolean getPublished() { |
<|file_name|>const.py<|end_file_name|><|fim▁begin|>"""Constants for the Vilfo Router integration."""
from __future__ import annotations
from dataclasses import dataclass
from homeassistant.components.sensor import SensorEntityDescription
from homeassistant.const import DEVICE_CLASS_TIMESTAMP, PERCENTAGE
DOMAIN = "vilfo"
ATTR_API_DATA_FIELD_LOAD = "load"
ATTR_API_DATA_FIELD_BOOT_TIME = "boot_time"
ATTR_LOAD = "load"<|fim▁hole|>ATTR_BOOT_TIME = "boot_time"
ROUTER_DEFAULT_HOST = "admin.vilfo.com"
ROUTER_DEFAULT_MODEL = "Vilfo Router"
ROUTER_DEFAULT_NAME = "Vilfo Router"
ROUTER_MANUFACTURER = "Vilfo AB"
@dataclass
class VilfoRequiredKeysMixin:
"""Mixin for required keys."""
api_key: str
@dataclass
class VilfoSensorEntityDescription(SensorEntityDescription, VilfoRequiredKeysMixin):
"""Describes Vilfo sensor entity."""
SENSOR_TYPES: tuple[VilfoSensorEntityDescription, ...] = (
VilfoSensorEntityDescription(
key=ATTR_LOAD,
name="Load",
native_unit_of_measurement=PERCENTAGE,
icon="mdi:memory",
api_key=ATTR_API_DATA_FIELD_LOAD,
),
VilfoSensorEntityDescription(
key=ATTR_BOOT_TIME,
name="Boot time",
icon="mdi:timer-outline",
api_key=ATTR_API_DATA_FIELD_BOOT_TIME,
device_class=DEVICE_CLASS_TIMESTAMP,
),
)<|fim▁end|> | |
<|file_name|>azure_hl.rs<|end_file_name|><|fim▁begin|>// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
//! High-level bindings to Azure.
pub use AzColor as Color;
pub use azure::AzGradientStop as GradientStop;
use azure::{AZ_FILTER_TYPE_COLOR_MATRIX};
use azure::{AZ_FILTER_TYPE_FLOOD, AZ_FILTER_TYPE_GAUSSIAN_BLUR, AZ_FILTER_TYPE_LINEAR_TRANSFER};
use azure::{AZ_FILTER_TYPE_TABLE_TRANSFER, AZ_IN_COLOR_MATRIX_IN, AZ_IN_COMPOSITE_IN};
use azure::{AZ_IN_FLOOD_IN, AZ_IN_GAUSSIAN_BLUR_IN, AZ_IN_LINEAR_TRANSFER_IN};
use azure::{AZ_ATT_COLOR_MATRIX_MATRIX, AZ_ATT_FLOOD_COLOR, AZ_ATT_GAUSSIAN_BLUR_STD_DEVIATION};
use azure::{AZ_FILTER_TYPE_COMPOSITE, AZ_IN_TABLE_TRANSFER_IN, AZ_ATT_LINEAR_TRANSFER_SLOPE_R};
use azure::{AZ_ATT_LINEAR_TRANSFER_SLOPE_G, AZ_ATT_LINEAR_TRANSFER_SLOPE_B};
use azure::{AZ_ATT_LINEAR_TRANSFER_SLOPE_A, AZ_ATT_LINEAR_TRANSFER_INTERCEPT_R};
use azure::{AZ_ATT_LINEAR_TRANSFER_INTERCEPT_G, AZ_ATT_LINEAR_TRANSFER_INTERCEPT_B};
use azure::{AZ_ATT_LINEAR_TRANSFER_INTERCEPT_A, AZ_ATT_TABLE_TRANSFER_TABLE_R};
use azure::{AZ_ATT_TABLE_TRANSFER_TABLE_G, AZ_ATT_TABLE_TRANSFER_TABLE_B};
use azure::{AZ_ATT_TABLE_TRANSFER_TABLE_A, AZ_ATT_TRANSFER_DISABLE_R};
use azure::{AZ_ATT_TRANSFER_DISABLE_G, AZ_ATT_TRANSFER_DISABLE_B};
use azure::{AZ_ATT_TRANSFER_DISABLE_A};
use azure::{AzPoint, AzRect, AzIntRect, AzFloat, AzIntSize, AzColor, AzColorPatternRef, AzGradientStopsRef};
use azure::{AzStrokeOptions, AzDrawOptions, AzSurfaceFormat, AzIntPoint, AzFilter, AzDrawSurfaceOptions};
use azure::{AzBackendType, AzDrawTargetRef, AzSourceSurfaceRef, AzDataSourceSurfaceRef};
use azure::{AzScaledFontRef, AzGlyphRenderingOptionsRef, AzExtendMode};
use azure::{AzCompositionOp, AzAntialiasMode, AzJoinStyle, AzCapStyle};
use azure::{struct__AzGlyphBuffer};
use azure::{struct__AzDrawOptions, struct__AzIntRect, struct__AzDrawSurfaceOptions, struct__AzIntSize};
use azure::{struct__AzPoint, struct__AzRect, struct__AzStrokeOptions, struct__AzIntPoint, struct__AzMatrix5x4};
use azure::{AzCreateColorPattern, AzCreateDrawTarget, AzCreateDrawTargetForData};
use azure::{AzDataSourceSurfaceGetData, AzDataSourceSurfaceGetStride};
use azure::{AzDrawTargetClearRect};
use azure::{AzDrawTargetCreateSourceSurfaceFromData};
use azure::{AzDrawTargetDrawSurface, AzDrawTargetCopySurface, AzDrawTargetFillRect, AzDrawTargetFlush};
use azure::{AzDrawTargetGetSize, AzDrawTargetGetSnapshot, AzDrawTargetSetTransform};
use azure::{AzDrawTargetStrokeLine, AzDrawTargetStrokeRect, AzDrawTargetFillGlyphs};
use azure::{AzDrawTargetCreateGradientStops, AzDrawTargetGetFormat};
use azure::{AzReleaseDrawTarget, AzReleasePattern, AzReleaseGradientStops};
use azure::{AzReleaseSourceSurface, AzRetainDrawTarget, AzRetainSourceSurface};
use azure::{AzSourceSurfaceGetDataSurface, AzSourceSurfaceGetFormat};
use azure::{AzSourceSurfaceGetSize, AzCreateDrawTargetSkiaWithGrContextAndFBO};
use azure::{AzCreatePathBuilder, AzPathBuilderRef, AzPathBuilderMoveTo, AzPathBuilderLineTo};
use azure::{AzDrawTargetStroke, AzPathBuilderArc, AzPathBuilderFinish, AzReleasePathBuilder};
use azure::{AzDrawTargetFill, AzPathRef, AzReleasePath, AzDrawTargetPushClip, AzDrawTargetPopClip};
use azure::{AzLinearGradientPatternRef, AzRadialGradientPatternRef, AzSurfacePatternRef, AzMatrix, AzPatternRef};
use azure::{AzCreateLinearGradientPattern, AzCreateRadialGradientPattern, AzCreateSurfacePattern, AzDrawTargetPushClipRect};
use azure::{AzCloneLinearGradientPattern, AzCloneRadialGradientPattern, AzCloneSurfacePattern, AzSurfacePatternGetSize};
use azure::{AzDrawTargetDrawSurfaceWithShadow, AzDrawTargetCreateShadowDrawTarget};
use azure::{AzDrawTargetCreateSimilarDrawTarget, AzDrawTargetGetTransform};
use azure::{AzFilterNodeSetSourceSurfaceInput, AzReleaseFilterNode, AzDrawTargetCreateFilter};
use azure::{AzFilterNodeSetColorAttribute, AzFilterNodeSetFloatAttribute};
use azure::{AzFilterNodeSetMatrix5x4Attribute, AzFilterNodeSetFilterNodeInput};
use azure::{AzFilterNodeSetFloatArrayAttribute, AzFilterNodeSetBoolAttribute};
use azure::{AzDrawTargetDrawFilter, AzFilterNodeRef, AzFilterType};
use azure::{AzPathBuilderBezierTo, AzPathBuilderQuadraticBezierTo};
use azure::{AzPathBuilderCurrentPoint, AzPathBuilderClose};
use azure::{AzPathContainsPoint, AzPathCopyToBuilder};
use euclid::Transform2D;
use euclid::Point2D;
use euclid::Vector2D;
use euclid::Rect;
use euclid::Size2D;
use libc::size_t;
use skia::gl_rasterization_context::GLRasterizationContext;
use std::mem;
use std::ptr;
use std::slice;
use std::sync::Arc;
impl Color {
#[inline]
pub fn new(r: AzFloat, g: AzFloat, b: AzFloat, a: AzFloat) -> AzColor {
AzColor { r: r, g: g, b: b, a: a }
}
#[inline]
pub fn rgb(r: u8, g: u8, b: u8) -> AzColor {
AzColor {
r: (r as AzFloat) / (255.0 as AzFloat),
g: (g as AzFloat) / (255.0 as AzFloat),
b: (b as AzFloat) / (255.0 as AzFloat),
a: 1.0 as AzFloat
}
}
#[inline]
pub fn rgba(r: AzFloat, g: AzFloat, b: AzFloat, a: AzFloat) -> AzColor {
AzColor { r: r, g: g, b: b, a: a }
}
#[inline]
pub fn black() -> AzColor {
AzColor { r: 0.0, g: 0.0, b: 0.0, a: 1.0 }
}
#[inline]
pub fn transparent() -> AzColor {
AzColor { r: 0.0, g: 0.0, b: 0.0, a: 0.0 }
}
#[inline]
pub fn white() -> AzColor {
AzColor { r: 1.0, g: 1.0, b: 1.0, a: 1.0 }
}
}
pub trait AsAzureRect {
fn as_azure_rect(&self) -> AzRect;
}
impl AsAzureRect for Rect<AzFloat> {
fn as_azure_rect(&self) -> AzRect {
struct__AzRect {
x: self.origin.x,
y: self.origin.y,
width: self.size.width,
height: self.size.height
}
}
}
pub trait AsAzureIntRect {
fn as_azure_int_rect(&self) -> AzIntRect;
}
impl AsAzureIntRect for Rect<i32> {
fn as_azure_int_rect(&self) -> AzIntRect {
struct__AzIntRect {
x: self.origin.x,
y: self.origin.y,
width: self.size.width,
height: self.size.height
}
}
}
pub trait AsAzureIntSize {
fn as_azure_int_size(&self) -> AzIntSize;
}
impl AsAzureIntSize for Size2D<i32> {
fn as_azure_int_size(&self) -> AzIntSize {
struct__AzIntSize {
width: self.width,
height: self.height
}
}
}
pub trait AsAzurePoint {
fn as_azure_point(&self) -> AzPoint;
}
impl AsAzurePoint for Point2D<AzFloat> {
fn as_azure_point(&self) -> AzPoint {
struct__AzPoint {
x: self.x,
y: self.y
}
}
}
// Azure does not have a separation between points and vectors, AzPoint is used
// to represent both
impl AsAzurePoint for Vector2D<AzFloat> {
#[inline]
fn as_azure_point(&self) -> AzPoint {
struct__AzPoint {
x: self.x,
y: self.y
}
}
}
pub trait AsAzureIntPoint {
fn as_azure_int_point(&self) -> AzIntPoint;
}
impl AsAzureIntPoint for Point2D<i32> {
fn as_azure_int_point(&self) -> AzIntPoint {
struct__AzIntPoint {
x: self.x,
y: self.y
}
}
}
// FIXME: Should have a class hierarchy here starting with Pattern.
#[derive(Debug)]
pub struct ColorPattern {
color: Color,
pub azure_color_pattern: AzColorPatternRef,
}
impl Drop for ColorPattern {
fn drop(&mut self) {
unsafe {
AzReleasePattern(self.azure_color_pattern);
}
}
}
impl Clone for ColorPattern {
fn clone(&self) -> ColorPattern {
ColorPattern::new(self.color.clone())
}
}
impl ColorPattern {
pub fn new(color: Color) -> ColorPattern {
unsafe {
ColorPattern {
color: color,
azure_color_pattern: AzCreateColorPattern(&mut color.clone())
}
}
}
}
#[repr(u8)]
#[derive(Clone, Copy, PartialEq, Debug)]
pub enum CompositionOp {
Over,
Add,
Atop,
Out,
In,
Source,
DestIn,
DestOut,
DestOver,
DestAtop,
Xor,
Multiply,
Screen,
Overlay,
Darken,
Lighten,
ColorDodge,
ColorBurn,
HardLight,
SoftLight,
Difference,
Exclusion,
Hue,
Saturation,
Color,
Luminosity,
Count,
}
impl CompositionOp {
fn as_azure_composition_op(self) -> AzCompositionOp {
self as AzCompositionOp
}
}
#[repr(u8)]
#[derive(Clone, Copy, PartialEq, Debug)]
pub enum AntialiasMode {
None = 0,
Gray = 1,
Subpixel = 2,
Default = 3,
}
impl AntialiasMode {
fn as_azure_antialias_mode(self) -> AzAntialiasMode {
self as AzAntialiasMode
}
}
#[repr(u8)]
#[derive(Clone, Copy, PartialEq, Debug)]
pub enum JoinStyle {
Bevel = 0,
Round = 1,
Miter = 2,
MiterOrBevel = 3,
}
impl JoinStyle {
fn as_azure_join_style(self) -> AzJoinStyle {
self as AzJoinStyle
}
}
#[repr(u8)]
#[derive(Clone, Copy, PartialEq, Debug)]
pub enum CapStyle {
Butt = 0,
Round = 1,
Square = 2,
}
impl CapStyle {
fn as_azure_cap_style(self) -> AzCapStyle {
self as AzCapStyle
}
}
#[allow(non_snake_case)]
#[derive(Clone, Debug)]
pub struct StrokeOptions<'a> {
pub line_width: AzFloat,
pub miter_limit: AzFloat,
pub mDashPattern: &'a[AzFloat],
pub line_join: JoinStyle,
pub line_cap: CapStyle,
}
impl<'a> StrokeOptions<'a> {
pub fn new(line_width: AzFloat, line_join: JoinStyle, line_cap: CapStyle, miter_limit: AzFloat,
dash_pattern: &'a[AzFloat]) -> StrokeOptions {
StrokeOptions {
line_width: line_width,
miter_limit: miter_limit,
mDashPattern: dash_pattern,
line_join: line_join,
line_cap: line_cap,
}
}
fn as_azure_stroke_options(&self) -> AzStrokeOptions {
struct__AzStrokeOptions {
mLineWidth: self.line_width,
mMiterLimit: self.miter_limit,
mDashPattern: self.mDashPattern.as_ptr(),
mDashLength: self.mDashPattern.len() as size_t,
mDashOffset: 0.0 as AzFloat,
mLineJoin: self.line_join.as_azure_join_style(),
mLineCap: self.line_cap.as_azure_cap_style(),
}
}
}
#[derive(Clone, Debug)]
pub struct DrawOptions {
pub alpha: AzFloat,
pub composition: CompositionOp,
pub antialias: AntialiasMode,
}
impl DrawOptions {
pub fn default() -> DrawOptions {
DrawOptions {
alpha: 1.0,
composition: CompositionOp::Over,
antialias: AntialiasMode::Default,
}
}
pub fn new(alpha: AzFloat, composition: CompositionOp, antialias: AntialiasMode) -> DrawOptions {
DrawOptions {
alpha: alpha,
composition: composition,
antialias: antialias,
}
}
fn as_azure_draw_options(&self) -> AzDrawOptions {
struct__AzDrawOptions {
mAlpha: self.alpha,
mCompositionOp: self.composition.as_azure_composition_op(),
mAntialiasMode: self.antialias.as_azure_antialias_mode(),
}
}
pub fn set_composition_op(&mut self, style: CompositionOp) {
self.composition = style;
}
pub fn set_antialias_mode(&mut self, style: AntialiasMode) {
self.antialias = style;
}
}
#[derive(Debug)]
pub enum SurfaceFormat {
B8G8R8A8,
B8G8R8X8,
R8G8B8A8,
R8G8B8X8,
R5G6B5,
A8,
YUV,
UNKNOWN
}
impl SurfaceFormat {
fn as_azure_surface_format(self) -> AzSurfaceFormat {
self as AzSurfaceFormat
}
pub fn new(azure_surface_format: AzSurfaceFormat) -> SurfaceFormat {
match azure_surface_format {
0 => SurfaceFormat::B8G8R8A8,
1 => SurfaceFormat::B8G8R8X8,
2 => SurfaceFormat::R8G8B8A8,
3 => SurfaceFormat::R8G8B8X8,
4 => SurfaceFormat::R5G6B5,
5 => SurfaceFormat::A8,
6 => SurfaceFormat::YUV,
7 => SurfaceFormat::UNKNOWN,
_ => panic!("SurfaceFormat::new(): unknown Azure surface format")
}
}
}
#[derive(Copy, Clone, Debug)]
pub enum Filter {
Good,
Linear,
Point
}
impl Filter {
pub fn as_azure_filter(self) -> AzFilter {
self as AzFilter
}
}
#[derive(Debug)]
pub struct DrawSurfaceOptions {
pub filter: Filter,
pub sampling_bounds: bool,
}
impl DrawSurfaceOptions {
pub fn new(filter: Filter, sampling_bounds: bool) -> DrawSurfaceOptions {
DrawSurfaceOptions {
filter: filter,
sampling_bounds: sampling_bounds,
}
}
fn as_azure_draw_surface_options(&self) -> AzDrawSurfaceOptions {
struct__AzDrawSurfaceOptions {
filter: self.filter as i8,
sampling_bounds: self.sampling_bounds as i8,
}
}
}
#[derive(Debug, Clone, PartialEq)]
pub enum BackendType {
None,
Direct2D,
CoreGraphics,
CoreGraphicsAccelerated,
Cairo,
Skia,
Recording,
Direct2D11,
NVPathRendering,
}
impl BackendType {
pub fn as_azure_backend_type(self) -> AzBackendType {
match self {
BackendType::None => 0,
BackendType::Direct2D => 1,
BackendType::CoreGraphics => 2,
BackendType::CoreGraphicsAccelerated => 3,
BackendType::Cairo => 4,
BackendType::Skia => 5,
BackendType::Recording => 6,
BackendType::Direct2D11 => 7,
BackendType::NVPathRendering => 8,
}
}
}
pub struct DrawTarget {
pub azure_draw_target: AzDrawTargetRef,
pub backing: DrawTargetBacking,
}
impl Drop for DrawTarget {
fn drop(&mut self) {
unsafe {
AzReleaseDrawTarget(self.azure_draw_target);
}
}
}
impl PartialEq for DrawTarget {
#[inline]
fn eq(&self, other: &DrawTarget) -> bool {
self.azure_draw_target == other.azure_draw_target
}
}
impl DrawTarget {
pub fn new(backend: BackendType, size: Size2D<i32>, format: SurfaceFormat)
-> DrawTarget {
let azure_draw_target = unsafe {
AzCreateDrawTarget(backend.as_azure_backend_type(),
&mut size.as_azure_int_size(),
format.as_azure_surface_format())
};
if azure_draw_target.is_null() {
panic!("null azure draw target");
}
DrawTarget {
azure_draw_target: azure_draw_target,
backing: DrawTargetBacking::Empty,
}
}
pub fn new_with_data(backend: BackendType,
mut data: Vec<u8>,
offset: usize,
size: Size2D<i32>,
stride: i32,
format: SurfaceFormat) -> DrawTarget {
assert!((data.len() - offset) as i32 >= stride * size.height);
let azure_draw_target = unsafe {
AzCreateDrawTargetForData(backend.as_azure_backend_type(),
data.as_mut_ptr().offset(offset as isize),
&mut size.as_azure_int_size(),
stride,
format.as_azure_surface_format())
};
if azure_draw_target.is_null() {
panic!("null azure draw target");
}
DrawTarget {
azure_draw_target: azure_draw_target,
backing: DrawTargetBacking::Data(Arc::new(data)),
}
}
pub fn new_with_gl_rasterization_context(gl_rasterization_context: Arc<GLRasterizationContext>,
format: SurfaceFormat)
-> DrawTarget {
let mut size = gl_rasterization_context.size.as_azure_int_size();
let azure_draw_target = unsafe {
AzCreateDrawTargetSkiaWithGrContextAndFBO(gl_rasterization_context.gl_context.gr_context,
gl_rasterization_context.framebuffer_id,
&mut size,
format.as_azure_surface_format())
};
if azure_draw_target.is_null() {
panic!("Failed to create GL rasterizing AzureDrawTarget");
}
DrawTarget {
azure_draw_target: azure_draw_target,
backing: DrawTargetBacking::GLRasterizationContext(gl_rasterization_context)
}
}
pub fn clone(&self) -> DrawTarget {
unsafe {
AzRetainDrawTarget(self.azure_draw_target);
}
DrawTarget {
azure_draw_target: self.azure_draw_target,
backing: self.backing.clone(),
}
}
pub fn make_current(&self) {
self.backing.make_current();
}
pub fn finish(self) {
self.backing.make_current();
self.backing.finish();
}
pub fn get_size(&self) -> AzIntSize {
unsafe {
AzDrawTargetGetSize(self.azure_draw_target)
}
}
pub fn get_format(&self) -> SurfaceFormat {
unsafe {
SurfaceFormat::new(AzDrawTargetGetFormat(self.azure_draw_target))
}
}
pub fn get_transform(&self) -> Transform2D<AzFloat> {
unsafe {
let mut result: AzMatrix = mem::uninitialized();
AzDrawTargetGetTransform(self.azure_draw_target, &mut result);
result.as_matrix_2d()
}
}
pub fn flush(&self) {
unsafe {
AzDrawTargetFlush(self.azure_draw_target);
}
self.backing.flush();
}
pub fn clear_rect(&self, rect: &Rect<AzFloat>) {
unsafe {
AzDrawTargetClearRect(self.azure_draw_target, &mut rect.as_azure_rect());
}
}
pub fn fill(&self,
path: &Path,
pattern: PatternRef,
draw_options: &DrawOptions) {
unsafe {
AzDrawTargetFill(self.azure_draw_target,
path.azure_path,
pattern.as_azure_pattern(),
&mut draw_options.as_azure_draw_options());
}
}
pub fn fill_rect(&self,
rect: &Rect<AzFloat>,
pattern: PatternRef,
draw_options: Option<&DrawOptions>) {
let mut draw_options = draw_options.map(|draw_options| {
draw_options.as_azure_draw_options()
});
let draw_options = match draw_options {
None => ptr::null_mut(),
Some(ref mut draw_options) => draw_options as *mut AzDrawOptions
};
unsafe {
AzDrawTargetFillRect(self.azure_draw_target,
&mut rect.as_azure_rect(),
pattern.as_azure_pattern(),
draw_options);
}
}
pub fn stroke(&self,
path: &Path,
pattern: PatternRef,
stroke_options: &StrokeOptions,
draw_options: &DrawOptions) {
unsafe {
AzDrawTargetStroke(self.azure_draw_target,
path.azure_path,
pattern.as_azure_pattern(),
&stroke_options.as_azure_stroke_options(),
&draw_options.as_azure_draw_options());
}
}
pub fn stroke_line(&self,
start: Point2D<AzFloat>,
end: Point2D<AzFloat>,
pattern: PatternRef,
stroke_options: &StrokeOptions,
draw_options: &DrawOptions) {
unsafe {
AzDrawTargetStrokeLine(self.azure_draw_target,
&mut start.as_azure_point(),
&mut end.as_azure_point(),
pattern.as_azure_pattern(),
&mut stroke_options.as_azure_stroke_options(),
&mut draw_options.as_azure_draw_options());
}
}
pub fn stroke_rect(&self,
rect: &Rect<AzFloat>,
pattern: PatternRef,
stroke_options: &StrokeOptions,
draw_options: &DrawOptions) {
unsafe {
AzDrawTargetStrokeRect(self.azure_draw_target,
&mut rect.as_azure_rect(),
pattern.as_azure_pattern(),
&mut stroke_options.as_azure_stroke_options(),
&mut draw_options.as_azure_draw_options());
}
}
pub fn draw_surface(&self,
surface: SourceSurface,
dest: Rect<AzFloat>,
source: Rect<AzFloat>,
surf_options: DrawSurfaceOptions,
options: DrawOptions) {
unsafe {
AzDrawTargetDrawSurface(self.azure_draw_target,
surface.azure_source_surface,
&mut dest.as_azure_rect(),
&mut source.as_azure_rect(),
&mut surf_options.as_azure_draw_surface_options(),
&mut options.as_azure_draw_options());
}
}
pub fn copy_surface(&self,
surface: SourceSurface,
source: Rect<i32>,
destination: Point2D<i32>) {
unsafe {
AzDrawTargetCopySurface(self.azure_draw_target,
surface.azure_source_surface,
&mut source.as_azure_int_rect(),
&mut destination.as_azure_int_point());
}
}
pub fn draw_filter(&self,
filter: &FilterNode,
source_rect: &Rect<AzFloat>,
dest_point: &Point2D<AzFloat>,
options: DrawOptions) {
unsafe {
AzDrawTargetDrawFilter(self.azure_draw_target,
filter.azure_filter_node,
&AzRect::from_rect(source_rect),
&AzPoint::from_point_2d(dest_point),
&options.as_azure_draw_options())
}
}
pub fn draw_surface_with_shadow(&self,
surface: SourceSurface,
dest: &Point2D<AzFloat>,
color: &Color,
offset: &Vector2D<AzFloat>,
sigma: AzFloat,
operator: CompositionOp) {
unsafe {
AzDrawTargetDrawSurfaceWithShadow(self.azure_draw_target,
surface.azure_source_surface,
&AzPoint::from_point_2d(dest),
color,
&AzPoint::from_vector_2d(offset),
sigma,
operator as AzCompositionOp)
}
}
pub fn snapshot(&self) -> SourceSurface {
unsafe {
let azure_surface = AzDrawTargetGetSnapshot(self.azure_draw_target);
SourceSurface::new(azure_surface)
}
}
/// Creates a source surface from the given data. If the data is zero-sized, returns `None`;
/// otherwise, returns the source surface.
pub fn create_source_surface_from_data(&self,
data: &[u8],
size: Size2D<i32>,
stride: i32,
format: SurfaceFormat)
-> Option<SourceSurface> {
assert!(data.len() as i32 >= stride * size.height);
if data.len() == 0 {
return None
}
unsafe {
let azure_surface = AzDrawTargetCreateSourceSurfaceFromData(
self.azure_draw_target,
data.as_ptr(),
&mut size.as_azure_int_size(),
stride,
format.as_azure_surface_format());
Some(SourceSurface::new(azure_surface))
}
}
pub fn create_similar_draw_target(&self, size: &Size2D<i32>, format: SurfaceFormat)
-> DrawTarget {
unsafe {
let new_draw_target = AzDrawTargetCreateSimilarDrawTarget(
self.azure_draw_target,
&size.as_azure_int_size(),
format.as_azure_surface_format());
DrawTarget {
azure_draw_target: new_draw_target,
backing: DrawTargetBacking::Empty,
}
}
}
pub fn create_shadow_draw_target(&self,
size: &Size2D<i32>,
format: SurfaceFormat,
sigma: AzFloat)
-> DrawTarget {
unsafe {
let new_draw_target = AzDrawTargetCreateShadowDrawTarget(
self.azure_draw_target,
&size.as_azure_int_size(),
format.as_azure_surface_format(),
sigma);
DrawTarget {
azure_draw_target: new_draw_target,
backing: DrawTargetBacking::Empty,
}
}
}
pub fn create_gradient_stops(&self,
gradient_stops: &[GradientStop],
extend_mode: ExtendMode)
-> GradientStops {
unsafe {
GradientStops::new(AzDrawTargetCreateGradientStops(
self.azure_draw_target,
gradient_stops.as_ptr(),
gradient_stops.len() as u32,
extend_mode.as_azure_extend_mode()))
}
}
pub fn set_transform(&self, matrix: &Transform2D<AzFloat>) {
unsafe {
AzDrawTargetSetTransform(self.azure_draw_target, &AzMatrix::from_matrix_2d(matrix));
}
}
pub fn fill_glyphs(&self,
azfontref: AzScaledFontRef,
mut glyphbuf: struct__AzGlyphBuffer,
azure_pattern: AzColorPatternRef,
mut options: struct__AzDrawOptions,
rendering_options: AzGlyphRenderingOptionsRef) {
unsafe {
AzDrawTargetFillGlyphs(self.azure_draw_target,
azfontref,
&mut glyphbuf,
azure_pattern,
&mut options,
rendering_options);
}
}
pub fn create_path_builder(&self) -> PathBuilder {
unsafe {
PathBuilder {
azure_path_builder: AzCreatePathBuilder(self.azure_draw_target)
}
}
}
pub fn create_filter(&self, filter_type: FilterType) -> FilterNode {
unsafe {
FilterNode {
azure_filter_node: AzDrawTargetCreateFilter(self.azure_draw_target,
filter_type.as_azure_filter_type()),
}
}
}
pub fn push_clip(&self, path: &Path) {
unsafe {
AzDrawTargetPushClip(self.azure_draw_target,path.azure_path);
}
}
pub fn push_clip_rect(&self, rect: &Rect<AzFloat>) {
unsafe {
AzDrawTargetPushClipRect(self.azure_draw_target, &rect.as_azure_rect());
}
}
pub fn pop_clip(&self) {
unsafe {
AzDrawTargetPopClip(self.azure_draw_target);
}
}
}
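// Illustrative usage sketch (not part of the original bindings): drawing a
// filled rectangle with the wrappers above. `BackendType::Skia` is only an
// assumption about which backend is available at runtime.
//
//     let target = DrawTarget::new(BackendType::Skia,
//                                  Size2D::new(256, 256),
//                                  SurfaceFormat::B8G8R8A8);
//     let pattern = ColorPattern::new(Color::rgb(200, 30, 30));
//     let rect = Rect::new(Point2D::new(10.0, 10.0), Size2D::new(50.0, 50.0));
//     target.fill_rect(&rect, PatternRef::Color(&pattern), Some(&DrawOptions::default()));
//     target.flush();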
// Ugly workaround for the lack of explicit self.
pub fn clone_mutable_draw_target(draw_target: &mut DrawTarget) -> DrawTarget {
return draw_target.clone();
}
pub enum DrawTargetBacking {
Empty, // The backing is completely owned by the DrawTarget.
Data(Arc<Vec<u8>>),
GLRasterizationContext(Arc<GLRasterizationContext>),
}
impl Drop for DrawTargetBacking {
fn drop(&mut self) {
match *self {
DrawTargetBacking::Empty | DrawTargetBacking::Data(_) |
DrawTargetBacking::GLRasterizationContext(_) => { }
}
}
}
impl DrawTargetBacking {
pub fn clone(&self) -> DrawTargetBacking {
match *self {
DrawTargetBacking::Empty => DrawTargetBacking::Empty,
DrawTargetBacking::Data(ref arc_data) => DrawTargetBacking::Data(arc_data.clone()),
DrawTargetBacking::GLRasterizationContext(ref context_ref) =>
DrawTargetBacking::GLRasterizationContext(context_ref.clone()),
}
}
pub fn make_current(&self) {
match *self {
DrawTargetBacking::Empty | DrawTargetBacking::Data(_) => { }
DrawTargetBacking::GLRasterizationContext(ref context_ref) =>
context_ref.make_current(),
}
}
pub fn flush(&self) {
match *self {
DrawTargetBacking::Empty | DrawTargetBacking::Data(_) => { },
DrawTargetBacking::GLRasterizationContext(ref context_ref) =>
context_ref.flush(),
}
}
pub fn finish(&self) {
match *self {
DrawTargetBacking::Empty | DrawTargetBacking::Data(_) => {},
DrawTargetBacking::GLRasterizationContext(ref context_ref) =>
context_ref.flush_to_surface(),
}
}
}
#[derive(Debug)]
pub struct SourceSurface {
pub azure_source_surface: AzSourceSurfaceRef,
}
impl Clone for SourceSurface {
fn clone(&self) -> SourceSurface {
unsafe {
SourceSurface {
azure_source_surface: AzRetainSourceSurface(self.azure_source_surface),
}
}
}
}
impl Drop for SourceSurface {
fn drop(&mut self) {
unsafe {
AzReleaseSourceSurface(self.azure_source_surface);
}
}
}
impl SourceSurface {
pub fn new(azure_source_surface: AzSourceSurfaceRef) -> SourceSurface {
SourceSurface {
azure_source_surface: azure_source_surface
}
}
}
#[derive(Debug)]
pub struct GradientStops {
pub azure_gradient_stops: AzGradientStopsRef,
}
impl Drop for GradientStops {
fn drop(&mut self) {
unsafe {
AzReleaseGradientStops(self.azure_gradient_stops);
}
}
}
impl GradientStops {
pub fn new(azure_gradient_stops: AzGradientStopsRef) -> GradientStops {
GradientStops {
azure_gradient_stops: azure_gradient_stops,
}
}
}
#[repr(i32)]
#[derive(Clone, PartialEq, Debug)]
pub enum ExtendMode {
Clamp = 0,
Repeat = 1,
Reflect = 2,
}
impl ExtendMode {
fn as_azure_extend_mode(self) -> AzExtendMode {
self as AzExtendMode
}
}
// FIXME Rust #8753 no fixed stack segment for default methods
#[allow(non_snake_case)]
unsafe fn AzSourceSurfaceGetSize_(aSurface: AzSourceSurfaceRef) -> AzIntSize {
AzSourceSurfaceGetSize(aSurface)
}
// FIXME Rust #8753 no fixed stack segment for default methods
#[allow(non_snake_case)]
unsafe fn AzSourceSurfaceGetFormat_(aSurface: AzSourceSurfaceRef) -> AzSurfaceFormat {
AzSourceSurfaceGetFormat(aSurface)
}
pub trait SourceSurfaceMethods {
fn get_azure_source_surface(&self) -> AzSourceSurfaceRef;
fn size(&self) -> Size2D<i32> {
let size = unsafe {
AzSourceSurfaceGetSize_(self.get_azure_source_surface())
};
Size2D::new(size.width, size.height)
}
fn format(&self) -> SurfaceFormat {
unsafe {
SurfaceFormat::new(AzSourceSurfaceGetFormat_(self.get_azure_source_surface()))
}
}
}
impl SourceSurface {
pub fn get_data_surface(&self) -> DataSourceSurface {
let data_source_surface = unsafe {
AzSourceSurfaceGetDataSurface(self.azure_source_surface)
};
DataSourceSurface {
azure_data_source_surface: data_source_surface
}
}
}
impl SourceSurfaceMethods for SourceSurface {
fn get_azure_source_surface(&self) -> AzSourceSurfaceRef { self.azure_source_surface }
}
#[derive(Debug)]
pub struct DataSourceSurface {
pub azure_data_source_surface: AzDataSourceSurfaceRef,
}
impl Drop for DataSourceSurface {
fn drop(&mut self) {
unsafe {
AzReleaseSourceSurface(self.azure_data_source_surface);
}
}
}
impl DataSourceSurface {
pub fn with_data<F: FnOnce(&[u8])>(&self, f: F) {
unsafe {
let buf = AzDataSourceSurfaceGetData(self.azure_data_source_surface) as *const u8;
let len = self.stride() * self.size().height;
let slice = slice::from_raw_parts(buf, len as usize);
f(slice)
}
}
pub fn stride(&self) -> i32 {
unsafe {
AzDataSourceSurfaceGetStride(self.azure_data_source_surface)
}
}
// FIXME: Workaround for lack of working cross-crate default methods.
pub fn get_size(&self) -> Size2D<i32> {
self.size()
}
}
impl SourceSurfaceMethods for DataSourceSurface {
fn get_azure_source_surface(&self) -> AzSourceSurfaceRef {
self.azure_data_source_surface
}
}
#[derive(Debug)]
pub struct Path {
pub azure_path: AzPathRef
}
impl Path {
pub fn contains_point(&self, x: f64, y: f64, matrix: &Transform2D<AzFloat>) -> bool {
let point: Point2D<AzFloat> = Point2D::new(x as f32, y as f32);
let mut az_point = point.as_azure_point();
unsafe {
AzPathContainsPoint(self.azure_path, &mut az_point,
&AzMatrix::from_matrix_2d(matrix))
}
}
pub fn copy_to_builder(&self) -> PathBuilder {
unsafe {
PathBuilder {
azure_path_builder: AzPathCopyToBuilder(self.azure_path)
}
}
}
}
impl Drop for Path {
fn drop(&mut self) {
unsafe {
AzReleasePath(self.azure_path);
}
}
}
#[derive(Debug)]
pub struct PathBuilder {
pub azure_path_builder: AzPathBuilderRef
}
impl PathBuilder {
pub fn move_to(&self, point: Point2D<AzFloat>) {
let mut az_point = point.as_azure_point();
unsafe {
AzPathBuilderMoveTo(self.azure_path_builder, &mut az_point);
}
}
pub fn line_to(&self, point: Point2D<AzFloat>) {
let mut az_point = point.as_azure_point();
unsafe {
AzPathBuilderLineTo(self.azure_path_builder, &mut az_point);
}
}
/// Adds an arc to the current figure.
pub fn arc(&self,
origin: Point2D<AzFloat>,
radius: AzFloat,
start_angle: AzFloat,
end_angle: AzFloat,
anticlockwise: bool) {
let origin = origin.as_azure_point();
unsafe {
AzPathBuilderArc(self.azure_path_builder,
&origin,
radius,
start_angle,
end_angle,
anticlockwise)
}
}
/// Adds a quadratic Bézier curve to the current figure.
pub fn quadratic_curve_to(&self,
control_point: &Point2D<AzFloat>,
end_point: &Point2D<AzFloat>) {
unsafe {
AzPathBuilderQuadraticBezierTo(self.azure_path_builder,
&control_point.as_azure_point(),
&end_point.as_azure_point())
}
}
/// Adds a cubic Bézier curve to the current figure.
pub fn bezier_curve_to(&self,
control_point_1: &Point2D<AzFloat>,
control_point_2: &Point2D<AzFloat>,
control_point_3: &Point2D<AzFloat>) {
unsafe {
AzPathBuilderBezierTo(self.azure_path_builder,
&control_point_1.as_azure_point(),
&control_point_2.as_azure_point(),
&control_point_3.as_azure_point())
}
}
/// Returns the current last point of the current path.
pub fn get_current_point(&self) -> AzPoint {
unsafe {
AzPathBuilderCurrentPoint(self.azure_path_builder)
}
}
/// Closes the current path.
pub fn close(&self) {
unsafe {
AzPathBuilderClose(self.azure_path_builder)
}
}
pub fn finish(&self) -> Path {
let az_path = unsafe { AzPathBuilderFinish(self.azure_path_builder) };
Path {
azure_path : az_path
}
}
}
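// Illustrative usage sketch (not part of the original bindings): building and
// filling a triangular path. `draw_target` and `pattern` are assumed to exist
// (see `DrawTarget` and `ColorPattern` above).
//
//     let builder = draw_target.create_path_builder();
//     builder.move_to(Point2D::new(0.0, 0.0));
//     builder.line_to(Point2D::new(100.0, 0.0));
//     builder.line_to(Point2D::new(50.0, 80.0));
//     builder.close();
//     let path = builder.finish();
//     draw_target.fill(&path, PatternRef::Color(&pattern), &DrawOptions::default());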
impl Drop for PathBuilder {
fn drop(&mut self) {
unsafe {
AzReleasePathBuilder(self.azure_path_builder);
}
}
}
#[derive(Debug)]
pub struct LinearGradientPattern {
pub azure_linear_gradient_pattern: AzLinearGradientPatternRef,
pub begin: Point2D<AzFloat>,
pub end: Point2D<AzFloat>,
}
impl Drop for LinearGradientPattern {
fn drop(&mut self) {
unsafe {
AzReleasePattern(self.azure_linear_gradient_pattern);
}
}
}
impl Clone for LinearGradientPattern {
fn clone(&self) -> LinearGradientPattern {
unsafe {
LinearGradientPattern {
azure_linear_gradient_pattern:
AzCloneLinearGradientPattern(self.azure_linear_gradient_pattern),
begin: self.begin,
end: self.end,
}
}
}
}
impl LinearGradientPattern {
pub fn new(begin: &Point2D<AzFloat>,
end: &Point2D<AzFloat>,
stops: GradientStops,
matrix: &Transform2D<AzFloat>)
-> LinearGradientPattern {
unsafe {
LinearGradientPattern {
azure_linear_gradient_pattern:
AzCreateLinearGradientPattern(&AzPoint::from_point_2d(begin),
&AzPoint::from_point_2d(end),
stops.azure_gradient_stops,
&AzMatrix::from_matrix_2d(matrix)),
begin: *begin,
end: *end,
}
}
}
pub fn is_zero_size(&self) -> bool {
self.begin == self.end
}
}
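// Illustrative usage sketch (not part of the original bindings): a vertical
// linear gradient. `draw_target` is assumed to exist and `stops_array` is an
// assumed `&[GradientStop]` built by the caller.
//
//     let stops = draw_target.create_gradient_stops(stops_array, ExtendMode::Clamp);
//     let gradient = LinearGradientPattern::new(&Point2D::new(0.0, 0.0),
//                                               &Point2D::new(0.0, 100.0),
//                                               stops,
//                                               &Transform2D::identity());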
#[derive(Debug)]
pub struct RadialGradientPattern {
pub azure_radial_gradient_pattern: AzRadialGradientPatternRef,
}
impl Drop for RadialGradientPattern {
fn drop(&mut self) {
unsafe {
AzReleasePattern(self.azure_radial_gradient_pattern);
}
}
}
impl Clone for RadialGradientPattern {
fn clone(&self) -> RadialGradientPattern {
unsafe {
RadialGradientPattern {
azure_radial_gradient_pattern:
AzCloneRadialGradientPattern(self.azure_radial_gradient_pattern),
}
}
}
}
impl RadialGradientPattern {
pub fn new(center1: &Point2D<AzFloat>,
center2: &Point2D<AzFloat>,
radius1: AzFloat,
radius2: AzFloat,
stops: GradientStops,
matrix: &Transform2D<AzFloat>)
-> RadialGradientPattern {
unsafe {
RadialGradientPattern {
azure_radial_gradient_pattern:
AzCreateRadialGradientPattern(&AzPoint::from_point_2d(center1),
&AzPoint::from_point_2d(center2),
radius1,
radius2,
stops.azure_gradient_stops,
&AzMatrix::from_matrix_2d(matrix)),
}
}
}
}
#[derive(Debug)]
pub struct SurfacePattern {
pub azure_surface_pattern: AzSurfacePatternRef,
pub repeat_x: bool,
pub repeat_y: bool,
}
impl Drop for SurfacePattern {
fn drop(&mut self) {
unsafe {
AzReleasePattern(self.azure_surface_pattern);
}
}
}
impl Clone for SurfacePattern {
fn clone(&self) -> SurfacePattern {
unsafe {
SurfacePattern {
azure_surface_pattern:
AzCloneSurfacePattern(self.azure_surface_pattern),
repeat_x: self.repeat_x,
repeat_y: self.repeat_y,
}
}
}
}
impl SurfacePattern {
pub fn new(surface: AzSourceSurfaceRef,
repeat_x: bool,
repeat_y: bool,
matrix: &Transform2D<AzFloat>)
-> SurfacePattern {
let mode = if !repeat_x && !repeat_y {
ExtendMode::Clamp
} else {
ExtendMode::Repeat
};
unsafe {
SurfacePattern {
azure_surface_pattern:
AzCreateSurfacePattern(surface,
mode.as_azure_extend_mode(),
&AzMatrix::from_matrix_2d(matrix)),
repeat_x: repeat_x,
repeat_y: repeat_y,
}
}
}
pub fn size(&self) -> AzIntSize {
unsafe {
AzSurfacePatternGetSize(self.azure_surface_pattern)
}
}
}
#[derive(Debug)]
pub enum PatternRef<'a> {
Color(&'a ColorPattern),
LinearGradient(&'a LinearGradientPattern),
RadialGradient(&'a RadialGradientPattern),
Surface(&'a SurfacePattern),
}
impl<'a> PatternRef<'a> {
fn as_azure_pattern(&self) -> AzPatternRef {
match *self {
PatternRef::Color(color_pattern) => {
color_pattern.azure_color_pattern
},
PatternRef::LinearGradient(linear_gradient_pattern) => {
linear_gradient_pattern.azure_linear_gradient_pattern
},
PatternRef::RadialGradient(radial_gradient_pattern) => {
radial_gradient_pattern.azure_radial_gradient_pattern
},
PatternRef::Surface(surface_pattern) => {
surface_pattern.azure_surface_pattern
},
}
}
}
#[derive(Clone, Debug)]
pub enum Pattern {
Color(ColorPattern),
LinearGradient(LinearGradientPattern),
RadialGradient(RadialGradientPattern),
Surface(SurfacePattern),
}
impl Pattern {
pub fn to_pattern_ref(&self) -> PatternRef {
match *self {
Pattern::Color(ref color_pattern) => PatternRef::Color(color_pattern),
Pattern::LinearGradient(ref linear_gradient_pattern) => {
PatternRef::LinearGradient(linear_gradient_pattern)
},
Pattern::RadialGradient(ref radial_gradient_pattern) => {
PatternRef::RadialGradient(radial_gradient_pattern)
},
Pattern::Surface(ref surface_pattern) => {
PatternRef::Surface(surface_pattern)
},
}
}
}
#[derive(Debug)]
pub struct FilterNode {
pub azure_filter_node: AzFilterNodeRef,
}
impl Drop for FilterNode {
fn drop(&mut self) {
unsafe {
AzReleaseFilterNode(self.azure_filter_node);
}
}
}
impl FilterNode {
pub fn set_input<FIndex,FInput>(&self, index: FIndex, input: &FInput)
where FIndex: FilterInputIndex, FInput: FilterInput {
input.set(self, index.azure_index())
}
pub fn set_attribute<A>(&self, attribute: A) where A: FilterAttribute {
attribute.set(self)
}
}
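// Illustrative usage sketch (not part of the original bindings): configuring a
// Gaussian blur filter. `draw_target`, `surface`, `source_rect` and
// `dest_point` are assumed to exist.
//
//     let blur = draw_target.create_filter(FilterType::GaussianBlur);
//     blur.set_attribute(GaussianBlurAttribute::StdDeviation(2.0));
//     blur.set_input(GaussianBlurInput, &surface);
//     draw_target.draw_filter(&blur, &source_rect, &dest_point, DrawOptions::default());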
#[derive(Debug)]
pub struct ColorMatrixInput;
#[derive(Debug)]
pub struct CompositeInput;
#[derive(Debug)]
pub struct FloodFilterInput;
#[derive(Debug)]
pub struct GaussianBlurInput;
#[derive(Debug)]
pub struct LinearTransferInput;
#[derive(Debug)]
pub struct TableTransferInput;
pub trait FilterInputIndex {
fn azure_index(&self) -> u32;
}
impl FilterInputIndex for ColorMatrixInput {
fn azure_index(&self) -> u32 {
AZ_IN_COLOR_MATRIX_IN
}
}
impl FilterInputIndex for CompositeInput {
fn azure_index(&self) -> u32 {
AZ_IN_COMPOSITE_IN
}
}
impl FilterInputIndex for FloodFilterInput {
fn azure_index(&self) -> u32 {
AZ_IN_FLOOD_IN
}
}
impl FilterInputIndex for GaussianBlurInput {
fn azure_index(&self) -> u32 {
AZ_IN_GAUSSIAN_BLUR_IN
}
}
impl FilterInputIndex for LinearTransferInput {
fn azure_index(&self) -> u32 {
AZ_IN_LINEAR_TRANSFER_IN
}
}
impl FilterInputIndex for TableTransferInput {
fn azure_index(&self) -> u32 {
AZ_IN_TABLE_TRANSFER_IN
}
}
pub trait FilterAttribute {
fn set(&self, filter_node: &FilterNode);
}
#[derive(Debug)]
pub enum ColorMatrixAttribute {
Matrix(Matrix5x4),
}
#[derive(Debug)]
pub enum FloodAttribute {
Color(Color),
}
#[derive(Debug)]
pub enum GaussianBlurAttribute {
StdDeviation(AzFloat),
}
#[derive(Debug)]
pub enum LinearTransferAttribute {
DisableR(bool),
DisableG(bool),
DisableB(bool),
DisableA(bool),
SlopeR(AzFloat),
SlopeG(AzFloat),
SlopeB(AzFloat),
SlopeA(AzFloat),
InterceptR(AzFloat),
InterceptG(AzFloat),
InterceptB(AzFloat),
InterceptA(AzFloat),
}
#[derive(Debug)]
pub enum TableTransferAttribute<'a> {
DisableR(bool),
DisableG(bool),
DisableB(bool),
DisableA(bool),
TableR(&'a [AzFloat]),
TableG(&'a [AzFloat]),
TableB(&'a [AzFloat]),
TableA(&'a [AzFloat]),
}
impl FilterAttribute for ColorMatrixAttribute {
fn set(&self, filter_node: &FilterNode) {
let ColorMatrixAttribute::Matrix(ref value) = *self;
unsafe {
AzFilterNodeSetMatrix5x4Attribute(filter_node.azure_filter_node,
AZ_ATT_COLOR_MATRIX_MATRIX,
&value.as_azure_matrix_5x4())
}
}
}
impl FilterAttribute for FloodAttribute {
fn set(&self, filter_node: &FilterNode) {
let FloodAttribute::Color(value) = *self;
unsafe {
AzFilterNodeSetColorAttribute(filter_node.azure_filter_node,
AZ_ATT_FLOOD_COLOR,
&value)
}
}
}
impl FilterAttribute for GaussianBlurAttribute {
fn set(&self, filter_node: &FilterNode) {
let GaussianBlurAttribute::StdDeviation(value) = *self;
unsafe {
AzFilterNodeSetFloatAttribute(filter_node.azure_filter_node,
AZ_ATT_GAUSSIAN_BLUR_STD_DEVIATION,
value)
}
}
}
impl FilterAttribute for LinearTransferAttribute {
fn set(&self, filter_node: &FilterNode) {
unsafe {
match *self {
LinearTransferAttribute::DisableR(value) => {
AzFilterNodeSetBoolAttribute(filter_node.azure_filter_node,
AZ_ATT_TRANSFER_DISABLE_R,
value)
}
LinearTransferAttribute::DisableG(value) => {
AzFilterNodeSetBoolAttribute(filter_node.azure_filter_node,
AZ_ATT_TRANSFER_DISABLE_G,
value)
}
LinearTransferAttribute::DisableB(value) => {
AzFilterNodeSetBoolAttribute(filter_node.azure_filter_node,
AZ_ATT_TRANSFER_DISABLE_B,
value)
}
LinearTransferAttribute::DisableA(value) => {
AzFilterNodeSetBoolAttribute(filter_node.azure_filter_node,
AZ_ATT_TRANSFER_DISABLE_A,
value)
}
LinearTransferAttribute::SlopeR(value) => {
AzFilterNodeSetFloatAttribute(filter_node.azure_filter_node,
AZ_ATT_LINEAR_TRANSFER_SLOPE_R,
value)
}
LinearTransferAttribute::SlopeG(value) => {
AzFilterNodeSetFloatAttribute(filter_node.azure_filter_node,
AZ_ATT_LINEAR_TRANSFER_SLOPE_G,
value)
}
LinearTransferAttribute::SlopeB(value) => {
AzFilterNodeSetFloatAttribute(filter_node.azure_filter_node,
AZ_ATT_LINEAR_TRANSFER_SLOPE_B,
value)
}
LinearTransferAttribute::SlopeA(value) => {
AzFilterNodeSetFloatAttribute(filter_node.azure_filter_node,
AZ_ATT_LINEAR_TRANSFER_SLOPE_A,
value)
}
LinearTransferAttribute::InterceptR(value) => {
AzFilterNodeSetFloatAttribute(filter_node.azure_filter_node,
AZ_ATT_LINEAR_TRANSFER_INTERCEPT_R,
value)
}
LinearTransferAttribute::InterceptG(value) => {
AzFilterNodeSetFloatAttribute(filter_node.azure_filter_node,
AZ_ATT_LINEAR_TRANSFER_INTERCEPT_G,
value)
}
LinearTransferAttribute::InterceptB(value) => {
AzFilterNodeSetFloatAttribute(filter_node.azure_filter_node,
AZ_ATT_LINEAR_TRANSFER_INTERCEPT_B,
value)
}
LinearTransferAttribute::InterceptA(value) => {
AzFilterNodeSetFloatAttribute(filter_node.azure_filter_node,
AZ_ATT_LINEAR_TRANSFER_INTERCEPT_A,
value)
}
}
}
}
}
impl<'a> FilterAttribute for TableTransferAttribute<'a> {
fn set(&self, filter_node: &FilterNode) {
unsafe {
match *self {
TableTransferAttribute::DisableR(value) => {
AzFilterNodeSetBoolAttribute(filter_node.azure_filter_node,
AZ_ATT_TRANSFER_DISABLE_R,
value)
}
TableTransferAttribute::DisableG(value) => {
AzFilterNodeSetBoolAttribute(filter_node.azure_filter_node,
AZ_ATT_TRANSFER_DISABLE_G,
value)
}
TableTransferAttribute::DisableB(value) => {
AzFilterNodeSetBoolAttribute(filter_node.azure_filter_node,
AZ_ATT_TRANSFER_DISABLE_B,
value)
}
TableTransferAttribute::DisableA(value) => {
AzFilterNodeSetBoolAttribute(filter_node.azure_filter_node,
AZ_ATT_TRANSFER_DISABLE_A,
value)
}
TableTransferAttribute::TableR(value) => {
AzFilterNodeSetFloatArrayAttribute(filter_node.azure_filter_node,
AZ_ATT_TABLE_TRANSFER_TABLE_R,
value.as_ptr(),
value.len() as u32)
}
TableTransferAttribute::TableG(value) => {
AzFilterNodeSetFloatArrayAttribute(filter_node.azure_filter_node,
AZ_ATT_TABLE_TRANSFER_TABLE_G,
value.as_ptr(),
value.len() as u32)
}
TableTransferAttribute::TableB(value) => {
AzFilterNodeSetFloatArrayAttribute(filter_node.azure_filter_node,
AZ_ATT_TABLE_TRANSFER_TABLE_B,
value.as_ptr(),
value.len() as u32)
}
TableTransferAttribute::TableA(value) => {
AzFilterNodeSetFloatArrayAttribute(filter_node.azure_filter_node,
AZ_ATT_TABLE_TRANSFER_TABLE_A,
value.as_ptr(),
value.len() as u32)
}
}
}
}
}
#[derive(Debug)]
pub enum FilterType {
ColorMatrix,
Composite,
Flood,
GaussianBlur,
LinearTransfer,
TableTransfer,
}
impl FilterType {
pub fn as_azure_filter_type(self) -> AzFilterType {
match self {
FilterType::ColorMatrix => AZ_FILTER_TYPE_COLOR_MATRIX,
FilterType::Composite => AZ_FILTER_TYPE_COMPOSITE,
FilterType::Flood => AZ_FILTER_TYPE_FLOOD,
FilterType::GaussianBlur => AZ_FILTER_TYPE_GAUSSIAN_BLUR,
FilterType::LinearTransfer => AZ_FILTER_TYPE_LINEAR_TRANSFER,
FilterType::TableTransfer => AZ_FILTER_TYPE_TABLE_TRANSFER,
}
}
}
pub trait FilterInput {
fn set(&self, filter: &FilterNode, index: u32);
}
<|fim▁hole|> unsafe {
AzFilterNodeSetSourceSurfaceInput(filter.azure_filter_node,
index,
self.azure_source_surface)
}
}
}
impl FilterInput for FilterNode {
fn set(&self, filter: &FilterNode, index: u32) {
unsafe {
AzFilterNodeSetFilterNodeInput(filter.azure_filter_node, index, self.azure_filter_node)
}
}
}
#[derive(PartialEq, Clone, Debug)]
pub struct Matrix5x4 {
pub m11: AzFloat, pub m12: AzFloat, pub m13: AzFloat, pub m14: AzFloat,
pub m21: AzFloat, pub m22: AzFloat, pub m23: AzFloat, pub m24: AzFloat,
pub m31: AzFloat, pub m32: AzFloat, pub m33: AzFloat, pub m34: AzFloat,
pub m41: AzFloat, pub m42: AzFloat, pub m43: AzFloat, pub m44: AzFloat,
pub m51: AzFloat, pub m52: AzFloat, pub m53: AzFloat, pub m54: AzFloat,
}
impl Matrix5x4 {
#[inline]
pub fn as_azure_matrix_5x4(&self) -> struct__AzMatrix5x4 {
struct__AzMatrix5x4 {
_11: self.m11, _12: self.m12, _13: self.m13, _14: self.m14,
_21: self.m21, _22: self.m22, _23: self.m23, _24: self.m24,
_31: self.m31, _32: self.m32, _33: self.m33, _34: self.m34,
_41: self.m41, _42: self.m42, _43: self.m43, _44: self.m44,
_51: self.m51, _52: self.m52, _53: self.m53, _54: self.m54,
}
}
}
impl AzMatrix {
#[inline]
fn as_matrix_2d(&self) -> Transform2D<AzFloat> {
Transform2D::row_major(
self._11, self._12, self._21, self._22, self._31, self._32)
}
#[inline]
fn from_matrix_2d(matrix: &Transform2D<AzFloat>) -> Self {
AzMatrix {
_11: matrix.m11,
_12: matrix.m12,
_21: matrix.m21,
_22: matrix.m22,
_31: matrix.m31,
_32: matrix.m32,
}
}
}
impl AzPoint {
#[inline]
fn from_point_2d(point: &Point2D<AzFloat>) -> Self {
AzPoint {
x: point.x,
y: point.y,
}
}
#[inline]
fn from_vector_2d(point: &Vector2D<AzFloat>) -> Self {
AzPoint {
x: point.x,
y: point.y,
}
}
}
impl AzRect {
#[inline]
fn from_rect(rect: &Rect<AzFloat>) -> Self {
AzRect {
x: rect.origin.x,
y: rect.origin.y,
width: rect.size.width,
height: rect.size.height,
}
}
}<|fim▁end|> | impl FilterInput for SourceSurface {
fn set(&self, filter: &FilterNode, index: u32) { |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|>from django.conf import settings
from . import defaults
__title__ = 'fobi.contrib.plugins.form_elements.fields.' \
'select_multiple_with_max.conf'
__author__ = 'Artur Barseghyan <[email protected]>'
__copyright__ = '2014-2017 Artur Barseghyan'
__license__ = 'GPL 2.0/LGPL 2.1'
__all__ = ('get_setting',)
def get_setting(setting, override=None):
"""Get setting.
Get a setting from
`fobi.contrib.plugins.form_elements.fields.select_multiple_with_max`
conf module, falling back to the default.<|fim▁hole|> :param override: Value to use when no setting is available. Defaults
to None.
:return: Setting value.
"""
if override is not None:
return override
if hasattr(
settings,
'FOBI_FORM_ELEMENT_SELECT_MULTIPLE_WITH_MAX_{0}'.format(setting)
):
return getattr(
settings,
'FOBI_FORM_ELEMENT_SELECT_MULTIPLE_WITH_MAX_{0}'.format(setting)
)
else:
return getattr(defaults, setting)<|fim▁end|> |
If override is not None, it will be used instead of the setting.
:param setting: String with setting name |
<|file_name|>AbstractFactory.java<|end_file_name|><|fim▁begin|>/*
* WorldEdit, a Minecraft world manipulation toolkit
* Copyright (C) sk89q <http://www.sk89q.com>
* Copyright (C) WorldEdit team and contributors
*
* This program is free software: you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by the
* Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.sk89q.worldedit.internal.registry;
import com.sk89q.worldedit.WorldEdit;
import com.sk89q.worldedit.extension.input.ParserContext;
import com.sk89q.worldedit.extension.input.InputParseException;
import com.sk89q.worldedit.extension.input.NoMatchException;
import java.util.ArrayList;
import java.util.List;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* An abstract implementation of a factory for internal usage.
*
* @param <E> the element that the factory returns
*/
public abstract class AbstractFactory<E> {
protected final WorldEdit worldEdit;
protected final List<InputParser<E>> parsers = new ArrayList<InputParser<E>>();
/**
* Create a new factory.
*<|fim▁hole|> protected AbstractFactory(WorldEdit worldEdit) {
checkNotNull(worldEdit);
this.worldEdit = worldEdit;
}
public E parseFromInput(String input, ParserContext context) throws InputParseException {
E match;
for (InputParser<E> parser : parsers) {
match = parser.parseFromInput(input, context);
if (match != null) {
return match;
}
}
throw new NoMatchException("No match for '" + input + "'");
}
}<|fim▁end|> | * @param worldEdit the WorldEdit instance
*/ |
<|file_name|>account_journal.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from common_report_header import common_report_header
from openerp.report import report_sxw
class journal_print(report_sxw.rml_parse, common_report_header):
def __init__(self, cr, uid, name, context=None):
if context is None:
context = {}
super(journal_print, self).__init__(cr, uid, name, context=context)
self.period_ids = []
self.last_move_id = False
self.journal_ids = []
self.sort_selection = 'am.name'
self.localcontext.update({
'time': time,
'lines': self.lines,
'sum_debit': self._sum_debit,
'sum_credit': self._sum_credit,
'get_start_period': self.get_start_period,
'get_end_period': self.get_end_period,
'get_account': self._get_account,
'get_filter': self._get_filter,
'get_start_date': self._get_start_date,
'get_end_date': self._get_end_date,
'get_fiscalyear': self._get_fiscalyear,
'display_currency':self._display_currency,
'get_sortby': self._get_sortby,
'get_target_move': self._get_target_move,
'check_last_move_id': self.check_last_move_id,
'set_last_move_id': self.set_last_move_id,
'tax_codes': self.tax_codes,
'sum_vat': self._sum_vat,
})
def set_context(self, objects, data, ids, report_type=None):
obj_move = self.pool.get('account.move.line')
new_ids = ids
self.query_get_clause = ''
self.target_move = data['form'].get('target_move', 'all')
if (data['model'] == 'ir.ui.menu'):
self.period_ids = tuple(data['form']['periods'])
self.journal_ids = tuple(data['form']['journal_ids'])
new_ids = data['form'].get('active_ids', [])
self.query_get_clause = 'AND '
self.query_get_clause += obj_move._query_get(self.cr, self.uid, obj='l', context=data['form'].get('used_context', {}))
self.sort_selection = data['form'].get('sort_selection', 'date')
objects = self.pool.get('account.journal.period').browse(self.cr, self.uid, new_ids)
elif new_ids:
            # in case of direct access from the account.journal.period object, we need to set the journal_ids and period_ids
self.cr.execute('SELECT period_id, journal_id FROM account_journal_period WHERE id IN %s', (tuple(new_ids),))
res = self.cr.fetchall()
self.period_ids, self.journal_ids = zip(*res)
return super(journal_print, self).set_context(objects, data, ids, report_type=report_type)
def set_last_move_id(self, move_id):
self.last_move_id = move_id
def check_last_move_id(self, move_id):
'''
return True if we need to draw a gray line above this line, used to separate moves
'''
if self.last_move_id:
return not(self.last_move_id == move_id)
return False
def tax_codes(self, period_id, journal_id):
ids_journal_period = self.pool.get('account.journal.period').search(self.cr, self.uid,
[('journal_id', '=', journal_id), ('period_id', '=', period_id)])
self.cr.execute(
'select distinct tax_code_id from account_move_line ' \
'where period_id=%s and journal_id=%s and tax_code_id is not null and state<>\'draft\'',
(period_id, journal_id)
)
ids = map(lambda x: x[0], self.cr.fetchall())
tax_code_ids = []
if ids:
self.cr.execute('select id from account_tax_code where id in %s order by code', (tuple(ids),))
tax_code_ids = map(lambda x: x[0], self.cr.fetchall())
tax_codes = self.pool.get('account.tax.code').browse(self.cr, self.uid, tax_code_ids)
return tax_codes
def _sum_vat(self, period_id, journal_id, tax_code_id):
self.cr.execute('select sum(tax_amount) from account_move_line where ' \
'period_id=%s and journal_id=%s and tax_code_id=%s and state<>\'draft\'',
(period_id, journal_id, tax_code_id))
return self.cr.fetchone()[0] or 0.0
def _sum_debit(self, period_id=False, journal_id=False):
if journal_id and isinstance(journal_id, int):
journal_id = [journal_id]
if period_id and isinstance(period_id, int):
period_id = [period_id]
if not journal_id:
journal_id = self.journal_ids
if not period_id:
period_id = self.period_ids
if not (period_id and journal_id):
return 0.0
move_state = ['draft','posted']
if self.target_move == 'posted':
move_state = ['posted']
self.cr.execute('SELECT SUM(debit) FROM account_move_line l, account_move am '
'WHERE l.move_id=am.id AND am.state IN %s AND l.period_id IN %s AND l.journal_id IN %s ' + self.query_get_clause + ' ',
(tuple(move_state), tuple(period_id), tuple(journal_id)))
return self.cr.fetchone()[0] or 0.0
def _sum_credit(self, period_id=False, journal_id=False):<|fim▁hole|> if not journal_id:
journal_id = self.journal_ids
if not period_id:
period_id = self.period_ids
if not (period_id and journal_id):
return 0.0
move_state = ['draft','posted']
if self.target_move == 'posted':
move_state = ['posted']
self.cr.execute('SELECT SUM(l.credit) FROM account_move_line l, account_move am '
'WHERE l.move_id=am.id AND am.state IN %s AND l.period_id IN %s AND l.journal_id IN %s '+ self.query_get_clause+'',
(tuple(move_state), tuple(period_id), tuple(journal_id)))
return self.cr.fetchone()[0] or 0.0
def lines(self, period_id, journal_id=False):
if not journal_id:
journal_id = self.journal_ids
else:
journal_id = [journal_id]
obj_mline = self.pool.get('account.move.line')
self.cr.execute('update account_journal_period set state=%s where journal_id IN %s and period_id=%s and state=%s', ('printed', self.journal_ids, period_id, 'draft'))
move_state = ['draft','posted']
if self.target_move == 'posted':
move_state = ['posted']
self.cr.execute('SELECT l.id FROM account_move_line l, account_move am WHERE l.move_id=am.id AND am.state IN %s AND l.period_id=%s AND l.journal_id IN %s ' + self.query_get_clause + ' ORDER BY '+ self.sort_selection + ', l.move_id',(tuple(move_state), period_id, tuple(journal_id) ))
ids = map(lambda x: x[0], self.cr.fetchall())
return obj_mline.browse(self.cr, self.uid, ids)
def _set_get_account_currency_code(self, account_id):
self.cr.execute("SELECT c.symbol AS code "\
"FROM res_currency c,account_account AS ac "\
"WHERE ac.id = %s AND ac.currency_id = c.id" % (account_id))
result = self.cr.fetchone()
if result:
self.account_currency = result[0]
else:
self.account_currency = False
def _get_fiscalyear(self, data):
if data['model'] == 'account.journal.period':
return self.pool.get('account.journal.period').browse(self.cr, self.uid, data['id']).fiscalyear_id.name
return super(journal_print, self)._get_fiscalyear(data)
def _get_account(self, data):
if data['model'] == 'account.journal.period':
return self.pool.get('account.journal.period').browse(self.cr, self.uid, data['id']).company_id.name
return super(journal_print, self)._get_account(data)
def _display_currency(self, data):
if data['model'] == 'account.journal.period':
return True
return data['form']['amount_currency']
def _get_sortby(self, data):
# TODO: deprecated, to remove in trunk
if self.sort_selection == 'date':
return self._translate('Date')
elif self.sort_selection == 'ref':
return self._translate('Reference Number')
return self._translate('Date')
report_sxw.report_sxw('report.account.journal.period.print', 'account.journal.period', 'addons/account/report/account_journal.rml', parser=journal_print, header='external')
report_sxw.report_sxw('report.account.journal.period.print.sale.purchase', 'account.journal.period', 'addons/account/report/account_journal_sale_purchase.rml', parser=journal_print, header='external')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|> | if journal_id and isinstance(journal_id, int):
journal_id = [journal_id]
if period_id and isinstance(period_id, int):
period_id = [period_id] |
<|file_name|>javascript-types-spec.js<|end_file_name|><|fim▁begin|>describe('Testing JavaScript types', function() {
// we will use jQuery to manipulate HTML for testing...
beforeEach(function() {
// empty for now...
});
describe('Testing JavaScript numbers', function() {
/**
* parseInt is used to parse a string to an integer.
*/
it('string, integer and parseInt', function() {
var aString = '123';
expect(aString).toBe('123');
expect(aString).not.toBe(123);
var theNumber = parseInt(aString);
expect(theNumber).toBe(123);
expect(theNumber).not.toBe('123');
});
/**
* test modulus operator for numbers.
*/
        it('modulus operator for numbers', function() {
var x = 5;
var a = 4;
var b = 0;
var y = 2;
expect(x % y).toBe(1);
expect(a % y).toBe(0);
expect(b % y).toBe(0);
});<|fim▁hole|> it('Pagination calculation', function() {
var total = 13;
var perPage = 10;
// ceil will return the smallest integer
            // greater than or equal to the given number.
var totalPages = Math.ceil(total / perPage);
expect(totalPages).toBe(2);
// case for the exact page!
total = 60;
perPage = 5;
totalPages = Math.ceil(total/perPage);
expect(totalPages).toBe(12);
// case for less than one page.
total = 8;
perPage = 10;
totalPages = Math.ceil(total/perPage);
expect(totalPages).toBe(1);
});
});
describe('Testing JavaScript JSON format', function() {
/**
* try to define an array by using JSON format.
*/
it('define a simple array', function() {
// simple array with strings.
var aArray = ['a', 'b'];
// Array in JavaScript is an object type.
expect(typeof aArray).toBe('object');
// the function isArray will check the given object
// is an array or not!
expect(Array.isArray(aArray)).toBe(true);
            // JavaScript arrays allow mixed data types for each item
aArray = ['a', 'b', 1, 3, {a: '123'}];
// this array has string as its first value.
expect(typeof aArray[0]).toBe('string');
expect(typeof aArray[2]).toBe('number');
expect(typeof aArray[4]).toBe('object');
});
/**
* try to define an object by using JSON format.
*/
it('define a simple object', function() {
// simple object.
var anObject = {
a: 'abc',
b: 123,
                // the last item should NOT have a trailing separator.
c: 'cde'
};
// type should be an object.
expect(typeof anObject).toBe('object');
// inspect the values.
expect(anObject.a).toBe('abc');
expect(anObject.b).toBe(123);
// get keys of the object.
expect(Object.keys(anObject)).toEqual(['a', 'b', 'c']);
});
});
});<|fim▁end|> |
/**
* test the divide
*/ |
<|file_name|>iscroll.js<|end_file_name|><|fim▁begin|>/*!
* iScroll v4.1.8 ~ Copyright (c) 2011 Matteo Spinelli, http://cubiq.org
* Released under MIT license, http://cubiq.org/license
*/
(function(){
var m = Math,
vendor = (/webkit/i).test(navigator.appVersion) ? 'webkit' :
(/firefox/i).test(navigator.userAgent) ? 'Moz' :
'opera' in window ? 'O' : '',
// Browser capabilities
has3d = 'WebKitCSSMatrix' in window && 'm11' in new WebKitCSSMatrix(),
hasTouch = 'ontouchstart' in window,
hasTransform = vendor + 'Transform' in document.documentElement.style,
isAndroid = (/android/gi).test(navigator.appVersion),
isIDevice = (/iphone|ipad/gi).test(navigator.appVersion),
isPlaybook = (/playbook/gi).test(navigator.appVersion),
hasTransitionEnd = isIDevice || isPlaybook,
nextFrame = (function() {
return window.requestAnimationFrame
|| window.webkitRequestAnimationFrame
|| window.mozRequestAnimationFrame
|| window.oRequestAnimationFrame
|| window.msRequestAnimationFrame
|| function(callback) { return setTimeout(callback, 1); }
})(),
cancelFrame = (function () {
return window.cancelRequestAnimationFrame
|| window.webkitCancelRequestAnimationFrame
|| window.mozCancelRequestAnimationFrame
|| window.oCancelRequestAnimationFrame
|| window.msCancelRequestAnimationFrame
|| clearTimeout
})(),
// Events
RESIZE_EV = 'onorientationchange' in window ? 'orientationchange' : 'resize',
START_EV = hasTouch ? 'touchstart' : 'mousedown',
MOVE_EV = hasTouch ? 'touchmove' : 'mousemove',
END_EV = hasTouch ? 'touchend' : 'mouseup',
CANCEL_EV = hasTouch ? 'touchcancel' : 'mouseup',
WHEEL_EV = vendor == 'Moz' ? 'DOMMouseScroll' : 'mousewheel',
// Helpers
trnOpen = 'translate' + (has3d ? '3d(' : '('),
trnClose = has3d ? ',0)' : ')',
// Constructor
iScroll = function (el, options) {
var that = this,
doc = document,
i;
that.wrapper = typeof el == 'object' ? el : doc.getElementById(el);
that.wrapper.style.overflow = 'hidden';
that.scroller = that.wrapper.children[0];
// Default options
that.options = {
hScroll: true,
vScroll: true,
bounce: true,
bounceLock: false,
momentum: true,
lockDirection: true,
useTransform: true,
useTransition: false,
topOffset: 0,
checkDOMChanges: false, // Experimental
// Scrollbar
hScrollbar: true,
vScrollbar: true,
fixedScrollbar: isAndroid,
hideScrollbar: isIDevice,
fadeScrollbar: isIDevice && has3d,
scrollbarClass: '',
// Zoom
zoom: false,
zoomMin: 1,
zoomMax: 4,
doubleTapZoom: 2,
wheelAction: 'scroll',
// Snap
snap: false,
snapThreshold: 1,
// Events
onRefresh: null,
onBeforeScrollStart: function (e) { e.preventDefault(); },
onScrollStart: null,
onBeforeScrollMove: null,
onScrollMove: null,
onBeforeScrollEnd: null,
onScrollEnd: null,
onTouchEnd: null,
onDestroy: null,
onZoomStart: null,
onZoom: null,
onZoomEnd: null,
// Added by Lissa
scrollOffsetLeft: 0,
scrollOffsetTop: 0
};
// User defined options
for (i in options) that.options[i] = options[i];
// Normalize options
that.options.useTransform = hasTransform ? that.options.useTransform : false;
that.options.hScrollbar = that.options.hScroll && that.options.hScrollbar;
that.options.vScrollbar = that.options.vScroll && that.options.vScrollbar;
that.options.zoom = that.options.useTransform && that.options.zoom;
that.options.useTransition = hasTransitionEnd && that.options.useTransition;
// Set some default styles
that.scroller.style[vendor + 'TransitionProperty'] = that.options.useTransform ? '-' + vendor.toLowerCase() + '-transform' : 'top left';
that.scroller.style[vendor + 'TransitionDuration'] = '0';
that.scroller.style[vendor + 'TransformOrigin'] = '0 0';
if (that.options.useTransition) that.scroller.style[vendor + 'TransitionTimingFunction'] = 'cubic-bezier(0.33,0.66,0.66,1)';
if (that.options.useTransform) that.scroller.style[vendor + 'Transform'] = trnOpen + '0,0' + trnClose;
else that.scroller.style.cssText += ';position:absolute;top:0;left:0';
if (that.options.useTransition) that.options.fixedScrollbar = true;<|fim▁hole|> that._bind(START_EV);
if (!hasTouch) {
that._bind('mouseout', that.wrapper);
that._bind(WHEEL_EV);
}
if (that.options.checkDOMChanges) that.checkDOMTime = setInterval(function () {
that._checkDOMChanges();
}, 500);
};
// Prototype
iScroll.prototype = {
enabled: true,
x: 0,
y: 0,
steps: [],
scale: 1,
currPageX: 0, currPageY: 0,
pagesX: [], pagesY: [],
aniTime: null,
wheelZoomCount: 0,
handleEvent: function (e) {
var that = this;
switch(e.type) {
case START_EV:
if (!hasTouch && e.button !== 0) return;
that._start(e);
break;
case MOVE_EV: that._move(e); break;
case END_EV:
case CANCEL_EV: that._end(e); break;
case RESIZE_EV: that._resize(); break;
case WHEEL_EV: that._wheel(e); break;
case 'mouseout': that._mouseout(e); break;
case 'webkitTransitionEnd': that._transitionEnd(e); break;
}
},
_checkDOMChanges: function () {
if (this.moved || this.zoomed || this.animating ||
(this.scrollerW == this.scroller.offsetWidth * this.scale && this.scrollerH == this.scroller.offsetHeight * this.scale)) return;
this.refresh();
},
_scrollbar: function (dir) {
var that = this,
doc = document,
bar;
if (!that[dir + 'Scrollbar']) {
if (that[dir + 'ScrollbarWrapper']) {
if (hasTransform) that[dir + 'ScrollbarIndicator'].style[vendor + 'Transform'] = '';
that[dir + 'ScrollbarWrapper'].parentNode.removeChild(that[dir + 'ScrollbarWrapper']);
that[dir + 'ScrollbarWrapper'] = null;
that[dir + 'ScrollbarIndicator'] = null;
}
return;
}
if (!that[dir + 'ScrollbarWrapper']) {
// Create the scrollbar wrapper
bar = doc.createElement('div');
if (that.options.scrollbarClass) bar.className = that.options.scrollbarClass + dir.toUpperCase();
else bar.style.cssText = 'position:absolute;z-index:100;' + (dir == 'h' ? 'height:7px;bottom:1px;left:2px;right:' + (that.vScrollbar ? '7' : '2') + 'px' : 'width:7px;bottom:' + (that.hScrollbar ? '7' : '2') + 'px;top:2px;right:1px');
bar.style.cssText += ';pointer-events:none;-' + vendor + '-transition-property:opacity;-' + vendor + '-transition-duration:' + (that.options.fadeScrollbar ? '350ms' : '0') + ';overflow:hidden;opacity:' + (that.options.hideScrollbar ? '0' : '1');
that.wrapper.appendChild(bar);
that[dir + 'ScrollbarWrapper'] = bar;
// Create the scrollbar indicator
bar = doc.createElement('div');
if (!that.options.scrollbarClass) {
bar.style.cssText = 'position:absolute;z-index:100;background:rgba(0,0,0,0.5);border:1px solid rgba(255,255,255,0.9);-' + vendor + '-background-clip:padding-box;-' + vendor + '-box-sizing:border-box;' + (dir == 'h' ? 'height:100%' : 'width:100%') + ';-' + vendor + '-border-radius:3px;border-radius:3px';
}
bar.style.cssText += ';pointer-events:none;-' + vendor + '-transition-property:-' + vendor + '-transform;-' + vendor + '-transition-timing-function:cubic-bezier(0.33,0.66,0.66,1);-' + vendor + '-transition-duration:0;-' + vendor + '-transform:' + trnOpen + '0,0' + trnClose;
if (that.options.useTransition) bar.style.cssText += ';-' + vendor + '-transition-timing-function:cubic-bezier(0.33,0.66,0.66,1)';
that[dir + 'ScrollbarWrapper'].appendChild(bar);
that[dir + 'ScrollbarIndicator'] = bar;
}
if (dir == 'h') {
that.hScrollbarSize = that.hScrollbarWrapper.clientWidth;
that.hScrollbarIndicatorSize = m.max(m.round(that.hScrollbarSize * that.hScrollbarSize / that.scrollerW), 8);
that.hScrollbarIndicator.style.width = that.hScrollbarIndicatorSize + 'px';
that.hScrollbarMaxScroll = that.hScrollbarSize - that.hScrollbarIndicatorSize;
that.hScrollbarProp = that.hScrollbarMaxScroll / that.maxScrollX;
} else {
that.vScrollbarSize = that.vScrollbarWrapper.clientHeight;
that.vScrollbarIndicatorSize = m.max(m.round(that.vScrollbarSize * that.vScrollbarSize / that.scrollerH), 8);
that.vScrollbarIndicator.style.height = that.vScrollbarIndicatorSize + 'px';
that.vScrollbarMaxScroll = that.vScrollbarSize - that.vScrollbarIndicatorSize;
that.vScrollbarProp = that.vScrollbarMaxScroll / that.maxScrollY;
}
// Reset position
that._scrollbarPos(dir, true);
},
_resize: function () {
var that = this;
setTimeout(function () { that.refresh(); }, isAndroid ? 200 : 0);
},
_pos: function (x, y) {
x = this.hScroll ? x : 0;
y = this.vScroll ? y : 0;
if (this.options.useTransform) {
this.scroller.style[vendor + 'Transform'] = trnOpen + x + 'px,' + y + 'px' + trnClose + ' scale(' + this.scale + ')';
} else {
x = m.round(x);
y = m.round(y);
this.scroller.style.left = x + 'px';
this.scroller.style.top = y + 'px';
}
this.x = x;
this.y = y;
this._scrollbarPos('h');
this._scrollbarPos('v');
},
_scrollbarPos: function (dir, hidden) {
var that = this,
pos = dir == 'h' ? that.x : that.y,
size;
if (!that[dir + 'Scrollbar']) return;
pos = that[dir + 'ScrollbarProp'] * pos;
if (pos < 0) {
if (!that.options.fixedScrollbar) {
size = that[dir + 'ScrollbarIndicatorSize'] + m.round(pos * 3);
if (size < 8) size = 8;
that[dir + 'ScrollbarIndicator'].style[dir == 'h' ? 'width' : 'height'] = size + 'px';
}
pos = 0;
} else if (pos > that[dir + 'ScrollbarMaxScroll']) {
if (!that.options.fixedScrollbar) {
size = that[dir + 'ScrollbarIndicatorSize'] - m.round((pos - that[dir + 'ScrollbarMaxScroll']) * 3);
if (size < 8) size = 8;
that[dir + 'ScrollbarIndicator'].style[dir == 'h' ? 'width' : 'height'] = size + 'px';
pos = that[dir + 'ScrollbarMaxScroll'] + (that[dir + 'ScrollbarIndicatorSize'] - size);
} else {
pos = that[dir + 'ScrollbarMaxScroll'];
}
}
that[dir + 'ScrollbarWrapper'].style[vendor + 'TransitionDelay'] = '0';
that[dir + 'ScrollbarWrapper'].style.opacity = hidden && that.options.hideScrollbar ? '0' : '1';
that[dir + 'ScrollbarIndicator'].style[vendor + 'Transform'] = trnOpen + (dir == 'h' ? pos + 'px,0' : '0,' + pos + 'px') + trnClose;
},
_start: function (e) {
var that = this,
point = hasTouch ? e.touches[0] : e,
matrix, x, y,
c1, c2;
if (!that.enabled) return;
if (that.options.onBeforeScrollStart) that.options.onBeforeScrollStart.call(that, e);
if (that.options.useTransition || that.options.zoom) that._transitionTime(0);
that.moved = false;
that.animating = false;
that.zoomed = false;
that.distX = 0;
that.distY = 0;
that.absDistX = 0;
that.absDistY = 0;
that.dirX = 0;
that.dirY = 0;
// Gesture start
if (that.options.zoom && hasTouch && e.touches.length > 1) {
c1 = m.abs(e.touches[0].pageX-e.touches[1].pageX);
c2 = m.abs(e.touches[0].pageY-e.touches[1].pageY);
that.touchesDistStart = m.sqrt(c1 * c1 + c2 * c2);
that.originX = m.abs(e.touches[0].pageX + e.touches[1].pageX - that.wrapperOffsetLeft * 2) / 2 - that.x;
that.originY = m.abs(e.touches[0].pageY + e.touches[1].pageY - that.wrapperOffsetTop * 2) / 2 - that.y;
if (that.options.onZoomStart) that.options.onZoomStart.call(that, e);
}
if (that.options.momentum) {
if (that.options.useTransform) {
// Very lame general purpose alternative to CSSMatrix
matrix = getComputedStyle(that.scroller, null)[vendor + 'Transform'].replace(/[^0-9-.,]/g, '').split(',');
x = matrix[4] * 1;
y = matrix[5] * 1;
} else {
x = getComputedStyle(that.scroller, null).left.replace(/[^0-9-]/g, '') * 1;
y = getComputedStyle(that.scroller, null).top.replace(/[^0-9-]/g, '') * 1;
}
if (x != that.x || y != that.y) {
if (that.options.useTransition) that._unbind('webkitTransitionEnd');
else cancelFrame(that.aniTime);
that.steps = [];
that._pos(x, y);
}
}
that.absStartX = that.x; // Needed by snap threshold
that.absStartY = that.y;
that.startX = that.x;
that.startY = that.y;
that.pointX = point.pageX;
that.pointY = point.pageY;
that.startTime = e.timeStamp || (new Date()).getTime();
if (that.options.onScrollStart) that.options.onScrollStart.call(that, e);
that._bind(MOVE_EV);
that._bind(END_EV);
that._bind(CANCEL_EV);
},
_move: function (e) {
var that = this,
point = hasTouch ? e.touches[0] : e,
deltaX = point.pageX - that.pointX,
deltaY = point.pageY - that.pointY,
newX = that.x + deltaX,
newY = that.y + deltaY,
c1, c2, scale,
timestamp = e.timeStamp || (new Date()).getTime();
if (that.options.onBeforeScrollMove) that.options.onBeforeScrollMove.call(that, e);
// Zoom
if (that.options.zoom && hasTouch && e.touches.length > 1) {
c1 = m.abs(e.touches[0].pageX - e.touches[1].pageX);
c2 = m.abs(e.touches[0].pageY - e.touches[1].pageY);
that.touchesDist = m.sqrt(c1*c1+c2*c2);
that.zoomed = true;
scale = 1 / that.touchesDistStart * that.touchesDist * this.scale;
if (scale < that.options.zoomMin) scale = 0.5 * that.options.zoomMin * Math.pow(2.0, scale / that.options.zoomMin);
else if (scale > that.options.zoomMax) scale = 2.0 * that.options.zoomMax * Math.pow(0.5, that.options.zoomMax / scale);
that.lastScale = scale / this.scale;
newX = this.originX - this.originX * that.lastScale + this.x,
newY = this.originY - this.originY * that.lastScale + this.y;
this.scroller.style[vendor + 'Transform'] = trnOpen + newX + 'px,' + newY + 'px' + trnClose + ' scale(' + scale + ')';
if (that.options.onZoom) that.options.onZoom.call(that, e);
return;
}
that.pointX = point.pageX;
that.pointY = point.pageY;
// Slow down if outside of the boundaries
if (newX > 0 || newX < that.maxScrollX) {
newX = that.options.bounce ? that.x + (deltaX / 2) : newX >= 0 || that.maxScrollX >= 0 ? 0 : that.maxScrollX;
}
if (newY > that.minScrollY || newY < that.maxScrollY) {
newY = that.options.bounce ? that.y + (deltaY / 2) : newY >= that.minScrollY || that.maxScrollY >= 0 ? that.minScrollY : that.maxScrollY;
}
if (that.absDistX < 6 && that.absDistY < 6) {
that.distX += deltaX;
that.distY += deltaY;
that.absDistX = m.abs(that.distX);
that.absDistY = m.abs(that.distY);
return;
}
// Lock direction
if (that.options.lockDirection) {
if (that.absDistX > that.absDistY + 5) {
newY = that.y;
deltaY = 0;
} else if (that.absDistY > that.absDistX + 5) {
newX = that.x;
deltaX = 0;
}
}
that.moved = true;
that._pos(newX, newY);
that.dirX = deltaX > 0 ? -1 : deltaX < 0 ? 1 : 0;
that.dirY = deltaY > 0 ? -1 : deltaY < 0 ? 1 : 0;
if (timestamp - that.startTime > 300) {
that.startTime = timestamp;
that.startX = that.x;
that.startY = that.y;
}
if (that.options.onScrollMove) that.options.onScrollMove.call(that, e);
},
_end: function (e) {
if (hasTouch && e.touches.length != 0) return;
var that = this,
point = hasTouch ? e.changedTouches[0] : e,
target, ev,
momentumX = { dist:0, time:0 },
momentumY = { dist:0, time:0 },
duration = (e.timeStamp || (new Date()).getTime()) - that.startTime,
newPosX = that.x,
newPosY = that.y,
distX, distY,
newDuration,
snap,
scale;
that._unbind(MOVE_EV);
that._unbind(END_EV);
that._unbind(CANCEL_EV);
if (that.options.onBeforeScrollEnd) that.options.onBeforeScrollEnd.call(that, e);
if (that.zoomed) {
scale = that.scale * that.lastScale;
scale = Math.max(that.options.zoomMin, scale);
scale = Math.min(that.options.zoomMax, scale);
that.lastScale = scale / that.scale;
that.scale = scale;
that.x = that.originX - that.originX * that.lastScale + that.x;
that.y = that.originY - that.originY * that.lastScale + that.y;
that.scroller.style[vendor + 'TransitionDuration'] = '200ms';
that.scroller.style[vendor + 'Transform'] = trnOpen + that.x + 'px,' + that.y + 'px' + trnClose + ' scale(' + that.scale + ')';
that.zoomed = false;
that.refresh();
if (that.options.onZoomEnd) that.options.onZoomEnd.call(that, e);
return;
}
if (!that.moved) {
if (hasTouch) {
if (that.doubleTapTimer && that.options.zoom) {
// Double tapped
clearTimeout(that.doubleTapTimer);
that.doubleTapTimer = null;
if (that.options.onZoomStart) that.options.onZoomStart.call(that, e);
that.zoom(that.pointX, that.pointY, that.scale == 1 ? that.options.doubleTapZoom : 1);
if (that.options.onZoomEnd) {
setTimeout(function() {
that.options.onZoomEnd.call(that, e);
}, 200); // 200 is default zoom duration
}
} else {
that.doubleTapTimer = setTimeout(function () {
that.doubleTapTimer = null;
// Find the last touched element
target = point.target;
while (target.nodeType != 1) target = target.parentNode;
if (target.tagName != 'SELECT' && target.tagName != 'INPUT' && target.tagName != 'TEXTAREA') {
ev = document.createEvent('MouseEvents');
ev.initMouseEvent('click', true, true, e.view, 1,
point.screenX, point.screenY, point.clientX, point.clientY,
e.ctrlKey, e.altKey, e.shiftKey, e.metaKey,
0, null);
ev._fake = true;
target.dispatchEvent(ev);
}
}, that.options.zoom ? 250 : 0);
}
}
that._resetPos(200);
if (that.options.onTouchEnd) that.options.onTouchEnd.call(that, e);
return;
}
if (duration < 300 && that.options.momentum) {
momentumX = newPosX ? that._momentum(newPosX - that.startX, duration, -that.x, that.scrollerW - that.wrapperW + that.x, that.options.bounce ? that.wrapperW : 0) : momentumX;
momentumY = newPosY ? that._momentum(newPosY - that.startY, duration, -that.y, (that.maxScrollY < 0 ? that.scrollerH - that.wrapperH + that.y - that.minScrollY : 0), that.options.bounce ? that.wrapperH : 0) : momentumY;
newPosX = that.x + momentumX.dist;
newPosY = that.y + momentumY.dist;
if ((that.x > 0 && newPosX > 0) || (that.x < that.maxScrollX && newPosX < that.maxScrollX)) momentumX = { dist:0, time:0 };
if ((that.y > that.minScrollY && newPosY > that.minScrollY) || (that.y < that.maxScrollY && newPosY < that.maxScrollY)) momentumY = { dist:0, time:0 };
}
if (momentumX.dist || momentumY.dist) {
newDuration = m.max(m.max(momentumX.time, momentumY.time), 10);
// Do we need to snap?
if (that.options.snap) {
distX = newPosX - that.absStartX;
distY = newPosY - that.absStartY;
if (m.abs(distX) < that.options.snapThreshold && m.abs(distY) < that.options.snapThreshold) { that.scrollTo(that.absStartX, that.absStartY, 200); }
else {
snap = that._snap(newPosX, newPosY);
newPosX = snap.x;
newPosY = snap.y;
newDuration = m.max(snap.time, newDuration);
}
}
that.scrollTo(newPosX, newPosY, newDuration);
if (that.options.onTouchEnd) that.options.onTouchEnd.call(that, e);
return;
}
// Do we need to snap?
if (that.options.snap) {
distX = newPosX - that.absStartX;
distY = newPosY - that.absStartY;
if (m.abs(distX) < that.options.snapThreshold && m.abs(distY) < that.options.snapThreshold) that.scrollTo(that.absStartX, that.absStartY, 200);
else {
snap = that._snap(that.x, that.y);
if (snap.x != that.x || snap.y != that.y) that.scrollTo(snap.x, snap.y, snap.time);
}
if (that.options.onTouchEnd) that.options.onTouchEnd.call(that, e);
return;
}
that._resetPos(200);
if (that.options.onTouchEnd) that.options.onTouchEnd.call(that, e);
},
_resetPos: function (time) {
var that = this,
resetX = that.x >= 0 ? 0 : that.x < that.maxScrollX ? that.maxScrollX : that.x,
resetY = that.y >= that.minScrollY || that.maxScrollY > 0 ? that.minScrollY : that.y < that.maxScrollY ? that.maxScrollY : that.y;
if (resetX == that.x && resetY == that.y) {
if (that.moved) {
that.moved = false;
if (that.options.onScrollEnd) that.options.onScrollEnd.call(that); // Execute custom code on scroll end
}
/*
if (that.hScrollbar && that.options.hideScrollbar) {
if (vendor == 'webkit') that.hScrollbarWrapper.style[vendor + 'TransitionDelay'] = '300ms';
that.hScrollbarWrapper.style.opacity = '0';
}
if (that.vScrollbar && that.options.hideScrollbar) {
if (vendor == 'webkit') that.vScrollbarWrapper.style[vendor + 'TransitionDelay'] = '300ms';
that.vScrollbarWrapper.style.opacity = '0';
}
*/
return;
}
that.scrollTo(resetX, resetY, time || 0);
},
_wheel: function (e) {
var that = this,
wheelDeltaX, wheelDeltaY,
deltaX, deltaY,
deltaScale;
if ('wheelDeltaX' in e) {
wheelDeltaX = e.wheelDeltaX / 12;
wheelDeltaY = e.wheelDeltaY / 12;
} else if ('detail' in e) {
wheelDeltaX = wheelDeltaY = -e.detail * 3;
} else {
wheelDeltaX = wheelDeltaY = -e.wheelDelta;
}
if (that.options.wheelAction == 'zoom') {
deltaScale = that.scale * Math.pow(2, 1/3 * (wheelDeltaY ? wheelDeltaY / Math.abs(wheelDeltaY) : 0));
if (deltaScale < that.options.zoomMin) deltaScale = that.options.zoomMin;
if (deltaScale > that.options.zoomMax) deltaScale = that.options.zoomMax;
if (deltaScale != that.scale) {
if (!that.wheelZoomCount && that.options.onZoomStart) that.options.onZoomStart.call(that, e);
that.wheelZoomCount++;
that.zoom(e.pageX, e.pageY, deltaScale, 400);
setTimeout(function() {
that.wheelZoomCount--;
if (!that.wheelZoomCount && that.options.onZoomEnd) that.options.onZoomEnd.call(that, e);
}, 400);
}
return;
}
deltaX = that.x + wheelDeltaX;
deltaY = that.y + wheelDeltaY;
if (deltaX > 0) deltaX = 0;
else if (deltaX < that.maxScrollX) deltaX = that.maxScrollX;
if (deltaY > that.minScrollY) deltaY = that.minScrollY;
else if (deltaY < that.maxScrollY) deltaY = that.maxScrollY;
that.scrollTo(deltaX, deltaY, 0);
},
_mouseout: function (e) {
var t = e.relatedTarget;
if (!t) {
this._end(e);
return;
}
while (t = t.parentNode) if (t == this.wrapper) return;
this._end(e);
},
_transitionEnd: function (e) {
var that = this;
if (e.target != that.scroller) return;
that._unbind('webkitTransitionEnd');
that._startAni();
},
/**
*
* Utilities
*
*/
_startAni: function () {
var that = this,
startX = that.x, startY = that.y,
startTime = (new Date).getTime(),
step, easeOut;
if (that.animating) return;
if (!that.steps.length) {
that._resetPos(400);
return;
}
step = that.steps.shift();
if (step.x == startX && step.y == startY) step.time = 0;
that.animating = true;
that.moved = true;
if (that.options.useTransition) {
that._transitionTime(step.time);
that._pos(step.x, step.y);
that.animating = false;
if (step.time) that._bind('webkitTransitionEnd');
else that._resetPos(0);
return;
}
(function animate () {
var now = (new Date).getTime(),
newX, newY;
if (now >= startTime + step.time) {
that._pos(step.x, step.y);
that.animating = false;
if (that.options.onAnimationEnd) that.options.onAnimationEnd.call(that); // Execute custom code on animation end
that._startAni();
return;
}
now = (now - startTime) / step.time - 1;
easeOut = m.sqrt(1 - now * now);
newX = (step.x - startX) * easeOut + startX;
newY = (step.y - startY) * easeOut + startY;
that._pos(newX, newY);
if (that.animating) that.aniTime = nextFrame(animate);
})();
},
_transitionTime: function (time) {
time += 'ms';
this.scroller.style[vendor + 'TransitionDuration'] = time;
if (this.hScrollbar) this.hScrollbarIndicator.style[vendor + 'TransitionDuration'] = time;
if (this.vScrollbar) this.vScrollbarIndicator.style[vendor + 'TransitionDuration'] = time;
},
_momentum: function (dist, time, maxDistUpper, maxDistLower, size) {
var deceleration = 0.0006,
speed = m.abs(dist) / time,
newDist = (speed * speed) / (2 * deceleration),
newTime = 0, outsideDist = 0;
        // Proportionally reduce speed if we are outside of the boundaries
if (dist > 0 && newDist > maxDistUpper) {
outsideDist = size / (6 / (newDist / speed * deceleration));
maxDistUpper = maxDistUpper + outsideDist;
speed = speed * maxDistUpper / newDist;
newDist = maxDistUpper;
} else if (dist < 0 && newDist > maxDistLower) {
outsideDist = size / (6 / (newDist / speed * deceleration));
maxDistLower = maxDistLower + outsideDist;
speed = speed * maxDistLower / newDist;
newDist = maxDistLower;
}
newDist = newDist * (dist < 0 ? -1 : 1);
newTime = speed / deceleration;
return { dist: newDist, time: m.round(newTime) };
},
_offset: function (el) {
var left = -el.offsetLeft,
top = -el.offsetTop;
while (el = el.offsetParent) {
left -= el.offsetLeft;
top -= el.offsetTop;
}
if (el != this.wrapper) {
left *= this.scale;
top *= this.scale;
}
return { left: left, top: top };
},
_snap: function (x, y) {
var that = this,
i, l,
page, time,
sizeX, sizeY;
// Check page X
page = that.pagesX.length - 1;
for (i=0, l=that.pagesX.length; i<l; i++) {
if (x >= that.pagesX[i]) {
page = i;
break;
}
}
if (page == that.currPageX && page > 0 && that.dirX < 0) page--;
x = that.pagesX[page];
sizeX = m.abs(x - that.pagesX[that.currPageX]);
sizeX = sizeX ? m.abs(that.x - x) / sizeX * 500 : 0;
that.currPageX = page;
// Check page Y
page = that.pagesY.length-1;
for (i=0; i<page; i++) {
if (y >= that.pagesY[i]) {
page = i;
break;
}
}
if (page == that.currPageY && page > 0 && that.dirY < 0) page--;
y = that.pagesY[page];
sizeY = m.abs(y - that.pagesY[that.currPageY]);
sizeY = sizeY ? m.abs(that.y - y) / sizeY * 500 : 0;
that.currPageY = page;
// Snap with constant speed (proportional duration)
time = m.round(m.max(sizeX, sizeY)) || 200;
return { x: x, y: y, time: time };
},
_bind: function (type, el, bubble) {
(el || this.scroller).addEventListener(type, this, !!bubble);
},
_unbind: function (type, el, bubble) {
(el || this.scroller).removeEventListener(type, this, !!bubble);
},
/**
*
* Public methods
*
*/
destroy: function () {
var that = this;
that.scroller.style[vendor + 'Transform'] = '';
// Remove the scrollbars
that.hScrollbar = false;
that.vScrollbar = false;
that._scrollbar('h');
that._scrollbar('v');
// Remove the event listeners
that._unbind(RESIZE_EV, window);
that._unbind(START_EV);
that._unbind(MOVE_EV);
that._unbind(END_EV);
that._unbind(CANCEL_EV);
if (that.options.hasTouch) {
that._unbind('mouseout', that.wrapper);
that._unbind(WHEEL_EV);
}
if (that.options.useTransition) that._unbind('webkitTransitionEnd');
if (that.options.checkDOMChanges) clearInterval(that.checkDOMTime);
if (that.options.onDestroy) that.options.onDestroy.call(that);
},
refresh: function () {
var that = this,
offset,
i, l,
els,
pos = 0,
page = 0;
if (that.scale < that.options.zoomMin) that.scale = that.options.zoomMin;
that.wrapperW = that.wrapper.clientWidth || 1;
that.wrapperH = that.wrapper.clientHeight || 1;
that.minScrollY = -that.options.topOffset || 0;
that.scrollerW = m.round(that.scroller.offsetWidth * that.scale);
that.scrollerH = m.round((that.scroller.offsetHeight + that.minScrollY) * that.scale);
that.maxScrollX = that.wrapperW - that.scrollerW;
that.maxScrollY = that.wrapperH - that.scrollerH + that.minScrollY;
that.dirX = 0;
that.dirY = 0;
if (that.options.onRefresh) that.options.onRefresh.call(that);
that.hScroll = that.options.hScroll && that.maxScrollX < 0;
that.vScroll = that.options.vScroll && (!that.options.bounceLock && !that.hScroll || that.scrollerH > that.wrapperH);
that.hScrollbar = that.hScroll && that.options.hScrollbar;
that.vScrollbar = that.vScroll && that.options.vScrollbar && that.scrollerH > that.wrapperH;
offset = that._offset(that.wrapper);
that.wrapperOffsetLeft = -offset.left;
that.wrapperOffsetTop = -offset.top;
// Prepare snap
if (typeof that.options.snap == 'string') {
that.pagesX = [];
that.pagesY = [];
els = that.scroller.querySelectorAll(that.options.snap);
for (i=0, l=els.length; i<l; i++) {
pos = that._offset(els[i]);
pos.left += that.wrapperOffsetLeft;
pos.top += that.wrapperOffsetTop;
that.pagesX[i] = pos.left < that.maxScrollX ? that.maxScrollX : pos.left * that.scale;
that.pagesY[i] = pos.top < that.maxScrollY ? that.maxScrollY : pos.top * that.scale;
}
} else if (that.options.snap) {
that.pagesX = [];
while (pos >= that.maxScrollX) {
that.pagesX[page] = pos;
pos = pos - that.wrapperW;
page++;
}
if (that.maxScrollX%that.wrapperW) that.pagesX[that.pagesX.length] = that.maxScrollX - that.pagesX[that.pagesX.length-1] + that.pagesX[that.pagesX.length-1];
pos = 0;
page = 0;
that.pagesY = [];
while (pos >= that.maxScrollY) {
that.pagesY[page] = pos;
pos = pos - that.wrapperH;
page++;
}
if (that.maxScrollY%that.wrapperH) that.pagesY[that.pagesY.length] = that.maxScrollY - that.pagesY[that.pagesY.length-1] + that.pagesY[that.pagesY.length-1];
}
// Prepare the scrollbars
that._scrollbar('h');
that._scrollbar('v');
if (!that.zoomed) {
that.scroller.style[vendor + 'TransitionDuration'] = '0';
that._resetPos(200);
}
},
scrollTo: function (x, y, time, relative) {
var that = this,
step = x,
i, l;
that.stop();
if (!step.length) step = [{ x: x, y: y, time: time, relative: relative }];
for (i=0, l=step.length; i<l; i++) {
if (step[i].relative) { step[i].x = that.x - step[i].x; step[i].y = that.y - step[i].y; }
that.steps.push({ x: step[i].x, y: step[i].y, time: step[i].time || 0 });
}
that._startAni();
},
scrollToElement: function (el, time) {
var that = this, pos;
el = el.nodeType ? el : that.scroller.querySelector(el);
if (!el) return;
pos = that._offset(el);
pos.left += that.wrapperOffsetLeft;
pos.top += that.wrapperOffsetTop;
pos.left = pos.left > 0 ? 0 : pos.left < that.maxScrollX ? that.maxScrollX : pos.left;
pos.top = pos.top > that.minScrollY ? that.minScrollY : pos.top < that.maxScrollY ? that.maxScrollY : pos.top;
time = time === undefined ? m.max(m.abs(pos.left)*2, m.abs(pos.top)*2) : time;
// Added for scroll offset by Lissa
pos.left -= that.options.scrollOffsetLeft;
pos.top -= that.options.scrollOffsetTop;
that.scrollTo(pos.left, pos.top, time);
},
scrollToPage: function (pageX, pageY, time) {
var that = this, x, y;
if (that.options.snap) {
pageX = pageX == 'next' ? that.currPageX+1 : pageX == 'prev' ? that.currPageX-1 : pageX;
pageY = pageY == 'next' ? that.currPageY+1 : pageY == 'prev' ? that.currPageY-1 : pageY;
pageX = pageX < 0 ? 0 : pageX > that.pagesX.length-1 ? that.pagesX.length-1 : pageX;
pageY = pageY < 0 ? 0 : pageY > that.pagesY.length-1 ? that.pagesY.length-1 : pageY;
that.currPageX = pageX;
that.currPageY = pageY;
x = that.pagesX[pageX];
y = that.pagesY[pageY];
} else {
x = -that.wrapperW * pageX;
y = -that.wrapperH * pageY;
if (x < that.maxScrollX) x = that.maxScrollX;
if (y < that.maxScrollY) y = that.maxScrollY;
}
that.scrollTo(x, y, time || 400);
},
disable: function () {
this.stop();
this._resetPos(0);
this.enabled = false;
// If disabled after touchstart we make sure that there are no left over events
this._unbind(MOVE_EV);
this._unbind(END_EV);
this._unbind(CANCEL_EV);
},
enable: function () {
this.enabled = true;
},
stop: function () {
if (this.options.useTransition) this._unbind('webkitTransitionEnd');
else cancelFrame(this.aniTime);
this.steps = [];
this.moved = false;
this.animating = false;
},
zoom: function (x, y, scale, time) {
var that = this,
relScale = scale / that.scale;
if (!that.options.useTransform) return;
that.zoomed = true;
time = time === undefined ? 200 : time;
x = x - that.wrapperOffsetLeft - that.x;
y = y - that.wrapperOffsetTop - that.y;
that.x = x - x * relScale + that.x;
that.y = y - y * relScale + that.y;
that.scale = scale;
that.refresh();
that.x = that.x > 0 ? 0 : that.x < that.maxScrollX ? that.maxScrollX : that.x;
that.y = that.y > that.minScrollY ? that.minScrollY : that.y < that.maxScrollY ? that.maxScrollY : that.y;
that.scroller.style[vendor + 'TransitionDuration'] = time + 'ms';
that.scroller.style[vendor + 'Transform'] = trnOpen + that.x + 'px,' + that.y + 'px' + trnClose + ' scale(' + scale + ')';
that.zoomed = false;
},
isReady: function () {
return !this.moved && !this.zoomed && !this.animating;
}
};
if (typeof exports !== 'undefined') exports.iScroll = iScroll;
else window.iScroll = iScroll;
})();<|fim▁end|> |
that.refresh();
that._bind(RESIZE_EV, window); |
<|file_name|>uart.js<|end_file_name|><|fim▁begin|>'use strict';
var fs = require('fs'),
util = require('util'),
Duplexify = require('duplexify'),
_ = require('lodash'),
su = require('bindings')('serialutil.node'),
fsu = require('./fsutil'),
pins = require('./pins'),
Dto = require('./dto'),
dto = new Dto(__dirname + '/../templates/uart.dts');
var DEFAULT_OPTIONS;
function onopen(uart, options) {
if (uart._rxfd !== -1 && uart._txfd !== -1) {
su.setRawMode(uart._rxfd);
uart.baudRate(options.baudRate);
uart.characterSize(options.characterSize);
uart.parity(options.parity);
uart.stopBits(options.stopBits);
setImmediate(function () {
uart.emit('open');
uart.emit('ready');
});
}
}
function onclose(uart) {<|fim▁hole|> });
}
}
function createStreams(uart, options) {
uart._rxfd = -1;
uart._txfd = -1;
uart._rxstream = fs.createReadStream(uart.devPath, {
highWaterMark: options.highWaterMark,
encoding: options.encoding
});
uart._txstream = fs.createWriteStream(uart.devPath, {
highWaterMark: options.highWaterMark,
encoding: options.encoding,
flags: 'r+'
});
uart._rxstream.once('open', function (rxfd) {
uart._rxfd = rxfd;
onopen(uart, options);
});
uart._txstream.once('open', function (txfd) {
uart._txfd = txfd;
onopen(uart, options);
});
uart._rxstream.once('close', function () {
uart._rxfd = -1;
onclose(uart);
});
uart._txstream.once('close', function () {
uart._txfd = -1;
onclose(uart);
});
// TODO - test error handling
uart.setReadable(uart._rxstream);
uart.setWritable(uart._txstream);
}
function waitForUart(uart, options) {
fsu.waitForFile(uart.devPath, function (err, devPath) {
if (err) {
return uart.emit('error', err);
}
createStreams(uart, options);
});
}
function Uart(uartDef, options) {
var badPin,
config;
if (!(this instanceof Uart)) {
return new Uart(uartDef);
}
options = options ? _.defaults(options, DEFAULT_OPTIONS) : DEFAULT_OPTIONS;
// Consider calling Duplexify with the allowHalfOpen option set to false.
    // Its super-class (Duplex) will then ensure that this.end is called when
// the read stream fires the 'end' event. (see:
// https://github.com/joyent/node/blob/v0.10.25/lib/_stream_duplex.js)
Duplexify.call(this, null, null);
if (typeof uartDef === 'string') {
this.uartDef = null;
this.devPath = uartDef;
this.name = null;
waitForUart(this, options);
} else {
if (uartDef.txPin.uart === undefined) {
badPin = new Error(uartDef.txPin + ' doesn\'t support uarts');
} else if (uartDef.rxPin.uart === undefined) {
badPin = new Error(uartDef.rxPin + ' doesn\'t support uarts');
}
if (badPin) {
setImmediate(function () {
this.emit('error', badPin);
}.bind(this));
return;
}
this.uartDef = uartDef;
this.devPath = '/dev/ttyO' + uartDef.id;
this.name = 'bot_uart' + uartDef.id;
config = {
txHeader: this.uartDef.txPin.name.toUpperCase().replace('_', '.'),
rxHeader: this.uartDef.rxPin.name.toUpperCase().replace('_', '.'),
hardwareIp: 'uart' + this.uartDef.id,
name: this.name,
rxMuxOffset: '0x' + this.uartDef.rxPin.muxOffset.toString(16),
rxMuxValue: '0x' + this.uartDef.rxPin.uart.muxValue.toString(16),
txMuxOffset: '0x' + this.uartDef.txPin.muxOffset.toString(16),
txMuxValue: '0x' + this.uartDef.txPin.uart.muxValue.toString(16),
targetUart: 'uart' + (this.uartDef.id + 1),
partNumber: this.name
};
dto.install(config, function (err) {
if (err) {
return this.emit('error', err);
}
waitForUart(this, options);
}.bind(this));
}
}
module.exports = Uart;
util.inherits(Uart, Duplexify);
Uart.B0 = su.B0;
Uart.B50 = su.B50;
Uart.B75 = su.B75;
Uart.B110 = su.B110;
Uart.B134 = su.B134;
Uart.B150 = su.B150;
Uart.B200 = su.B200;
Uart.B300 = su.B300;
Uart.B600 = su.B600;
Uart.B1200 = su.B1200;
Uart.B1800 = su.B1800;
Uart.B2400 = su.B2400;
Uart.B4800 = su.B4800;
Uart.B9600 = su.B9600;
Uart.B19200 = su.B19200;
Uart.B38400 = su.B38400;
Uart.B57600 = su.B57600;
Uart.B115200 = su.B115200;
Uart.B230400 = su.B230400;
Uart.B460800 = su.B460800;
Uart.B500000 = su.B500000;
Uart.B576000 = su.B576000;
Uart.B921600 = su.B921600;
Uart.B1000000 = su.B1000000;
Uart.B1152000 = su.B1152000;
Uart.B1500000 = su.B1500000;
Uart.B2000000 = su.B2000000;
Uart.B2500000 = su.B2500000;
Uart.B3000000 = su.B3000000;
Uart.B3500000 = su.B3500000;
Uart.B4000000 = su.B4000000;
Uart.PARITY_NONE = su.PARITY_NONE;
Uart.PARITY_ODD = su.PARITY_ODD;
Uart.PARITY_EVEN = su.PARITY_EVEN;
Uart.UART1 = {
id: 1,
txPin: pins.p9_24,
rxPin: pins.p9_26
};
Uart.UART2 = {
id: 2,
txPin: pins.p9_21,
rxPin: pins.p9_22
};
Uart.UART4 = {
id: 4,
txPin: pins.p9_13,
rxPin: pins.p9_11
};
DEFAULT_OPTIONS = {
baudRate: Uart.B38400,
characterSize: 8,
parity: Uart.PARITY_NONE,
stopBits: 1,
highWaterMark: 512,
encoding: null
};
Object.freeze(DEFAULT_OPTIONS);
Uart.prototype.baudRate = function (rate) {
if (rate === undefined) {
return su.getBaudRate(this._rxfd);
}
su.setBaudRate(this._rxfd, rate);
};
Uart.prototype.characterSize = function (size) {
if (size === undefined) {
return su.getCharacterSize(this._rxfd);
}
su.setCharacterSize(this._rxfd, size);
};
Uart.prototype.parity = function (type) {
if (type === undefined) {
return su.getParity(this._rxfd);
}
su.setParity(this._rxfd, type);
};
Uart.prototype.stopBits = function (count) {
if (count === undefined) {
return su.getStopBits(this._rxfd);
}
su.setStopBits(this._rxfd, count);
};
Uart.prototype.close = function () {
this.removeAllListeners('data'); // Is this a good idea? Should the user be doing this?
// TODO: the following is a bit of a hack.
    // Here \n EOF is faked for this._rxfd in order to close the read stream.
// It's faked three times as the uart may receive a character between
// \n and EOF and the stream will not be closed. Faking three times
// increases the chances of it working!
su.setCanonical(this._rxfd, true);
su.fakeInput(this._rxfd, '\n'.charCodeAt(0));
su.fakeInput(this._rxfd, 4); // fake eof
su.fakeInput(this._rxfd, '\n'.charCodeAt(0));
su.fakeInput(this._rxfd, 4); // fake eof
su.fakeInput(this._rxfd, '\n'.charCodeAt(0));
su.fakeInput(this._rxfd, 4); // fake eof
};<|fim▁end|> | if (uart._rxfd === -1 && uart._txfd === -1) {
setImmediate(function () {
uart.emit('close'); |
<|file_name|>test_universe_create.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
This pretty much just tests creating a user, a universe, a planet, a building type name, a building
type, and a building.
"""
import os
import sys
import sqlalchemy
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
import legendary_waffle
# Database setup
db_engine = sqlalchemy.create_engine("sqlite://")
legendary_waffle.models.MODELBASE.metadata.create_all(db_engine)
legendary_waffle.models.MODELBASE.metadata.bind = db_engine
db_session = sqlalchemy.orm.sessionmaker(bind=db_engine)
db = db_session()
# Create the user
legendary_waffle.model_create(db, legendary_waffle.models.User, name='sk4ly')
print "Users: {}".format(legendary_waffle.model_read(db, legendary_waffle.models.User))
# Create the universe
universe_config = {
"name": 'poopiverse',
"map_size": 1000,
"max_planets": 1000,
"max_players": 10
}
legendary_waffle.model_create(db, legendary_waffle.models.Universe, **universe_config)
print "Universe: {}".format(legendary_waffle.model_read(db, legendary_waffle.models.Universe))
# Create the planet
planet_config = {
"universe": 1, # The pkid of the universe 'poopiverse'
"coordinate_x": 1,
"coordinate_y": 1,
"name": 'bloth',
"habitable": True,
"player_control": 1, # The pkid of user 'sk4ly'
"default_condition": 1000,
"default_resources": 1000,
"current_condition": 1000,
"current_resources": 1000
}
legendary_waffle.model_create(db, legendary_waffle.models.Planet, **planet_config)
print "Planet: {}".format(legendary_waffle.model_read(db, legendary_waffle.models.Planet))
# Create building type name
legendary_waffle.model_create(db, legendary_waffle.models.BuildingTypeName, name="Control Center")
print "Building Type Name: {}".format(legendary_waffle.model_read(db, legendary_waffle.models.BuildingTypeName))
# Create building type
building_type_config = {
"typename": 1, # The pkid of the building type name 'Control Center'
"description": "This is the control center",
"default_condition": 100,
"default_firepower": 0,
"default_storage": 100,
"rhr_passive": 0,<|fim▁hole|>}
legendary_waffle.model_create(db, legendary_waffle.models.BuildingType, **building_type_config)
print "Building Type: {}".format(legendary_waffle.model_read(db, legendary_waffle.models.BuildingType))
# Now create our new building
building_config = {
"building_type": 1, # The pkid of the building type with the name 'Control Center'
"universe": 1, # The pkid of the universe 'poopiverse'
"planet": 1, # The pkid of the planet 'bloth'
"player_control": 1, # The pkid of the user 'sk4ly'
}
legendary_waffle.model_create(db, legendary_waffle.models.Building, **building_config)
print "Building: {}".format(legendary_waffle.model_read(db, legendary_waffle.models.Building))<|fim▁end|> | "rhr_active": 0,
"rhr_destructive": 0,
"build_resource_reqs": 500, |
<|file_name|>issue-18532.rs<|end_file_name|><|fim▁begin|>// Test that overloaded call parameter checking does not ICE
// when a type error or unconstrained type variable propagates
// into it.
fn main() {
(return)((),()); //~ ERROR expected function, found `!`<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>_text.py<|end_file_name|><|fim▁begin|># This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Toshio Kuratomi <[email protected]>, 2016
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""
.. warning:: This module_util is currently an internal implementation detail.
We want to evaluate this code for stability and API suitability before
making backwards compatibility guarantees. The API may change between
releases. Do not use this unless you are willing to port your module code.
"""
import codecs
from ansible.module_utils.six import PY3, text_type, binary_type
try:
codecs.lookup_error('surrogateescape')
HAS_SURROGATEESCAPE = True
except LookupError:
HAS_SURROGATEESCAPE = False
_COMPOSED_ERROR_HANDLERS = frozenset((None, 'surrogate_or_escape',
'surrogate_or_strict',<|fim▁hole|>
def to_bytes(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
"""Make sure that a string is a byte string
:arg obj: An object to make sure is a byte string. In most cases this
will be either a text string or a byte string. However, with
``nonstring='simplerepr'``, this can be used as a traceback-free
version of ``str(obj)``.
:kwarg encoding: The encoding to use to transform from a text string to
a byte string. Defaults to using 'utf-8'.
:kwarg errors: The error handler to use if the text string is not
encodable using the specified encoding. Any valid `codecs error
handler <https://docs.python.org/2/library/codecs.html#codec-base-classes>`_
may be specified. There are three additional error strategies
specifically aimed at helping people to port code. The first two are:
:surrogate_or_strict: Will use ``surrogateescape`` if it is a valid
handler, otherwise it will use ``strict``
:surrogate_or_replace: Will use ``surrogateescape`` if it is a valid
handler, otherwise it will use ``replace``.
Because ``surrogateescape`` was added in Python3 this usually means that
Python3 will use ``surrogateescape`` and Python2 will use the fallback
error handler. Note that the code checks for ``surrogateescape`` when the
module is imported. If you have a backport of ``surrogateescape`` for
Python2, be sure to register the error handler prior to importing this
module.
The last error handler is:
:surrogate_then_replace: Will use ``surrogateescape`` if it is a valid
handler. If encoding with ``surrogateescape`` would traceback,
surrogates are first replaced with a replacement characters
and then the string is encoded using ``replace`` (which replaces
the rest of the nonencodable bytes). If ``surrogateescape`` is
not present it will simply use ``replace``. (Added in Ansible 2.3)
This strategy is designed to never traceback when it attempts
to encode a string.
The default until Ansible-2.2 was ``surrogate_or_replace``
From Ansible-2.3 onwards, the default is ``surrogate_then_replace``.
:kwarg nonstring: The strategy to use if a nonstring is specified in
``obj``. Default is 'simplerepr'. Valid values are:
:simplerepr: The default. This takes the ``str`` of the object and
then returns the bytes version of that string.
:empty: Return an empty byte string
:passthru: Return the object passed in
:strict: Raise a :exc:`TypeError`
:returns: Typically this returns a byte string. If a nonstring object is
passed in this may be a different type depending on the strategy
specified by nonstring. This will never return a text string.
.. note:: If passed a byte string, this function does not check that the
string is valid in the specified encoding. If it's important that the
byte string is in the specified encoding do::
encoded_string = to_bytes(to_text(input_string, 'latin-1'), 'utf-8')
.. version_changed:: 2.3
Added the ``surrogate_then_replace`` error handler and made it the default error handler.
"""
if isinstance(obj, binary_type):
return obj
# We're given a text string
# If it has surrogates, we know because it will decode
original_errors = errors
if errors in _COMPOSED_ERROR_HANDLERS:
if HAS_SURROGATEESCAPE:
errors = 'surrogateescape'
elif errors == 'surrogate_or_strict':
errors = 'strict'
else:
errors = 'replace'
if isinstance(obj, text_type):
try:
# Try this first as it's the fastest
return obj.encode(encoding, errors)
except UnicodeEncodeError:
if original_errors in (None, 'surrogate_then_replace'):
# Slow but works
return_string = obj.encode('utf-8', 'surrogateescape')
return_string = return_string.decode('utf-8', 'replace')
return return_string.encode(encoding, 'replace')
raise
# Note: We do these last even though we have to call to_bytes again on the
# value because we're optimizing the common case
if nonstring == 'simplerepr':
try:
value = str(obj)
except UnicodeError:
try:
value = repr(obj)
except UnicodeError:
# Giving up
return to_bytes('')
elif nonstring == 'passthru':
return obj
elif nonstring == 'empty':
# python2.4 doesn't have b''
return to_bytes('')
elif nonstring == 'strict':
raise TypeError('obj must be a string type')
else:
raise TypeError('Invalid value %s for to_bytes\' nonstring parameter' % nonstring)
return to_bytes(value, encoding, errors)
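# Editor's sketch (not part of the original module): a rough illustration of how
# the error strategies documented above behave. The helper name and the sample
# strings below are hypothetical and exist only for demonstration.
def _to_bytes_examples():
    # Default utf-8 encoding round-trips ordinary text.
    plain = to_bytes(u'caf\xe9')                        # b'caf\xc3\xa9'
    # 'surrogate_then_replace' degrades to replacement characters instead of
    # raising when the target encoding cannot represent the text.
    lossy = to_bytes(u'caf\xe9', encoding='ascii',
                     errors='surrogate_then_replace')   # b'caf?'
    # Non-string input falls back to the 'simplerepr' strategy.
    coerced = to_bytes(42)                              # b'42'
    return plain, lossy, coerced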
def to_text(obj, encoding='utf-8', errors=None, nonstring='simplerepr'):
"""Make sure that a string is a text string
:arg obj: An object to make sure is a text string. In most cases this
will be either a text string or a byte string. However, with
``nonstring='simplerepr'``, this can be used as a traceback-free
version of ``str(obj)``.
:kwarg encoding: The encoding to use to transform from a byte string to
a text string. Defaults to using 'utf-8'.
:kwarg errors: The error handler to use if the byte string is not
decodable using the specified encoding. Any valid `codecs error
handler <https://docs.python.org/2/library/codecs.html#codec-base-classes>`_
may be specified. We support three additional error strategies
specifically aimed at helping people to port code:
:surrogate_or_strict: Will use surrogateescape if it is a valid
handler, otherwise it will use strict
:surrogate_or_replace: Will use surrogateescape if it is a valid
handler, otherwise it will use replace.
:surrogate_then_replace: Does the same as surrogate_or_replace but
            was added for symmetry with the error handlers in
:func:`ansible.module_utils._text.to_bytes` (Added in Ansible 2.3)
Because surrogateescape was added in Python3 this usually means that
Python3 will use `surrogateescape` and Python2 will use the fallback
error handler. Note that the code checks for surrogateescape when the
module is imported. If you have a backport of `surrogateescape` for
python2, be sure to register the error handler prior to importing this
module.
The default until Ansible-2.2 was `surrogate_or_replace`
In Ansible-2.3 this defaults to `surrogate_then_replace` for symmetry
with :func:`ansible.module_utils._text.to_bytes` .
:kwarg nonstring: The strategy to use if a nonstring is specified in
``obj``. Default is 'simplerepr'. Valid values are:
:simplerepr: The default. This takes the ``str`` of the object and
then returns the text version of that string.
:empty: Return an empty text string
:passthru: Return the object passed in
:strict: Raise a :exc:`TypeError`
:returns: Typically this returns a text string. If a nonstring object is
passed in this may be a different type depending on the strategy
specified by nonstring. This will never return a byte string.
From Ansible-2.3 onwards, the default is `surrogate_then_replace`.
.. version_changed:: 2.3
Added the surrogate_then_replace error handler and made it the default error handler.
"""
if isinstance(obj, text_type):
return obj
if errors in _COMPOSED_ERROR_HANDLERS:
if HAS_SURROGATEESCAPE:
errors = 'surrogateescape'
elif errors == 'surrogate_or_strict':
errors = 'strict'
else:
errors = 'replace'
if isinstance(obj, binary_type):
# Note: We don't need special handling for surrogate_then_replace
# because all bytes will either be made into surrogates or are valid
# to decode.
return obj.decode(encoding, errors)
# Note: We do these last even though we have to call to_text again on the
# value because we're optimizing the common case
if nonstring == 'simplerepr':
try:
value = str(obj)
except UnicodeError:
try:
value = repr(obj)
except UnicodeError:
# Giving up
return u''
elif nonstring == 'passthru':
return obj
elif nonstring == 'empty':
return u''
elif nonstring == 'strict':
raise TypeError('obj must be a string type')
else:
raise TypeError('Invalid value %s for to_text\'s nonstring parameter' % nonstring)
return to_text(value, encoding, errors)
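# Editor's sketch (not part of the original module): the decoding counterpart of
# the to_bytes sketch above. The helper name and the byte strings are
# hypothetical and exist only for demonstration.
def _to_text_examples():
    # Default utf-8 decoding of valid bytes.
    decoded = to_text(b'caf\xc3\xa9')                               # u'caf\xe9'
    # Undecodable bytes survive as surrogates (Python 3) or are replaced
    # when the surrogateescape handler is unavailable.
    escaped = to_text(b'caf\xff', errors='surrogate_or_replace')
    # Non-string input falls back to the 'simplerepr' strategy.
    coerced = to_text(3.14)                                         # u'3.14'
    return decoded, escaped, coerced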
#: :py:func:`to_native`
#: Transform a variable into the native str type for the python version
#:
#: On Python2, this is an alias for
#: :func:`~ansible.module_utils.to_bytes`. On Python3 it is an alias for
#: :func:`~ansible.module_utils.to_text`. It makes it easier to
#: transform a variable into the native str type for the python version
#: the code is running on. Use this when constructing the message to
#: send to exceptions or when dealing with an API that needs to take
#: a native string. Example::
#:
#: try:
#: 1//0
#: except ZeroDivisionError as e:
#: raise MyException('Encountered and error: %s' % to_native(e))
if PY3:
to_native = to_text
else:
to_native = to_bytes<|fim▁end|> | 'surrogate_then_replace'))
|
<|file_name|>page.ts<|end_file_name|><|fim▁begin|>/**
* Created by d.d on 25/07/2017.
*/
export interface ITitle {
image: string;
title: string;
subtitle?: string;<|fim▁hole|><|fim▁end|> | } |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate chrono;
extern crate csv;
#[macro_use]
extern crate lazy_static;
extern crate regex;
extern crate safe_unwrap;
extern crate term_painter;
extern crate term_size;
extern crate textwrap;
extern crate try_from;
pub mod err;
mod regexes;
use chrono::{NaiveDate, NaiveDateTime, NaiveTime};
use err::{ColumnConstraintsError, ColumnTypeError, ErrorLoc, ErrorAtLocation, Location, ResultLoc,
SchemaLoadError, ValidationError, ValueError};
use std::{fmt, fs, path, slice};
use std::io::Read;
use safe_unwrap::SafeUnwrap;
use regexes::{IDENT_UNDERSCORE_RE, ENUM_EXPR_RE, CONSTRAINT_RE, DECIMAL_RE, DATE_RE, DATETIME_RE,
FN_RE, TIME_RE};
use try_from::TryFrom;
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct CsvxMetadata {
pub table_name: String,
pub date: NaiveDate,
pub schema: String,
}
impl CsvxMetadata {
pub fn is_schema(&self) -> bool {
self.schema.starts_with("csvx-schema-")
}
}
#[derive(Clone, Debug)]
pub enum ColumnType {
String,
Bool,
Integer,
Enum(Vec<String>),
Decimal,
Date,
DateTime,
Time,
}
impl fmt::Display for ColumnType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
ColumnType::String => write!(f, "STRING"),
ColumnType::Bool => write!(f, "BOOL"),
ColumnType::Integer => write!(f, "INTEGER"),
ColumnType::Enum(ref variants) => write!(f, "ENUM({})", variants.join(",")),
ColumnType::Decimal => write!(f, "DECIMAL"),
ColumnType::Date => write!(f, "DATE"),
ColumnType::DateTime => write!(f, "DATETIME"),
ColumnType::Time => write!(f, "TIME"),
}
}
}
#[derive(Clone, Debug)]
pub struct ColumnConstraints {
pub nullable: bool,
pub unique: bool,
}
impl Default for ColumnConstraints {
fn default() -> ColumnConstraints {
ColumnConstraints {
nullable: false,
unique: false,
}
}
}
impl fmt::Display for ColumnConstraints {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut parts = Vec::new();
if self.nullable {
parts.push("NULLABLE");
}
if self.unique {
parts.push("UNIQUE");
}
write!(f, "{}", parts.join(","))
}
}
impl<S> TryFrom<S> for ColumnConstraints
where
S: AsRef<str>,
{
type Err = ColumnConstraintsError;
fn try_from(s: S) -> Result<ColumnConstraints, Self::Err> {
if !CONSTRAINT_RE.is_match(s.as_ref()) {
return Err(ColumnConstraintsError::MalformedConstraints(
s.as_ref().to_string(),
));
}
let mut ccs = ColumnConstraints::default();
if s.as_ref() == "" {
return Ok(ccs);
}
for fragment in s.as_ref().split(',') {
match fragment.as_ref() {
"NULLABLE" => {
ccs.nullable = true;
}
"UNIQUE" => {
ccs.unique = true;
}
_ => {
return Err(ColumnConstraintsError::UnknownConstraint(
s.as_ref().to_string(),
))
}
}
}
Ok(ccs)
}
}
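// Illustrative sketch (assumes CONSTRAINT_RE, defined in the regexes module
// and not shown here, accepts the comma-separated form):
//
//     let ccs = ColumnConstraints::try_from("NULLABLE,UNIQUE").unwrap();
//     assert!(ccs.nullable && ccs.unique);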
impl<S> TryFrom<S> for ColumnType
where
S: AsRef<str>,
{
type Err = ColumnTypeError;
fn try_from(s: S) -> Result<ColumnType, Self::Err> {
match s.as_ref() {
"STRING" => Ok(ColumnType::String),
"BOOL" => Ok(ColumnType::Bool),
"INTEGER" => Ok(ColumnType::Integer),
"DECIMAL" => Ok(ColumnType::Decimal),
"DATE" => Ok(ColumnType::Date),
"DATETIME" => Ok(ColumnType::DateTime),
"TIME" => Ok(ColumnType::Time),
_ if ENUM_EXPR_RE.is_match(s.as_ref()) => {
let variants: Vec<_> = ENUM_EXPR_RE
.captures(s.as_ref())
.safe_unwrap("match already exists")
.get(1)
.safe_unwrap("group 1 exists in regex")
.as_str()
.split(',')
.map(|s| s.to_owned())
.collect();
Ok(ColumnType::Enum(variants))
}
_ => {
if s.as_ref().starts_with("ENUM") {
return Err(ColumnTypeError::BadEnum(s.as_ref().to_owned()));
}
return Err(ColumnTypeError::UnknownType(s.as_ref().to_owned()));
}
}
}
}
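// Illustrative sketch (assumes ENUM_EXPR_RE, defined in the regexes module and
// not shown here, matches this syntax):
//
//     ColumnType::try_from("DECIMAL")              // -> Ok(ColumnType::Decimal)
//     ColumnType::try_from("ENUM(red,green,blue)") // -> Ok(ColumnType::Enum(..)) with three variants
//     ColumnType::try_from("FLOAT")                // -> Err(ColumnTypeError::UnknownType(..))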
#[derive(Clone, Debug)]
pub struct CsvxColumnType {
pub id: String,
pub ty: ColumnType,<|fim▁hole|> pub constraints: ColumnConstraints,
pub description: String,
}
#[derive(Clone, Debug)]
pub enum Value {
String(String),
Bool(bool),
Integer(i64),
Enum(usize),
Decimal(String),
Date(NaiveDate),
DateTime(NaiveDateTime),
Time(NaiveTime),
}
impl Value {
pub fn to_string(self) -> Option<String> {
match self {
Value::String(s) => Some(s),
Value::Decimal(d) => Some(d),
_ => None,
}
}
pub fn to_bool(self) -> Option<bool> {
if let Value::Bool(val) = self {
Some(val)
} else {
None
}
}
pub fn to_i64(self) -> Option<i64> {
if let Value::Integer(val) = self {
Some(val)
} else {
None
}
}
pub fn to_date(self) -> Option<NaiveDate> {
if let Value::Date(val) = self {
Some(val)
} else {
None
}
}
pub fn to_datetime(self) -> Option<NaiveDateTime> {
if let Value::DateTime(val) = self {
Some(val)
} else {
None
}
}
pub fn to_time(self) -> Option<NaiveTime> {
if let Value::Time(val) = self {
Some(val)
} else {
None
}
}
pub fn to_usize(self) -> Option<usize> {
if let Value::Enum(v) = self {
Some(v)
} else {
None
}
}
}
impl CsvxColumnType {
pub fn validate_value<S: AsRef<str>>(&self, s: &S) -> Result<Option<Value>, ValueError> {
// FIXME: check UNIQUE
// null check
if s.as_ref() == "" {
if self.constraints.nullable {
return Ok(None);
} else {
return Err(ValueError::NonNullable);
}
}
match self.ty {
ColumnType::String => Ok(Some(Value::String(s.as_ref().to_string()))),
ColumnType::Bool => {
match s.as_ref() {
"TRUE" => Ok(Some(Value::Bool(true))),
"FALSE" => Ok(Some(Value::Bool(false))),
_ => Err(ValueError::InvalidBool(s.as_ref().to_owned())),
}
}
ColumnType::Integer => {
// FIXME: check for leading zeros
Ok(Some(Value::Integer(s.as_ref().parse().map_err(|_| {
ValueError::InvalidInt(s.as_ref().to_owned())
})?)))
}
ColumnType::Enum(ref variants) => {
let v = s.as_ref();
if let Some(p) = variants.iter().position(|e| e == v) {
Ok(Some(Value::Enum(p)))
} else {
Err(ValueError::InvalidEnum(
s.as_ref().to_owned(),
variants.clone(),
))
}
}
ColumnType::Decimal => {
if DECIMAL_RE.is_match(s.as_ref()) {
Ok(Some(Value::Decimal(s.as_ref().to_owned())))
} else {
Err(ValueError::InvalidDecimal(s.as_ref().to_owned()))
}
}
ColumnType::Date => {
match DATE_RE.captures(s.as_ref()) {
Some(ref c) => {
Ok(Some(Value::Date(
NaiveDate::from_ymd_opt(cap(c, 1), cap(c, 2), cap(c, 3))
.ok_or_else(|| ValueError::InvalidDate(s.as_ref().to_owned()))?,
)))
}
None => Err(ValueError::InvalidDate(s.as_ref().to_owned())),
}
}
ColumnType::DateTime => {
match DATETIME_RE.captures(s.as_ref()) {
Some(ref c) => {
let dt =
NaiveDate::from_ymd_opt(cap(c, 1), cap(c, 2), cap(c, 3))
.ok_or_else(|| ValueError::InvalidDate(s.as_ref().to_string()))?;
Ok(Some(Value::DateTime(
dt.and_hms_opt(cap(c, 4), cap(c, 5), cap(c, 6)).ok_or_else(
|| {
ValueError::InvalidTime(s.as_ref().to_string())
},
)?,
)))
}
None => Err(ValueError::InvalidDateTime(s.as_ref().to_string())),
}
}
ColumnType::Time => {
match TIME_RE.captures(s.as_ref()) {
Some(ref c) => {
Ok(Some(Value::Time(
NaiveTime::from_hms_opt(cap(c, 1), cap(c, 2), cap(c, 3))
.ok_or_else(|| ValueError::InvalidTime(s.as_ref().to_string()))?,
)))
}
None => Err(ValueError::InvalidTime(s.as_ref().to_string())),
}
}
}
}
}
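// Illustrative sketch with a hypothetical column (the accepted date syntax
// depends on DATE_RE, which is defined in the regexes module and not shown here):
//
//     let col = CsvxColumnType {
//         id: "born".to_string(),
//         ty: ColumnType::Date,
//         constraints: ColumnConstraints::default(),
//         description: String::new(),
//     };
//     col.validate_value(&"2017-04-01"); // Ok(Some(Value::Date(..))) if DATE_RE matches
//     col.validate_value(&"");           // Err(ValueError::NonNullable): column is not NULLABLE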
#[derive(Clone, Debug)]
pub struct CsvxSchema {
columns: Vec<CsvxColumnType>,
}
impl CsvxSchema {
pub fn iter_columns(&self) -> slice::Iter<CsvxColumnType> {
self.columns.iter()
}
pub fn col_idx(&self, col: &str) -> Option<usize> {
self.columns.iter().position(|c| col == c.id)
}
pub fn from_file<P: AsRef<path::Path>>(
filename: P,
) -> Result<CsvxSchema, ErrorAtLocation<SchemaLoadError, Location>> {
// have a copy of the filename as a string ready for error locations
let filename_s: String = filename.as_ref().to_string_lossy().into_owned();
let mut file = fs::File::open(filename).err_at(|| {
Location::File(filename_s.clone())
})?;
let mut contents = String::new();
file.read_to_string(&mut contents).err_at(|| {
Location::File(filename_s.clone())
})?;
Self::from_string(contents.as_str(), filename_s.as_ref())
}
pub fn from_string(
src: &str,
filename: &str,
) -> Result<CsvxSchema, ErrorAtLocation<SchemaLoadError, Location>> {
// have a copy of the filename as a string ready for error locations
let filename_s = filename.to_string();
let mut rdr = csv::Reader::from_string(src).has_headers(false);
let mut it = rdr.decode();
let header: Option<Result<(String, String, String, String), _>> = it.next();
let mut columns = Vec::new();
match header {
None => {
return Err(SchemaLoadError::MissingHeader.at(Location::FileLine(
filename_s,
1,
)))
}
Some(res) => {
let fields = res.err_at(|| Location::File(filename_s.clone()))?;
if fields.0 != "id" || fields.1 != "type" || fields.2 != "constraints" ||
fields.3 != "description"
{
return Err(SchemaLoadError::BadHeader.at(
Location::FileLine(filename_s, 1),
));
}
for (recno, rec) in it.enumerate() {
let (id, ty, constraints, desc) =
rec.err_at(|| Location::FileLine(filename_s.clone(), 1))?;
let lineno = recno + 2;
// check identifier
if !IDENT_UNDERSCORE_RE.is_match(&id.as_str()) {
return Err(SchemaLoadError::BadIdentifier(id).at(
Location::FileLineField(
filename_s,
lineno,
1,
),
));
}
// create type
let col_type = match ColumnType::try_from(ty.as_str()) {
Ok(v) => v,
Err(e) => {
return Err(SchemaLoadError::BadType(e).at(Location::FileLineField(
filename_s,
lineno,
1,
)))
}
};
// create constraints
let col_constraints = match ColumnConstraints::try_from(constraints.as_str()) {
Ok(v) => v,
// FIXME: location
Err(e) => {
return Err(SchemaLoadError::BadConstraints(e).at(Location::FileLine(
filename_s,
lineno,
)))
}
};
let col = CsvxColumnType {
id: id,
ty: col_type,
constraints: col_constraints,
description: desc,
};
columns.push(col)
}
Ok(CsvxSchema { columns: columns })
}
}
}
pub fn validate_file<P: AsRef<path::Path>>(
&self,
filename: P,
) -> Result<(), Vec<ErrorAtLocation<ValidationError, Location>>> {
let filename_s = filename.as_ref().to_string_lossy().to_string();
let mut rdr = csv::Reader::from_file(filename)
.map_err(|e| vec![e.at(Location::File(filename_s.clone()))])?
.has_headers(true);
let headers = rdr.headers().map_err(|e| {
vec![e.at(Location::FileLine(filename_s.clone(), 1))]
})?;
if headers.len() != self.columns.len() {
return Err(vec![
ValidationError::MissingHeaders.at(Location::FileLine(
filename_s.clone(),
1,
)),
]);
}
let mut errs = Vec::new();
for (idx, (spec, actual)) in self.columns.iter().zip(headers.iter()).enumerate() {
if spec.id.as_str() != actual {
errs.push(ValidationError::HeaderMismatch(actual.to_string()).at(
Location::FileLineField(filename_s.clone(), 1, idx + 1),
));
}
}
// bail if headers are incorrect
if errs.len() != 0 {
return Err(errs);
}
for (rowid, row) in rdr.records().enumerate() {
let lineno = rowid + 2;
// bail early if we cannot read the fields, this is probably a
// major csv issue
let fields = row.map_err(
|e| vec![e.at(Location::FileLine(filename_s.clone(), 1))],
)?;
for (idx, (col, value)) in self.columns.iter().zip(fields.iter()).enumerate() {
if let Err(e) = col.validate_value(value) {
let col_idx = idx + 1;
errs.push(ValidationError::ValueError(e).at(Location::FileLineField(
filename_s.clone(),
lineno,
col_idx,
)));
continue;
}
}
}
if errs.len() != 0 {
return Err(errs);
} else {
Ok(())
}
}
pub fn parse_row<T: AsRef<[String]>>(
&self,
fields: &T,
) -> Result<Vec<Option<Value>>, ErrorAtLocation<ValidationError, usize>> {
let mut rv = Vec::with_capacity(self.columns.len());
let fields = fields.as_ref();
for (idx, (col, value)) in self.columns.iter().zip(fields.iter()).enumerate() {
match col.validate_value(value) {
Err(e) => {
let col_idx = idx + 1;
return Err(ValidationError::ValueError(e).at(col_idx));
}
Ok(v) => rv.push(v),
}
}
Ok(rv)
}
pub fn read_field<T: AsRef<[String]>>(
&self,
fields: &T,
idx: usize,
) -> Result<Option<Value>, ValidationError> {
let col = self.columns.get(idx).ok_or(ValidationError::SchemaMismatch)?;
let raw = fields.as_ref().get(idx).ok_or(
ValidationError::SchemaMismatch,
)?;
let field = col.validate_value(raw)?;
Ok(field)
}
pub fn read_field_by_name<T: AsRef<[String]>>(
&self,
fields: &T,
name: &str,
) -> Result<Option<Value>, ValidationError> {
let idx = self.col_idx(name).ok_or(ValidationError::SchemaMismatch)?;
self.read_field(fields, idx)
}
}
#[inline]
fn cap<T>(c: ®ex::Captures, idx: usize) -> T
where
T: std::str::FromStr,
T::Err: std::fmt::Debug,
{
c.get(idx)
.safe_unwrap("valid group")
.as_str()
.parse()
.safe_unwrap("already validated through regex")
}
pub fn parse_filename<S: AsRef<str>>(filename: S) -> Option<CsvxMetadata> {
match FN_RE.captures(filename.as_ref()) {
Some(caps) => {
let table_name = caps.get(1).safe_unwrap("known group").as_str().to_string();
let year = cap(&caps, 2);
let month = cap(&caps, 3);
let day = cap(&caps, 4);
let schema = caps.get(5).safe_unwrap("known group").as_str().to_string();
Some(CsvxMetadata {
table_name: table_name,
date: match NaiveDate::from_ymd_opt(year, month, day) {
Some(d) => d,
None => return None,
},
schema: schema,
})
}
None => None,
}
}
#[cfg(test)]
mod test {
use super::*;
use chrono::NaiveDate;
#[test]
fn filename_parsing_rejects_invalid() {
assert_eq!(parse_filename("asdf"), None);
assert_eq!(parse_filename(""), None);
assert_eq!(parse_filename("test.csv"), None);
assert_eq!(parse_filename("test.csv"), None);
}
#[test]
fn filename_parsing_parses_valid() {
assert_eq!(
parse_filename("zoo-nyc_20170401_animals-2.csv").unwrap(),
CsvxMetadata {
table_name: "zoo-nyc".to_owned(),
date: NaiveDate::from_ymd(2017, 04, 01),
schema: "animals-2".to_owned(),
}
);
}
}<|fim▁end|> | |
<|file_name|>set_insert.rs<|end_file_name|><|fim▁begin|>use crate::{cmd, Command};<|fim▁hole|>
pub trait Arg {
fn arg(self) -> cmd::Arg<()>;
}
impl Arg for Command {
fn arg(self) -> cmd::Arg<()> {
Self::new(TermType::SetInsert).with_arg(self).into_arg()
}
}<|fim▁end|> | use ql2::term::TermType; |
<|file_name|>plot_directive.py<|end_file_name|><|fim▁begin|>"""
A directive for including a matplotlib plot in a Sphinx document.
By default, in HTML output, `plot` will include a .png file with a
link to a high-res .png and .pdf. In LaTeX output, it will include a
.pdf.
The source code for the plot may be included in one of three ways:
1. **A path to a source file** as the argument to the directive::
.. plot:: path/to/plot.py
When a path to a source file is given, the content of the
directive may optionally contain a caption for the plot::
.. plot:: path/to/plot.py
This is the caption for the plot
Additionally, one may specify the name of a function to call (with
no arguments) immediately after importing the module::
.. plot:: path/to/plot.py plot_function1
2. Included as **inline content** to the directive::
.. plot::
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
img = mpimg.imread('_static/stinkbug.png')
imgplot = plt.imshow(img)
3. Using **doctest** syntax::
.. plot::
A plotting example:
>>> import matplotlib.pyplot as plt
>>> plt.plot([1,2,3], [4,5,6])
Options
-------
The ``plot`` directive supports the following options:
format : {'python', 'doctest'}
Specify the format of the input
include-source : bool
Whether to display the source code. The default can be changed
using the `plot_include_source` variable in conf.py
encoding : str
If this source file is in a non-UTF8 or non-ASCII encoding,
the encoding must be specified using the `:encoding:` option.
The encoding will not be inferred using the ``-*- coding -*-``
metacomment.
context : bool
If provided, the code will be run in the context of all
previous plot directives for which the `:context:` option was
specified. This only applies to inline code plot directives,
not those run from files.
nofigs : bool
If specified, the code block will be run, but no figures will
be inserted. This is usually useful with the ``:context:``
option.
Additionally, this directive supports all of the options of the
`image` directive, except for `target` (since plot will add its own
target). These include `alt`, `height`, `width`, `scale`, `align` and
`class`.
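For instance, several of the options above can be combined in a single
directive (an illustrative example reusing the hypothetical file name from
earlier)::

    .. plot:: path/to/plot.py
       :include-source:
       :width: 60%
       :align: center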
Configuration options
---------------------
The plot directive has the following configuration options:
plot_include_source
Default value for the include-source option
plot_pre_code
Code that should be executed before each plot.
plot_basedir
Base directory, to which ``plot::`` file names are relative
to. (If None or empty, file names are relative to the
        directory where the file containing the directive is.)
plot_formats
File formats to generate. List of tuples or strings::
[(suffix, dpi), suffix, ...]
that determine the file format and the DPI. For entries whose
DPI was omitted, sensible defaults are chosen.
plot_html_show_formats
Whether to show links to the files in HTML.
plot_rcparams
A dictionary containing any non-standard rcParams that should
be applied before each plot.
plot_apply_rcparams
By default, rcParams are applied when `context` option is not used in
a plot directive. This configuration option overrides this behaviour
and applies rcParams before each plot.
plot_working_directory
By default, the working directory will be changed to the directory of
the example, so the code can get at its data files, if any. Also its
path will be added to `sys.path` so it can import any helper modules
sitting beside it. This configuration option can be used to specify
a central directory (also added to `sys.path`) where data files and
helper modules for all code are located.
plot_template
        Provide a customized template for preparing restructured text.
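An illustrative ``conf.py`` fragment combining these options (the extension
path and every value shown here are examples, not shipped defaults)::

    extensions = ['matplotlib.sphinxext.plot_directive']  # or wherever this module is installed
    plot_include_source = True
    plot_formats = [('png', 80), ('hires.png', 200), ('pdf', 200)]
    plot_html_show_formats = True
    plot_rcparams = {'figure.figsize': (4.0, 3.0)}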
"""
from __future__ import print_function
import sys, os, glob, shutil, imp, warnings, cStringIO, re, textwrap
import traceback
from docutils.parsers.rst import directives
from docutils import nodes
from docutils.parsers.rst.directives.images import Image
align = Image.align
import sphinx
sphinx_version = sphinx.__version__.split(".")
# The split is necessary for sphinx beta versions where the string is
# '6b1'
sphinx_version = tuple([int(re.split('[a-z]', x)[0])
for x in sphinx_version[:2]])
try:
# Sphinx depends on either Jinja or Jinja2
import jinja2
def format_template(template, **kw):
return jinja2.Template(template).render(**kw)
except ImportError:
import jinja
def format_template(template, **kw):
return jinja.from_string(template, **kw)
import matplotlib
import matplotlib.cbook as cbook
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from matplotlib import _pylab_helpers
__version__ = 2
#------------------------------------------------------------------------------
# Relative pathnames
#------------------------------------------------------------------------------
# os.path.relpath is new in Python 2.6
try:
from os.path import relpath
except ImportError:
# Copied from Python 2.7
if 'posix' in sys.builtin_module_names:
def relpath(path, start=os.path.curdir):
"""Return a relative version of a path"""
from os.path import sep, curdir, join, abspath, commonprefix, \
pardir
if not path:
raise ValueError("no path specified")
start_list = abspath(start).split(sep)
path_list = abspath(path).split(sep)
# Work out how much of the filepath is shared by start and path.
i = len(commonprefix([start_list, path_list]))
rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
if not rel_list:
return curdir
return join(*rel_list)
elif 'nt' in sys.builtin_module_names:
def relpath(path, start=os.path.curdir):
"""Return a relative version of a path"""
from os.path import sep, curdir, join, abspath, commonprefix, \
pardir, splitunc
if not path:
raise ValueError("no path specified")
start_list = abspath(start).split(sep)
path_list = abspath(path).split(sep)
if start_list[0].lower() != path_list[0].lower():
unc_path, rest = splitunc(path)
unc_start, rest = splitunc(start)
if bool(unc_path) ^ bool(unc_start):
raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)"
% (path, start))
else:
raise ValueError("path is on drive %s, start on drive %s"
% (path_list[0], start_list[0]))
# Work out how much of the filepath is shared by start and path.
for i in range(min(len(start_list), len(path_list))):
if start_list[i].lower() != path_list[i].lower():
break
else:
i += 1
rel_list = [pardir] * (len(start_list)-i) + path_list[i:]
if not rel_list:
return curdir
return join(*rel_list)
else:
raise RuntimeError("Unsupported platform (no relpath available!)")
#------------------------------------------------------------------------------
# Registration hook
#------------------------------------------------------------------------------
def plot_directive(name, arguments, options, content, lineno,
content_offset, block_text, state, state_machine):
return run(arguments, content, options, state_machine, state, lineno)
plot_directive.__doc__ = __doc__
def _option_boolean(arg):
if not arg or not arg.strip():
# no argument given, assume used as a flag
return True
elif arg.strip().lower() in ('no', '0', 'false'):
return False
elif arg.strip().lower() in ('yes', '1', 'true'):
return True
else:
raise ValueError('"%s" unknown boolean' % arg)
def _option_format(arg):
return directives.choice(arg, ('python', 'doctest'))
def _option_align(arg):
return directives.choice(arg, ("top", "middle", "bottom", "left", "center",
"right"))
def mark_plot_labels(app, document):
"""
To make plots referenceable, we need to move the reference from
the "htmlonly" (or "latexonly") node to the actual figure node
itself.
"""
for name, explicit in document.nametypes.iteritems():
if not explicit:
continue
labelid = document.nameids[name]
if labelid is None:
continue
node = document.ids[labelid]
if node.tagname in ('html_only', 'latex_only'):
for n in node:
if n.tagname == 'figure':
sectname = name
for c in n:
if c.tagname == 'caption':
sectname = c.astext()
break
node['ids'].remove(labelid)
node['names'].remove(name)
n['ids'].append(labelid)
n['names'].append(name)
document.settings.env.labels[name] = \
document.settings.env.docname, labelid, sectname
break
def setup(app):
setup.app = app
setup.config = app.config
setup.confdir = app.confdir
options = {'alt': directives.unchanged,
'height': directives.length_or_unitless,
'width': directives.length_or_percentage_or_unitless,
'scale': directives.nonnegative_int,
'align': _option_align,
'class': directives.class_option,
'include-source': _option_boolean,
'format': _option_format,
'context': directives.flag,
'nofigs': directives.flag,
'encoding': directives.encoding
}
app.add_directive('plot', plot_directive, True, (0, 2, False), **options)
app.add_config_value('plot_pre_code', None, True)
app.add_config_value('plot_include_source', False, True)
app.add_config_value('plot_formats', ['png', 'hires.png', 'pdf'], True)
app.add_config_value('plot_basedir', None, True)
app.add_config_value('plot_html_show_formats', True, True)
app.add_config_value('plot_rcparams', {}, True)
app.add_config_value('plot_apply_rcparams', False, True)
app.add_config_value('plot_working_directory', None, True)
app.add_config_value('plot_template', None, True)
app.connect('doctree-read', mark_plot_labels)
#------------------------------------------------------------------------------
# Doctest handling
#------------------------------------------------------------------------------
def contains_doctest(text):
try:
# check if it's valid Python as-is
compile(text, '<string>', 'exec')
return False
except SyntaxError:
pass
r = re.compile(r'^\s*>>>', re.M)
m = r.search(text)
return bool(m)
def unescape_doctest(text):
"""
Extract code from a piece of text, which contains either Python code
or doctests.
"""
if not contains_doctest(text):
return text
code = ""
for line in text.split("\n"):
m = re.match(r'^\s*(>>>|\.\.\.) (.*)$', line)
if m:
code += m.group(2) + "\n"
elif line.strip():
code += "# " + line.strip() + "\n"
else:
code += "\n"
return code
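# Illustrative sketch: unescape_doctest() keeps doctest statements and comments
# out everything else, so a block such as
#
#     >>> plt.plot([1, 2, 3], [4, 5, 6])
#     [<matplotlib.lines.Line2D object at ...>]
#
# becomes
#
#     plt.plot([1, 2, 3], [4, 5, 6])
#     # [<matplotlib.lines.Line2D object at ...>]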
def split_code_at_show(text):
"""
Split code at plt.show()
"""
parts = []
is_doctest = contains_doctest(text)
part = []
for line in text.split("\n"):
if (not is_doctest and line.strip() == 'plt.show()') or \
(is_doctest and line.strip() == '>>> plt.show()'):
part.append(line)
parts.append("\n".join(part))
part = []
else:
part.append(line)
if "\n".join(part).strip():
parts.append("\n".join(part))
return parts
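# Illustrative sketch: a directive body such as
#
#     plt.plot([1, 2])
#     plt.show()
#     plt.plot([3, 4])
#
# is split by split_code_at_show() at the plt.show() line into two code pieces,
# each of which is rendered to its own set of figures by render_figures() below.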
#------------------------------------------------------------------------------
# Template
#------------------------------------------------------------------------------
TEMPLATE = """
{{ source_code }}
{{ only_html }}
{% if source_link or (html_show_formats and not multi_image) %}
(
{%- if source_link -%}
`Source code <{{ source_link }}>`__
{%- endif -%}
{%- if html_show_formats and not multi_image -%}
{%- for img in images -%}
{%- for fmt in img.formats -%}
{%- if source_link or not loop.first -%}, {% endif -%}
`{{ fmt }} <{{ dest_dir }}/{{ img.basename }}.{{ fmt }}>`__
{%- endfor -%}
{%- endfor -%}
{%- endif -%}
)
{% endif %}
{% for img in images %}
.. figure:: {{ build_dir }}/{{ img.basename }}.png
{%- for option in options %}
{{ option }}
{% endfor %}
{% if html_show_formats and multi_image -%}
(
{%- for fmt in img.formats -%}
{%- if not loop.first -%}, {% endif -%}
`{{ fmt }} <{{ dest_dir }}/{{ img.basename }}.{{ fmt }}>`__
{%- endfor -%}
)
{%- endif -%}
{{ caption }}
{% endfor %}
{{ only_latex }}
{% for img in images %}
.. image:: {{ build_dir }}/{{ img.basename }}.pdf
{% endfor %}
{{ only_texinfo }}
{% for img in images %}
.. image:: {{ build_dir }}/{{ img.basename }}.png
{%- for option in options %}
{{ option }}
{% endfor %}
{% endfor %}
"""
exception_template = """
.. htmlonly::
[`source code <%(linkdir)s/%(basename)s.py>`__]
Exception occurred rendering plot.
"""
# the context of the plot for all directives specified with the
# :context: option
plot_context = dict()
class ImageFile(object):
def __init__(self, basename, dirname):
self.basename = basename
self.dirname = dirname
self.formats = []
def filename(self, format):
return os.path.join(self.dirname, "%s.%s" % (self.basename, format))
def filenames(self):
return [self.filename(fmt) for fmt in self.formats]
def out_of_date(original, derived):
"""
Returns True if derivative is out-of-date wrt original,
both of which are full file paths.
"""
return (not os.path.exists(derived) or
(os.path.exists(original) and
os.stat(derived).st_mtime < os.stat(original).st_mtime))
class PlotError(RuntimeError):
pass
def run_code(code, code_path, ns=None, function_name=None):
"""
Import a Python module from a path, and run the function given by
name, if function_name is not None.
"""
# Change the working directory to the directory of the example, so
# it can get at its data files, if any. Add its path to sys.path
# so it can import any helper modules sitting beside it.
pwd = os.getcwd()
old_sys_path = list(sys.path)
if setup.config.plot_working_directory is not None:
try:
os.chdir(setup.config.plot_working_directory)
except OSError as err:
            raise OSError(str(err) + '\n`plot_working_directory` option in '
'Sphinx configuration file must be a valid '
'directory path')
except TypeError as err:
raise TypeError(str(err) + '\n`plot_working_directory` option in '
'Sphinx configuration file must be a string or '
'None')
sys.path.insert(0, setup.config.plot_working_directory)
elif code_path is not None:
dirname = os.path.abspath(os.path.dirname(code_path))
os.chdir(dirname)
sys.path.insert(0, dirname)
# Redirect stdout
stdout = sys.stdout
sys.stdout = cStringIO.StringIO()
# Reset sys.argv
old_sys_argv = sys.argv
sys.argv = [code_path]
try:
try:
code = unescape_doctest(code)
if ns is None:
ns = {}
if not ns:
if setup.config.plot_pre_code is None:
exec "import numpy as np\nfrom matplotlib import pyplot as plt\n" in ns
else:
exec setup.config.plot_pre_code in ns
if "__main__" in code:
exec "__name__ = '__main__'" in ns
exec code in ns
if function_name is not None:
exec function_name + "()" in ns
except (Exception, SystemExit), err:
raise PlotError(traceback.format_exc())
finally:
os.chdir(pwd)
sys.argv = old_sys_argv
sys.path[:] = old_sys_path
sys.stdout = stdout
return ns
def clear_state(plot_rcparams):
plt.close('all')
matplotlib.rc_file_defaults()
matplotlib.rcParams.update(plot_rcparams)
def render_figures(code, code_path, output_dir, output_base, context,
function_name, config):
"""
Run a pyplot script and save the low and high res PNGs and a PDF
in outdir.
Save the images under *output_dir* with file names derived from
*output_base*
"""
# -- Parse format list
default_dpi = {'png': 80, 'hires.png': 200, 'pdf': 200}
formats = []
plot_formats = config.plot_formats
if isinstance(plot_formats, (str, unicode)):
plot_formats = eval(plot_formats)
for fmt in plot_formats:
if isinstance(fmt, str):
formats.append((fmt, default_dpi.get(fmt, 80)))
elif type(fmt) in (tuple, list) and len(fmt)==2:
formats.append((str(fmt[0]), int(fmt[1])))
else:
raise PlotError('invalid image format "%r" in plot_formats' % fmt)
# -- Try to determine if all images already exist
code_pieces = split_code_at_show(code)
    # Look for single-figure output files first
all_exists = True
img = ImageFile(output_base, output_dir)
for format, dpi in formats:
if out_of_date(code_path, img.filename(format)):
all_exists = False
break
img.formats.append(format)
if all_exists:
return [(code, [img])]
# Then look for multi-figure output files
results = []
all_exists = True
for i, code_piece in enumerate(code_pieces):
images = []
for j in xrange(1000):
if len(code_pieces) > 1:
img = ImageFile('%s_%02d_%02d' % (output_base, i, j), output_dir)
else:
img = ImageFile('%s_%02d' % (output_base, j), output_dir)
for format, dpi in formats:
if out_of_date(code_path, img.filename(format)):
all_exists = False
break
img.formats.append(format)
# assume that if we have one, we have them all
if not all_exists:
all_exists = (j > 0)
break
images.append(img)
if not all_exists:
break
results.append((code_piece, images))
if all_exists:
return results
# We didn't find the files, so build them
results = []
if context:
ns = plot_context
else:
ns = {}
for i, code_piece in enumerate(code_pieces):
if not context or config.plot_apply_rcparams:
clear_state(config.plot_rcparams)
run_code(code_piece, code_path, ns, function_name)
images = []
fig_managers = _pylab_helpers.Gcf.get_all_fig_managers()
for j, figman in enumerate(fig_managers):
if len(fig_managers) == 1 and len(code_pieces) == 1:
img = ImageFile(output_base, output_dir)
elif len(code_pieces) == 1:
img = ImageFile("%s_%02d" % (output_base, j), output_dir)
else:
img = ImageFile("%s_%02d_%02d" % (output_base, i, j),
output_dir)
images.append(img)
for format, dpi in formats:
try:
figman.canvas.figure.savefig(img.filename(format), dpi=dpi)
except Exception,err:
raise PlotError(traceback.format_exc())
img.formats.append(format)
results.append((code_piece, images))
if not context or config.plot_apply_rcparams:
clear_state(config.plot_rcparams)
return results
def run(arguments, content, options, state_machine, state, lineno):
# The user may provide a filename *or* Python code content, but not both
if arguments and content:
raise RuntimeError("plot:: directive can't have both args and content")
document = state_machine.document<|fim▁hole|> options.setdefault('include-source', config.plot_include_source)
context = options.has_key('context')
rst_file = document.attributes['source']
rst_dir = os.path.dirname(rst_file)
if len(arguments):
if not config.plot_basedir:
source_file_name = os.path.join(setup.app.builder.srcdir,
directives.uri(arguments[0]))
else:
source_file_name = os.path.join(setup.confdir, config.plot_basedir,
directives.uri(arguments[0]))
# If there is content, it will be passed as a caption.
caption = '\n'.join(content)
# If the optional function name is provided, use it
if len(arguments) == 2:
function_name = arguments[1]
else:
function_name = None
with open(source_file_name, 'r') as fd:
code = fd.read()
output_base = os.path.basename(source_file_name)
else:
source_file_name = rst_file
code = textwrap.dedent("\n".join(map(str, content)))
counter = document.attributes.get('_plot_counter', 0) + 1
document.attributes['_plot_counter'] = counter
base, ext = os.path.splitext(os.path.basename(source_file_name))
output_base = '%s-%d.py' % (base, counter)
function_name = None
caption = ''
base, source_ext = os.path.splitext(output_base)
if source_ext in ('.py', '.rst', '.txt'):
output_base = base
else:
source_ext = ''
# ensure that LaTeX includegraphics doesn't choke in foo.bar.pdf filenames
output_base = output_base.replace('.', '-')
# is it in doctest format?
is_doctest = contains_doctest(code)
if options.has_key('format'):
if options['format'] == 'python':
is_doctest = False
else:
is_doctest = True
# determine output directory name fragment
source_rel_name = relpath(source_file_name, setup.confdir)
source_rel_dir = os.path.dirname(source_rel_name)
while source_rel_dir.startswith(os.path.sep):
source_rel_dir = source_rel_dir[1:]
# build_dir: where to place output files (temporarily)
build_dir = os.path.join(os.path.dirname(setup.app.doctreedir),
'plot_directive',
source_rel_dir)
# get rid of .. in paths, also changes pathsep
# see note in Python docs for warning about symbolic links on Windows.
# need to compare source and dest paths at end
build_dir = os.path.normpath(build_dir)
if not os.path.exists(build_dir):
os.makedirs(build_dir)
# output_dir: final location in the builder's directory
dest_dir = os.path.abspath(os.path.join(setup.app.builder.outdir,
source_rel_dir))
if not os.path.exists(dest_dir):
        os.makedirs(dest_dir)
# how to link to files from the RST file
dest_dir_link = os.path.join(relpath(setup.confdir, rst_dir),
source_rel_dir).replace(os.path.sep, '/')
build_dir_link = relpath(build_dir, rst_dir).replace(os.path.sep, '/')
source_link = dest_dir_link + '/' + output_base + source_ext
# make figures
try:
results = render_figures(code, source_file_name, build_dir, output_base,
context, function_name, config)
errors = []
except PlotError, err:
reporter = state.memo.reporter
sm = reporter.system_message(
2, "Exception occurred in plotting %s\n from %s:\n%s" % (output_base,
source_file_name, err),
line=lineno)
results = [(code, [])]
errors = [sm]
# Properly indent the caption
caption = '\n'.join(' ' + line.strip()
for line in caption.split('\n'))
# generate output restructuredtext
total_lines = []
for j, (code_piece, images) in enumerate(results):
if options['include-source']:
if is_doctest:
lines = ['']
lines += [row.rstrip() for row in code_piece.split('\n')]
else:
lines = ['.. code-block:: python', '']
lines += [' %s' % row.rstrip()
for row in code_piece.split('\n')]
source_code = "\n".join(lines)
else:
source_code = ""
if nofigs:
images = []
opts = [':%s: %s' % (key, val) for key, val in options.items()
if key in ('alt', 'height', 'width', 'scale', 'align', 'class')]
only_html = ".. only:: html"
only_latex = ".. only:: latex"
only_texinfo = ".. only:: texinfo"
if j == 0:
src_link = source_link
else:
src_link = None
result = format_template(
config.plot_template or TEMPLATE,
dest_dir=dest_dir_link,
build_dir=build_dir_link,
source_link=src_link,
multi_image=len(images) > 1,
only_html=only_html,
only_latex=only_latex,
only_texinfo=only_texinfo,
options=opts,
images=images,
source_code=source_code,
html_show_formats=config.plot_html_show_formats,
caption=caption)
total_lines.extend(result.split("\n"))
total_lines.extend("\n")
if total_lines:
state_machine.insert_input(total_lines, source=source_file_name)
# copy image files to builder's output directory, if necessary
if not os.path.exists(dest_dir):
cbook.mkdirs(dest_dir)
for code_piece, images in results:
for img in images:
for fn in img.filenames():
destimg = os.path.join(dest_dir, os.path.basename(fn))
if fn != destimg:
shutil.copyfile(fn, destimg)
# copy script (if necessary)
target_name = os.path.join(dest_dir, output_base + source_ext)
with open(target_name, 'w') as f:
if source_file_name == rst_file:
code_escaped = unescape_doctest(code)
else:
code_escaped = code
f.write(code_escaped)
return errors<|fim▁end|> | config = document.settings.env.config
nofigs = options.has_key('nofigs')
|