prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>master.py<|end_file_name|><|fim▁begin|><|fim▁hole|># You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model definition for the master data set.
The entities of the master data set consist of 'edges' and 'properties' and conform to the
fact based model. <Reference>
Every entity has a unique data_unit_index. It is horizontally partitioned wrt. the
partition key and a granularity.
An edge or property must define a unique data_unit_index, the partition key and granularity.
The schema contains the edges and properties of an entity and the entities which are related by the edge."""
import peachbox.model
class UserReviewEdge(peachbox.model.MasterDataSet,TaskImportModel):
"""A particular realization of an 'edge'. Here: the user review edge """
data_unit_index = 0
partition_key = 'true_as_of_seconds'
partition_granularity = 60*60*24*360
schema = [{'field':'user_id', 'type':'StringType'},
{'field':'review_id', 'type':'StringType'}]
def lhs_node(self, row):
pass
def calc_value(self,field,row):
field = 'review_id'
val = 4*3*row.review_id
self.set_value(field,val)
def import(row):
self.lhs_node(row.user_id)
self.rhs_node(row.review_id)
self.partition_key(row.time)
class ProductReviewEdge(peachbox.model.MasterDataSet):
"""A particular realization of an 'edge'. Here: the product review edge """
data_unit_index = 1
partition_key = 'true_as_of_seconds'
partition_granularity = 60*60*24*360
schema = [{'field':'review_id', 'type':'StringType'},
{'field':'product_id', 'type':'StringType'}]
class ReviewProperties(peachbox.model.MasterDataSet):
"""A particular realization of a node, containing several properties. Here: the review properties """
data_unit_index = 2
partition_key = 'true_as_of_seconds'
partition_granularity = 60*60*24*360
time_fill_method = fill_name('time')
model = [{'field':'review_id', 'type':'StringType', 'fill_method': fill_review_id},
{'field':'helpful', 'type':'IntegerType', 'fill_method': helpful},
{'field':'nothelpful', 'type':'IntegerType', 'fill_method':fill_nothelpful},
{'field':'score', 'type':'IntegerType'},
{'field':'summary', 'type':'StringType'},
{'field':'text', 'type':'StringType'}]
source_fields = [{'field:review_id','type:StringType','validation:notempty'},
{'field':'text','validation:notempty'}]
def __init__(self):
self.build_model()
def helpful(self, row, field=''):
lambda row: int(row['helpfulness'].split('/')[0])
def fill_review_id(self, row, field):
user_id = row['user_id']
product_id = row['product_id']
true_as_of_seconds = row['time']
return unicode(hash(user_id+product_id+str(true_as_of_seconds)))
def fill_nothelpful(self, row, field):
return int(row['helpfulness'].split('/')[1]) - fill_method['helpful'](row,'helpful')
class UserProperties(peachbox.model.MasterDataSet):
"""A particular realization of properties. Here: the user properties """
data_unit_index = 3
partition_key = 'true_as_seconds'
partition_granularity = 60*60*24*360
schema = [{'field':'user_id', 'type':'StringType'},
{'field':'profile_name', 'type':'StringType'}]<|fim▁end|> | # Copyright 2015 Philipp Pahl, Sven Schubert, Daniel Britzger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. |
<|file_name|>models.ts<|end_file_name|><|fim▁begin|>import { SelectableItem, LoadableItem } from '../core/classes';
import { Company, CompanyDetail } from './contracts';
import { StorageItem } from './storage/models';
export class CompanyItem extends SelectableItem {
private _item : Company;
get item() {
return this._item;
}
set item(i : Company) {
this._item = i;
}
constructor(i : Company) {
super();
this._item = i;
this.isOpen = false;
}
private _isOpen : boolean;
get isOpen() {
return !!this._isOpen;
}
set isOpen(val : boolean) {
this._isOpen = val;
}
}
export class CompanyDetailItem extends LoadableItem {
private _item : CompanyDetail;
private _storage : StorageItem[];
constructor(i? : CompanyDetail) {
super();
if(!!i) {
this._item = i;
this.checkLoading();
}
this.isOpen = false;
}
checkLoading() {
if(!!this._item && !!this._storage) this.isLoading = false;
else this.isLoading = true;
}
get item() {
return this._item;
}
set item(i : CompanyDetail) {
this._item = i;
this.checkLoading();<|fim▁hole|> set storage(list : StorageItem[]) {
this._storage = list;
this.checkLoading();
}
private _isOpen : boolean;
get isOpen() {
return !!this._isOpen;
}
set isOpen(val : boolean) {
this._isOpen = val;
}
}<|fim▁end|> | }
get storage() {
return this._storage;
} |
<|file_name|>address.go<|end_file_name|><|fim▁begin|>/*
Copyright 2015 Philippe Hilger, aka "PeerGum"
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
//-----------------------------------------------------
// IMPORTANT:
// This address layer is network-agnostic.
// It should not make any reference to the IP network.
//-----------------------------------------------------
package layer
import (
"fmt"
"math"
"math/rand"
"reflect"
"strconv"
"time"
)
type AddressDistance uint64
const DIST_MAX AddressDistance = 1 << 16 // maximum distance
const ADDRESS_SIZE = 2 // number of 64bits values
const (
ADDRESS_INACTIVE = iota
ADDRESS_ACTIVE
)
type AddressValue [ADDRESS_SIZE]uint64
type PeerList []AddressValue
type RerouteList struct {
Gimme *PeerList
Get *PeerList
}
// Address is the peer address structure.
// One peer can possibly have several peer addresses
// distributed on its interfaces
type Address struct {
name string // name for this peer address
value AddressValue // peer address
interfaces Interfaces // physical interfaces
external ExternalAddress // external (ip) address
port int // peer port
mgmt int // management port
status int // status of this address
}
type Addresses []*Address
var (
SELF = Address{
name: "self",
value: AddressValue{},
port: DEFAULT_PORT,
mgmt: MGMT_PORT,
status: ADDRESS_ACTIVE,
}
)
func init() {
rand.Seed(time.Now().UnixNano())
fmt.Println("ok")
}
func (addresses *Addresses) InitSelf() {
address := SELF
address.SetValue(GetRandomAddress())
address.SetExternal(GetExternalAddress())
address.SetPort(*peerPort)
Logln("port:", *peerPort)
interfaces, err := GetInterfaces()
if err != nil {
Logln("Can't get interfaces:", err)
panic(err)
}
address.SetInterfaces(interfaces)
addresses.Add(&address)
}
func NewAddress(v ...interface{}) *Address {
address := &Address{}
for _, item := range v {
switch item.(type) {
case string: // must be name
address.SetName(item.(string))
case AddressValue:
address.SetValue(item.(AddressValue))
case ExternalAddress:
address.SetExternal(item.(ExternalAddress))
case int: // must be port
address.SetPort(item.(int))
default:
Logln("Unknown element when creating an address:", item)
}
}
return address
}
func GetRandomAddress() (addressValue AddressValue) {
for i := range addressValue {
addressValue[i] |= (uint64(rand.Int63()) & 0x7fff7fff7fff7fff)
}
return addressValue
}
func (addresses *Addresses) Add(address *Address) {
*addresses = append(*addresses, address)
}
func (address *Address) GetName() string {
return address.name
}
func (address *Address) SetName(name string) *Address {
address.name = name
return address
}
func (address *Address) GetValue() AddressValue {
return address.value
}
func (address *Address) SetValue(addressValue AddressValue) *Address {
address.value = addressValue
return address
}
func (address *Address) GetExternal() ExternalAddress {
return address.external
}
func (address *Address) SetExternal(externalAddress ExternalAddress) *Address {
address.external = externalAddress
return address
}
func (address *Address) GetPort() int {
return address.port
}
func (address *Address) GetPortString() string {
return strconv.Itoa(address.GetPort())
}
func (address *Address) SetPort(port int) *Address {
address.port = port
return address
}
func (address *Address) GetConnectionString() string {
return GetConnectionString(address)
}
// Is this me?
func (address *Address) IsSelf() bool {
if reflect.DeepEqual(*address, SELF) {
return true
}
return false
}
// GetInterfaces returns the interfaces used by the address
func (address *Address) GetInterfaces() Interfaces {
return address.interfaces
}
// SetInterfaces define a set of interfaces for this address
func (address *Address) SetInterfaces(interfaces Interfaces) *Address {
address.interfaces = interfaces
return address
}
//
// --- calculate distances
//
// Distance between 2 addresses
// is the minimum linear distance
// between 16bits groups taken as circles"
//
func (address *Address) Distance(address2 *Address) AddressDistance {
return (address.value).Distance(address2.value)
}
func (address AddressValue) Distance(address2 AddressValue) AddressDistance {
var d AddressDistance = DIST_MAX / 2
var vLow, v2Low [ADDRESS_SIZE * 4]uint64
for i, v := range address {
v2 := address2[i]
for j := 0; j < 4; j++ {<|fim▁hole|> v2Low[pos] = v2 % uint64(DIST_MAX)
di := DIST_MAX/2 - AddressDistance(
math.Abs(float64(int64(DIST_MAX/2)-int64(v2Low[pos]-vLow[pos]))))
if di > DIST_MAX/2 {
di = DIST_MAX - di
}
if di < d {
d = di
}
v = v >> 16
v2 = v2 >> 16
}
}
if d > 0 {
WORMHOLE_TEST:
for i, v := range vLow {
for j, v2 := range v2Low {
if v == v2 && i != j {
d = 0
break WORMHOLE_TEST
}
}
}
}
return d
}
//
// --- string conversions
//
func (addresses Addresses) String() (result string) {
if len(addresses) == 0 {
return "(None)"
}
for i, address := range addresses {
if i > 0 {
result += "\n"
}
result += fmt.Sprintf("- %d: %s", i, address)
}
return
}
func (addresses Addresses) Details() (result string) {
if len(addresses) == 0 {
return "(None)"
}
for i, address := range addresses {
if i > 0 {
result += "\n"
}
result += fmt.Sprintf("- %d:\n%v", i, address.Details())
}
return
}
func (address *Address) String() string {
return fmt.Sprintf("%s [%s/%s:%d]", address.value, address.name, address.external, address.port)
}
func (address *Address) Details() string {
return fmt.Sprintf("%s [%s/%s:%d]\n%v\n", address.value, address.name, address.external, address.port, address.interfaces)
}
func (value AddressValue) String() string {
strAddress := ""
for _, val := range value {
strAddress = fmt.Sprintf("%s%016x", strAddress, val)
}
result := ""
for i, char := range strAddress {
if i%4 == 0 && i > 0 {
result += "-"
}
result = fmt.Sprintf("%s%c", result, char)
}
return result
}
func (peerList PeerList) String() (result string) {
for i, address := range peerList {
if i > 0 {
result += "\n"
}
result += fmt.Sprintf("[%d] %s", i, address)
}
return
}
func (rerouteList RerouteList) String() string {
return fmt.Sprintf("Gimme:\n%s\nGet:\n%s", rerouteList.Gimme, rerouteList.Get)
}
//
// -- peerlist sorting
//
type PeerListByZero PeerList
// -- ordering relative to address ZERO
func (peerList PeerListByZero) Less(i, j int) bool {
zero := &AddressValue{0, 0}
return zero.Distance(peerList[i]) < zero.Distance(peerList[j])
}
func (peerList PeerListByZero) Swap(i, j int) {
peerList[i], peerList[j] = peerList[j], peerList[i]
}
func (peerList PeerListByZero) Len() int {
return len(peerList)
}
// -- basic ordering relative to Me()
func (peerList PeerList) Less(i, j int) bool {
myAddress := me.GetAddress().GetValue()
return myAddress.Distance(peerList[i]) < myAddress.Distance(peerList[j])
}
func (peerList PeerList) Search(addressValue AddressValue) int {
for i, value := range peerList {
if value == addressValue {
return i
}
}
return peerList.Len()
}
func (peerList PeerList) Swap(i, j int) {
peerList[i], peerList[j] = peerList[j], peerList[i]
}
func (peerList PeerList) Len() int {
return len(peerList)
}<|fim▁end|> | pos := i*4 + j
vLow[pos] = v % uint64(DIST_MAX) |
<|file_name|>SketchUploadState.java<|end_file_name|><|fim▁begin|><|fim▁hole|>package com.punchthrough.bean.sdk.internal.upload.sketch;
public enum SketchUploadState {
INACTIVE, RESETTING_REMOTE, SENDING_START_COMMAND, SENDING_BLOCKS, FINISHED
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Plugins that are not OS-specific"""
# pylint: disable=unused-import
from rekall.plugins.common import address_resolver
from rekall.plugins.common import api
from rekall.plugins.common import bovine
from rekall.plugins.common import efilter_plugins
from rekall.plugins.common import inspection
from rekall.plugins.common import memmap
from rekall.plugins.common import profile_index<|fim▁hole|><|fim▁end|> | from rekall.plugins.common import scanners
from rekall.plugins.common import sigscan |
<|file_name|>rest.py<|end_file_name|><|fim▁begin|>from datetime import datetime
from flask import Blueprint, jsonify, request
from app.dao.fact_notification_status_dao import (
get_total_notifications_for_date_range,
)
from app.dao.fact_processing_time_dao import (
get_processing_time_percentage_for_date_range,<|fim▁hole|>)
from app.dao.services_dao import get_live_services_with_organisation
from app.errors import register_errors
from app.performance_dashboard.performance_dashboard_schema import (
performance_dashboard_request,
)
from app.schema_validation import validate
performance_dashboard_blueprint = Blueprint('performance_dashboard', __name__, url_prefix='/performance-dashboard')
register_errors(performance_dashboard_blueprint)
@performance_dashboard_blueprint.route('')
def get_performance_dashboard():
# All statistics are as of last night this matches the existing performance platform
# and avoids the need to query notifications.
if request.args:
# Is it ok to reuse this? - should probably create a new one
validate(request.args, performance_dashboard_request)
# If start and end date are not set, we are expecting today's stats.
today = str(datetime.utcnow().date())
start_date = datetime.strptime(request.args.get('start_date', today), '%Y-%m-%d').date()
end_date = datetime.strptime(request.args.get('end_date', today), '%Y-%m-%d').date()
total_for_all_time = get_total_notifications_for_date_range(start_date=None, end_date=None)
total_notifications, emails, sms, letters = transform_results_into_totals(total_for_all_time)
totals_for_date_range = get_total_notifications_for_date_range(start_date=start_date, end_date=end_date)
processing_time_results = get_processing_time_percentage_for_date_range(start_date=start_date, end_date=end_date)
services = get_live_services_with_organisation()
stats = {
"total_notifications": total_notifications,
"email_notifications": emails,
"sms_notifications": sms,
"letter_notifications": letters,
"notifications_by_type": transform_into_notification_by_type_json(totals_for_date_range),
"processing_time": transform_processing_time_results_to_json(processing_time_results),
"live_service_count": len(services),
"services_using_notify": transform_services_to_json(services)
}
return jsonify(stats)
def transform_results_into_totals(total_notifications_results):
total_notifications = 0
emails = 0
sms = 0
letters = 0
for x in total_notifications_results:
total_notifications += x.emails
total_notifications += x.sms
total_notifications += x.letters
emails += x.emails
sms += x.sms
letters += x.letters
return total_notifications, emails, sms, letters
def transform_into_notification_by_type_json(total_notifications):
j = []
for x in total_notifications:
j.append({"date": x.bst_date, "emails": x.emails, "sms": x.sms, "letters": x.letters})
return j
def transform_processing_time_results_to_json(processing_time_results):
j = []
for x in processing_time_results:
j.append({"date": x.date, "percentage_under_10_seconds": x.percentage})
return j
def transform_services_to_json(services_results):
j = []
for x in services_results:
j.append({"service_id": x.service_id, "service_name": x.service_name,
"organisation_id": x.organisation_id, "organisation_name": x.organisation_name}
)
return j<|fim▁end|> | |
<|file_name|>angular_jqxtabs.ts<|end_file_name|><|fim▁begin|>/*
jQWidgets v11.0.0 (2020-Nov)
Copyright (c) 2011-2020 jQWidgets.
License: https://jqwidgets.com/license/
*/
/* eslint-disable */
/// <reference path="jqwidgets.d.ts" />
import '../jqwidgets/jqxcore.js';
import '../jqwidgets/jqxbuttons.js';
import '../jqwidgets/jqxtabs.js';
import { Component, Input, Output, EventEmitter, ElementRef, OnChanges, SimpleChanges } from '@angular/core';
declare let JQXLite: any;
@Component({
selector: 'jqxTabs',<|fim▁hole|>{
@Input('animationType') attrAnimationType: string;
@Input('autoHeight') attrAutoHeight: boolean;
@Input('closeButtonSize') attrCloseButtonSize: number;
@Input('collapsible') attrCollapsible: boolean;
@Input('contentTransitionDuration') attrContentTransitionDuration: number;
@Input('disabled') attrDisabled: boolean;
@Input('enabledHover') attrEnabledHover: boolean;
@Input('enableScrollAnimation') attrEnableScrollAnimation: boolean;
@Input('enableDropAnimation') attrEnableDropAnimation: boolean;
@Input('initTabContent') attrInitTabContent: (tab?: number) => void;
@Input('keyboardNavigation') attrKeyboardNavigation: boolean;
@Input('next') attrNext: any;
@Input('previous') attrPrevious: any;
@Input('position') attrPosition: string;
@Input('reorder') attrReorder: boolean;
@Input('rtl') attrRtl: boolean;
@Input('scrollAnimationDuration') attrScrollAnimationDuration: number;
@Input('selectedItem') attrSelectedItem: number;
@Input('selectionTracker') attrSelectionTracker: boolean;
@Input('scrollable') attrScrollable: boolean;
@Input('scrollPosition') attrScrollPosition: string;
@Input('scrollStep') attrScrollStep: number;
@Input('showCloseButtons') attrShowCloseButtons: boolean;
@Input('toggleMode') attrToggleMode: string;
@Input('theme') attrTheme: string;
@Input('width') attrWidth: string | number;
@Input('height') attrHeight: string | number;
@Input('auto-create') autoCreate: boolean = true;
properties: string[] = ['animationType','autoHeight','closeButtonSize','collapsible','contentTransitionDuration','disabled','enabledHover','enableScrollAnimation','enableDropAnimation','height','initTabContent','keyboardNavigation','next','previous','position','reorder','rtl','scrollAnimationDuration','selectedItem','selectionTracker','scrollable','scrollPosition','scrollStep','showCloseButtons','toggleMode','theme','width'];
host: any;
elementRef: ElementRef;
widgetObject: jqwidgets.jqxTabs;
constructor(containerElement: ElementRef) {
this.elementRef = containerElement;
}
ngOnInit() {
if (this.autoCreate) {
this.createComponent();
}
};
ngOnChanges(changes: SimpleChanges) {
if (this.host) {
for (let i = 0; i < this.properties.length; i++) {
let attrName = 'attr' + this.properties[i].substring(0, 1).toUpperCase() + this.properties[i].substring(1);
let areEqual: boolean = false;
if (this[attrName] !== undefined) {
if (typeof this[attrName] === 'object') {
if (this[attrName] instanceof Array) {
areEqual = this.arraysEqual(this[attrName], this.host.jqxTabs(this.properties[i]));
}
if (areEqual) {
return false;
}
this.host.jqxTabs(this.properties[i], this[attrName]);
continue;
}
if (this[attrName] !== this.host.jqxTabs(this.properties[i])) {
this.host.jqxTabs(this.properties[i], this[attrName]);
}
}
}
}
}
arraysEqual(attrValue: any, hostValue: any): boolean {
if ((attrValue && !hostValue) || (!attrValue && hostValue)) {
return false;
}
if (attrValue.length != hostValue.length) {
return false;
}
for (let i = 0; i < attrValue.length; i++) {
if (attrValue[i] !== hostValue[i]) {
return false;
}
}
return true;
}
manageAttributes(): any {
let options = {};
for (let i = 0; i < this.properties.length; i++) {
let attrName = 'attr' + this.properties[i].substring(0, 1).toUpperCase() + this.properties[i].substring(1);
if (this[attrName] !== undefined) {
options[this.properties[i]] = this[attrName];
}
}
return options;
}
moveClasses(parentEl: HTMLElement, childEl: HTMLElement): void {
let classes: any = parentEl.classList;
if (classes.length > 0) {
childEl.classList.add(...classes);
}
parentEl.className = '';
}
moveStyles(parentEl: HTMLElement, childEl: HTMLElement): void {
let style = parentEl.style.cssText;
childEl.style.cssText = style
parentEl.style.cssText = '';
}
createComponent(options?: any): void {
if (this.host) {
return;
}
if (options) {
JQXLite.extend(options, this.manageAttributes());
}
else {
options = this.manageAttributes();
}
this.host = JQXLite(this.elementRef.nativeElement.firstChild);
this.moveClasses(this.elementRef.nativeElement, this.host[0]);
this.moveStyles(this.elementRef.nativeElement, this.host[0]);
this.__wireEvents__();
this.widgetObject = jqwidgets.createInstance(this.host, 'jqxTabs', options);
}
createWidget(options?: any): void {
this.createComponent(options);
}
__updateRect__() : void {
if(this.host) this.host.css({ width: this.attrWidth, height: this.attrHeight });
}
setOptions(options: any) : void {
this.host.jqxTabs('setOptions', options);
}
// jqxTabsComponent properties
animationType(arg?: string): string {
if (arg !== undefined) {
this.host.jqxTabs('animationType', arg);
} else {
return this.host.jqxTabs('animationType');
}
}
autoHeight(arg?: boolean): boolean {
if (arg !== undefined) {
this.host.jqxTabs('autoHeight', arg);
} else {
return this.host.jqxTabs('autoHeight');
}
}
closeButtonSize(arg?: number): number {
if (arg !== undefined) {
this.host.jqxTabs('closeButtonSize', arg);
} else {
return this.host.jqxTabs('closeButtonSize');
}
}
collapsible(arg?: boolean): boolean {
if (arg !== undefined) {
this.host.jqxTabs('collapsible', arg);
} else {
return this.host.jqxTabs('collapsible');
}
}
contentTransitionDuration(arg?: number): number {
if (arg !== undefined) {
this.host.jqxTabs('contentTransitionDuration', arg);
} else {
return this.host.jqxTabs('contentTransitionDuration');
}
}
disabled(arg?: boolean): boolean {
if (arg !== undefined) {
this.host.jqxTabs('disabled', arg);
} else {
return this.host.jqxTabs('disabled');
}
}
enabledHover(arg?: boolean): boolean {
if (arg !== undefined) {
this.host.jqxTabs('enabledHover', arg);
} else {
return this.host.jqxTabs('enabledHover');
}
}
enableScrollAnimation(arg?: boolean): boolean {
if (arg !== undefined) {
this.host.jqxTabs('enableScrollAnimation', arg);
} else {
return this.host.jqxTabs('enableScrollAnimation');
}
}
enableDropAnimation(arg?: boolean): boolean {
if (arg !== undefined) {
this.host.jqxTabs('enableDropAnimation', arg);
} else {
return this.host.jqxTabs('enableDropAnimation');
}
}
height(arg?: string | number): string | number {
if (arg !== undefined) {
this.host.jqxTabs('height', arg);
} else {
return this.host.jqxTabs('height');
}
}
initTabContent(arg?: (tab?: number) => void): (tab?: number) => void {
if (arg !== undefined) {
this.host.jqxTabs('initTabContent', arg);
} else {
return this.host.jqxTabs('initTabContent');
}
}
keyboardNavigation(arg?: boolean): boolean {
if (arg !== undefined) {
this.host.jqxTabs('keyboardNavigation', arg);
} else {
return this.host.jqxTabs('keyboardNavigation');
}
}
next(arg?: any): any {
if (arg !== undefined) {
this.host.jqxTabs('next', arg);
} else {
return this.host.jqxTabs('next');
}
}
previous(arg?: any): any {
if (arg !== undefined) {
this.host.jqxTabs('previous', arg);
} else {
return this.host.jqxTabs('previous');
}
}
position(arg?: string): string {
if (arg !== undefined) {
this.host.jqxTabs('position', arg);
} else {
return this.host.jqxTabs('position');
}
}
reorder(arg?: boolean): boolean {
if (arg !== undefined) {
this.host.jqxTabs('reorder', arg);
} else {
return this.host.jqxTabs('reorder');
}
}
rtl(arg?: boolean): boolean {
if (arg !== undefined) {
this.host.jqxTabs('rtl', arg);
} else {
return this.host.jqxTabs('rtl');
}
}
scrollAnimationDuration(arg?: number): number {
if (arg !== undefined) {
this.host.jqxTabs('scrollAnimationDuration', arg);
} else {
return this.host.jqxTabs('scrollAnimationDuration');
}
}
selectedItem(arg?: number): number {
if (arg !== undefined) {
this.host.jqxTabs('selectedItem', arg);
} else {
return this.host.jqxTabs('selectedItem');
}
}
selectionTracker(arg?: boolean): boolean {
if (arg !== undefined) {
this.host.jqxTabs('selectionTracker', arg);
} else {
return this.host.jqxTabs('selectionTracker');
}
}
scrollable(arg?: boolean): boolean {
if (arg !== undefined) {
this.host.jqxTabs('scrollable', arg);
} else {
return this.host.jqxTabs('scrollable');
}
}
scrollPosition(arg?: string): string {
if (arg !== undefined) {
this.host.jqxTabs('scrollPosition', arg);
} else {
return this.host.jqxTabs('scrollPosition');
}
}
scrollStep(arg?: number): number {
if (arg !== undefined) {
this.host.jqxTabs('scrollStep', arg);
} else {
return this.host.jqxTabs('scrollStep');
}
}
showCloseButtons(arg?: boolean): boolean {
if (arg !== undefined) {
this.host.jqxTabs('showCloseButtons', arg);
} else {
return this.host.jqxTabs('showCloseButtons');
}
}
toggleMode(arg?: string): string {
if (arg !== undefined) {
this.host.jqxTabs('toggleMode', arg);
} else {
return this.host.jqxTabs('toggleMode');
}
}
theme(arg?: string): string {
if (arg !== undefined) {
this.host.jqxTabs('theme', arg);
} else {
return this.host.jqxTabs('theme');
}
}
width(arg?: string | number): string | number {
if (arg !== undefined) {
this.host.jqxTabs('width', arg);
} else {
return this.host.jqxTabs('width');
}
}
// jqxTabsComponent functions
addAt(index: number, title: string, content: string): void {
this.host.jqxTabs('addAt', index, title, content);
}
addFirst(htmlElement1: any, htmlElement2: any): void {
this.host.jqxTabs('addFirst', htmlElement1, htmlElement2);
}
addLast(htmlElement1: any, htmlElement2?: any): void {
this.host.jqxTabs('addLast', htmlElement1, htmlElement2);
}
collapse(): void {
this.host.jqxTabs('collapse');
}
disable(): void {
this.host.jqxTabs('disable');
}
disableAt(index: number): void {
this.host.jqxTabs('disableAt', index);
}
destroy(): void {
this.host.jqxTabs('destroy');
}
ensureVisible(index: number): void {
this.host.jqxTabs('ensureVisible', index);
}
enableAt(index: number): void {
this.host.jqxTabs('enableAt', index);
}
expand(): void {
this.host.jqxTabs('expand');
}
enable(): void {
this.host.jqxTabs('enable');
}
focus(): void {
this.host.jqxTabs('focus');
}
getTitleAt(index: number): string {
return this.host.jqxTabs('getTitleAt', index);
}
getContentAt(index: number): any {
return this.host.jqxTabs('getContentAt', index);
}
getDisabledTabsCount(): any {
return this.host.jqxTabs('getDisabledTabsCount');
}
hideCloseButtonAt(index: number): void {
this.host.jqxTabs('hideCloseButtonAt', index);
}
hideAllCloseButtons(): void {
this.host.jqxTabs('hideAllCloseButtons');
}
length(): number {
return this.host.jqxTabs('length');
}
removeAt(index: number): void {
this.host.jqxTabs('removeAt', index);
}
removeFirst(): void {
this.host.jqxTabs('removeFirst');
}
removeLast(): void {
this.host.jqxTabs('removeLast');
}
select(index: number): void {
this.host.jqxTabs('select', index);
}
setContentAt(index: number, htmlElement: string): void {
this.host.jqxTabs('setContentAt', index, htmlElement);
}
setTitleAt(index: number, htmlElement: string): void {
this.host.jqxTabs('setTitleAt', index, htmlElement);
}
showCloseButtonAt(index: number): void {
this.host.jqxTabs('showCloseButtonAt', index);
}
showAllCloseButtons(): void {
this.host.jqxTabs('showAllCloseButtons');
}
val(value?: string): any {
if (value !== undefined) {
return this.host.jqxTabs('val', value);
} else {
return this.host.jqxTabs('val');
}
};
// jqxTabsComponent events
@Output() onAdd = new EventEmitter();
@Output() onCollapsed = new EventEmitter();
@Output() onDragStart = new EventEmitter();
@Output() onDragEnd = new EventEmitter();
@Output() onExpanded = new EventEmitter();
@Output() onRemoved = new EventEmitter();
@Output() onSelecting = new EventEmitter();
@Output() onSelected = new EventEmitter();
@Output() onTabclick = new EventEmitter();
@Output() onUnselecting = new EventEmitter();
@Output() onUnselected = new EventEmitter();
__wireEvents__(): void {
this.host.on('add', (eventData: any) => { this.onAdd.emit(eventData); });
this.host.on('collapsed', (eventData: any) => { this.onCollapsed.emit(eventData); });
this.host.on('dragStart', (eventData: any) => { this.onDragStart.emit(eventData); });
this.host.on('dragEnd', (eventData: any) => { this.onDragEnd.emit(eventData); });
this.host.on('expanded', (eventData: any) => { this.onExpanded.emit(eventData); });
this.host.on('removed', (eventData: any) => { this.onRemoved.emit(eventData); });
this.host.on('selecting', (eventData: any) => { this.onSelecting.emit(eventData); });
this.host.on('selected', (eventData: any) => { this.onSelected.emit(eventData); });
this.host.on('tabclick', (eventData: any) => { this.onTabclick.emit(eventData); });
this.host.on('unselecting', (eventData: any) => { this.onUnselecting.emit(eventData); });
this.host.on('unselected', (eventData: any) => { this.onUnselected.emit(eventData); });
}
} //jqxTabsComponent<|fim▁end|> | template: '<div><ng-content></ng-content></div>'
})
export class jqxTabsComponent implements OnChanges |
<|file_name|>SolomonServer.java<|end_file_name|><|fim▁begin|>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package solomonserver;
import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import runnables.ConnectClientsRunnable;
import runnables.ConnectToUnityDemoRunnable;
import runnables.ProcessDatabaseDataRunnable;
/**
*
* @author beia
*/
public class SolomonServer {
//solomon server variables
public static ServerSocket serverSocket;
public static Thread connectClients;
public static Thread processDatabaseData;
//unity demo server variables
public static ServerSocket unityDemoServerSocket;
public static Socket unityDemoSocket;
public static Thread connectToUnityDemoThread;
//sql server variables<|fim▁hole|> public static Connection con;
//data processing variables
public static volatile int lastLocationEntryId = 1;
public static void main(String[] args) throws IOException, SQLException, Exception
{
//connect to a mySql database
connectToDatabase();
//create a tcp serverSocket and wait for client connections
serverSocket = new ServerSocket(8000);
connectClients = new Thread(new ConnectClientsRunnable(serverSocket));
connectClients.start();
unityDemoServerSocket = new ServerSocket(7000);
connectToUnityDemoThread = new Thread(new ConnectToUnityDemoRunnable(unityDemoServerSocket));
connectToUnityDemoThread.start();
//extract user location data from the database and process it at a fixed amount of time
processDatabaseData = new Thread(new ProcessDatabaseDataRunnable());
processDatabaseData.start();
}
public static void connectToDatabase() throws ClassNotFoundException, SQLException, Exception
{
try
{
Class.forName("com.mysql.cj.jdbc.Driver");
con = DriverManager.getConnection("jdbc:mysql://localhost:3306/solomondb?autoReconnect=true&useJDBCCompliantTimezoneShift=true&useJDBCCompliantTimezoneShift=true&serverTimezone=UTC&useSSL=false", "root", "Puihoward_1423"); // nu uitati sa puneti parola corecta de root pe care o aveti setata pe serverul vostru de MySql.
System.out.println("Successfully connected to the database!");
}
catch (ClassNotFoundException cnfe)
{
error = "ClassNotFoundException: Can't find the driver for the database.";
throw new ClassNotFoundException(error);
}
catch (SQLException cnfe)
{
cnfe.printStackTrace();
error = "SQLException: Can't connect to the database.";
throw new SQLException(error);
}
catch (Exception e)
{
error = "Exception: Unexpected exception occured while we tried to connect to the database.";
throw new Exception(error);
}
}
public static void addUser(String username, String password, String lastName, String firstName, int age) throws SQLException, Exception
{
if (con != null)
{
try
{
// create a prepared SQL statement
String userInsertionStatement = "insert into users(username, password, lastName, firstName, age) values(?,?,?,?,?)";
PreparedStatement updateUsers = con.prepareStatement(userInsertionStatement);
updateUsers.setString(1, username);
updateUsers.setString(2, password);
updateUsers.setString(3, lastName);
updateUsers.setString(4, firstName);
updateUsers.setInt(5, age);
updateUsers.executeUpdate();
System.out.println("Inserted user '" + username + "'\n password: " + password + "\nlast name: " + lastName + "\nfirst name: " + firstName + "\nage: " + age + " into the database\n\n");
}
catch (SQLException sqle)
{
error = "SqlException: Update failed; duplicates may exist.";
throw new SQLException(error);
}
}
else
{
error = "Exception : Database connection was lost.";
throw new Exception(error);
}
}
public static void addLocationData(int idUser, int idStore, String zoneName, boolean zoneEntered, String time) throws SQLException, Exception
{
if (con != null)
{
try
{
// create a prepared SQL statement
String userLocationInsertionStatement = "insert into userlocations(idUser, idStore, zoneName, zoneEntered, time) values(?,?,?,?,?)";
PreparedStatement updateUserLocation = con.prepareStatement(userLocationInsertionStatement);
updateUserLocation.setInt(1, idUser);
updateUserLocation.setInt(2, idStore);
updateUserLocation.setString(3, zoneName);
updateUserLocation.setBoolean(4, zoneEntered);
updateUserLocation.setString(5, time);
updateUserLocation.executeUpdate();
System.out.println("Inserted userLocation into the database\nuser id: " + idUser + "\nstore id: " + idStore + "\nzone name: " + zoneName + "\nzone entered: " + zoneEntered + "\ntime: " + time + "\n\n");
}
catch (SQLException sqle)
{
sqle.printStackTrace();
}
}
else
{
error = "Exception : Database connection was lost.";
throw new Exception(error);
}
}
public static void addZoneTimeData(int idUser, int idStore, String[] zonesTime) throws SQLException, Exception
{
if (con != null)
{
try
{
// create a prepared SQL statement
String userRoomTimeInsertionStatementFirstPart = "insert into userroomtime(idUser, idStore";
String userRoomTimeInsertionStatementLastPart = "values(" + idUser + ", " + idStore;
String outputFeedBackString;
Statement updateRoomTimeData = con.createStatement();
outputFeedBackString = "Inserted user room time data ";
for(int i = 0; i < zonesTime.length; i++)
{
userRoomTimeInsertionStatementFirstPart += ", room" + (i + 1) + "Time";
userRoomTimeInsertionStatementLastPart += ", '" + zonesTime[i] + "'";
outputFeedBackString += "room" + (i + 1) + " time = " + zonesTime[i];
}
userRoomTimeInsertionStatementFirstPart += ") ";
userRoomTimeInsertionStatementLastPart += ")";
String statementString = userRoomTimeInsertionStatementFirstPart + userRoomTimeInsertionStatementLastPart;
updateRoomTimeData.executeUpdate(statementString);
}
catch (SQLException sqle)
{
sqle.printStackTrace();
}
}
else
{
error = "Exception : Database connection was lost.";
throw new Exception(error);
}
}
public static void updateZoneTimeData(int idUser, int idStore, String zoneName, String zoneTime) throws SQLException, Exception
{
if (con != null)
{
try
{
// create a prepared SQL statement
Statement updateStatement = con.createStatement();
String userRoomTimeUpdateStatement = "update userroomtime set " + zoneName + "='" + zoneTime + "' where idUser=" + idUser + " and idStore=" + idStore;
updateStatement.executeUpdate(userRoomTimeUpdateStatement);
}
catch (SQLException sqle)
{
sqle.printStackTrace();
}
}
else
{
error = "Exception : Database connection was lost.";
throw new Exception(error);
}
}
public static ResultSet getUserDataFromDatabase(String tabelName, String username) throws SQLException, Exception
{
ResultSet rs = null;
try
{
// Execute query
String queryString = ("select * from " + tabelName + " where username = '" + username + "';");
Statement stmt = con.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
rs = stmt.executeQuery(queryString); //sql exception
}
catch (SQLException sqle)
{
error = "SQLException: Query was not possible.";
sqle.printStackTrace();
throw new SQLException(error);
}
catch (Exception e)
{
error = "Exception occured when we extracted the data.";
throw new Exception(error);
}
return rs;
}
public static ResultSet getRoomTimeDataFromDatabase(String tableName, int idUser, int idStore) throws SQLException, Exception
{
ResultSet rs = null;
try
{
// Execute query
String queryString = ("select * from " + tableName + " where idUser=" + idUser + " and idStore=" + idStore);
Statement stmt = con.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
rs = stmt.executeQuery(queryString); //sql exception
}
catch (SQLException sqle)
{
error = "SQLException: Query was not possible.";
sqle.printStackTrace();
throw new SQLException(error);
}
catch (Exception e)
{
error = "Exception occured when we extracted the data.";
throw new Exception(error);
}
return rs;
}
public static ResultSet getTableData(String tabelName) throws SQLException, Exception
{
ResultSet rs = null;
try
{
// Execute query
String queryString = ("select * from " + tabelName + ";");
Statement stmt = con.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
rs = stmt.executeQuery(queryString); //sql exception
}
catch (SQLException sqle)
{
error = "SQLException: Query was not possible.";
sqle.printStackTrace();
throw new SQLException(error);
}
catch (Exception e)
{
error = "Exception occured when we extracted the data.";
throw new Exception(error);
}
return rs;
}
public static ResultSet getNewTableData(String tabelName, String idName, int lastId) throws SQLException, Exception
{
ResultSet rs = null;
try
{
// Execute query
String queryString = ("select * from "+ tabelName + " where " + idName + " > " + lastId);
Statement stmt = con.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY);
rs = stmt.executeQuery(queryString); //sql exception
}
catch (SQLException sqle)
{
error = "SQLException: Query was not possible.";
sqle.printStackTrace();
throw new SQLException(error);
}
catch (Exception e)
{
error = "Exception occured when we extracted the data.";
throw new Exception(error);
}
return rs;
}
}<|fim▁end|> | public static String error; |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Structures that can be passed into Weld.
//!
//! This currently defines the type layout specified for the single threaded backend. In general,
//! type layouts vary from backend to backend especially for builders.
//!
//! # Primitives
//!<|fim▁hole|>//! defined in a struct with `repr(C)`.
//!
//! # Vectors
//!
//! Vectors will always have the same layout: a pointer followed by a 64-bit length.
//!
//! # Builders
//!
//! Builders are backend-specific and have layouts that may change at any time. Therefore, the
//! builder definitions here should be used as _opaque sized types_ rather than as structs whose
//! fields can be accessed.
use std::convert::AsRef;
use std::marker::PhantomData;
use std::fmt;
/// A boolean in Weld.
///
/// Weld booleans are always defined as a single-byte unsigned value. Weld will always return a
/// boolean with value 0 or 1, corresponding to `false` and `true` respectively. When passing
/// booleans as input, Weld will consider _any_ non-zero value to be `true`, and 0 to be false.
pub type WeldBool = u8;
/// A dynamically sized constant vector.
///
/// Vectors are always defined as a pointer and a length.
#[derive(Clone, Debug)]
#[repr(C)]
pub struct WeldVec<T> {
pub data: *const T,
pub len: i64,
}
unsafe impl<T> Send for WeldVec<T> {}
unsafe impl<T> Sync for WeldVec<T> {}
impl<T> WeldVec<T> {
/// Return a new WeldVec from a pointer and a length.
///
/// Consider using `WeldVec::from` instead, which automatically derives the length.
pub fn new(ptr: *const T, len: i64) -> WeldVec<T> {
WeldVec { data: ptr, len }
}
}
impl<'a, T, U> From<&'a U> for WeldVec<T>
where
U: AsRef<[T]>,
{
fn from(s: &'a U) -> WeldVec<T> {
WeldVec::new(s.as_ref().as_ptr(), s.as_ref().len() as i64)
}
}
impl<T> PartialEq for WeldVec<T>
where
T: PartialEq + Clone,
{
fn eq(&self, other: &WeldVec<T>) -> bool {
if self.len != other.len {
return false;
}
for i in 0..self.len {
let v1 = unsafe { (*self.data.offset(i as isize)).clone() };
let v2 = unsafe { (*other.data.offset(i as isize)).clone() };
if v1 != v2 {
return false;
}
}
true
}
}
impl<T> fmt::Display for WeldVec<T>
where
T: fmt::Display + Clone,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "[ ")?;
for i in 0..self.len {
let v = unsafe { (*self.data.offset(i as isize)).clone() };
write!(f, "{} ", v)?;
}
write!(f, "] ")?;
write!(f, "(length={})", self.len)
}
}
/// The `appender` builder type.
#[derive(Clone, Debug)]
#[repr(C)]
pub struct Appender<T> {
pointer: *mut T,
size: i64,
capacity: i64,
}
/// The dictionary type.
///
/// Like builders, dictionaries currently have an opaque format. At some point, dictionaries will
/// have methods for accessing keys and values and iterating over them. For now, these operations
/// require compiling a Weld program.
#[derive(Clone, Debug)]
#[repr(C)]
pub struct Dict<K, V> {
// Dictionaries are just opaque pointers.
pointer: *mut (),
phantom_key: PhantomData<K>, // 0-sized
phantom_val: PhantomData<V>, // 0-sized
}
/// The `dictmerger` builder type.
#[derive(Clone, Debug)]
#[repr(C)]
pub struct DictMerger<K, V> {
d: Dict<K, V>,
}
/// The `groupmerger` builder type.
#[derive(Clone, Debug)]
#[repr(C)]
pub struct GroupMerger<K, V> {
d: Dict<K, WeldVec<V>>,
}
// Ensures that the sizes of the types defined here match the sizes of the types in the backend.
#[test]
fn size_check() {
use crate::ast::BinOpKind::Add;
use crate::ast::ScalarKind::I32;
use crate::ast::*;
use crate::codegen::size_of;
use std::mem;
let i32_ty = Box::new(Type::Scalar(I32));
let vector = &Type::Vector(i32_ty.clone());
assert_eq!(size_of(vector), mem::size_of::<WeldVec<i32>>());
let dict = &Type::Dict(i32_ty.clone(), i32_ty.clone());
assert_eq!(size_of(dict), mem::size_of::<Dict<i32, i32>>());
let appender = &Type::Builder(BuilderKind::Appender(i32_ty.clone()), Annotations::new());
assert_eq!(size_of(appender), mem::size_of::<Appender<i32>>());
let dictmerger = &Type::Builder(
BuilderKind::DictMerger(i32_ty.clone(), i32_ty.clone(), Add),
Annotations::new(),
);
assert_eq!(size_of(dictmerger), mem::size_of::<DictMerger<i32, i32>>());
let groupmerger = &Type::Builder(
BuilderKind::GroupMerger(i32_ty.clone(), i32_ty.clone()),
Annotations::new(),
);
assert_eq!(
size_of(groupmerger),
mem::size_of::<GroupMerger<i32, i32>>()
);
}<|fim▁end|> | //! Primitives in Weld match their Rust counterparts, _except for booleans_. Booleans in Weld are
//! guaranteed to be one byte in size, but are defined as `_Bool` from `stdbool.h` in Rust when |
<|file_name|>BlockNodeManipulator.java<|end_file_name|><|fim▁begin|>package tb.common.block;
import tb.common.item.ItemNodeFoci;
import tb.common.tile.TileNodeManipulator;
import tb.init.TBItems;
import net.minecraft.block.Block;
import net.minecraft.block.BlockContainer;
import net.minecraft.block.material.Material;
import net.minecraft.entity.item.EntityItem;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.world.World;
public class BlockNodeManipulator extends BlockContainer{
public BlockNodeManipulator()
{
super(Material.rock);
}
@Override
public TileEntity createNewTileEntity(World w, int meta) {
return new TileNodeManipulator();
}
public boolean isOpaqueCube()
{
return false;
}
public boolean renderAsNormalBlock()
{
return false;
}
public int getRenderType()
{
return 0x421922;
}
public boolean onBlockActivated(World w, int x, int y, int z, EntityPlayer p, int side, float vecX, float vecY, float vecZ)
{
if(p.getCurrentEquippedItem() != null)
{
ItemStack current = p.getCurrentEquippedItem();
if(current.getItem() instanceof ItemNodeFoci)
{
if(w.getBlockMetadata(x, y, z) != 0)
{
int meta = w.getBlockMetadata(x, y, z);
ItemStack stk = new ItemStack(TBItems.nodeFoci,1,meta-1);
EntityItem itm = new EntityItem(w,x+0.5D,y,z+0.5D,stk);
if(!w.isRemote)<|fim▁hole|> w.setBlockMetadataWithNotify(x, y, z, current.getItemDamage()+1, 3);
p.destroyCurrentEquippedItem();
return true;
}
}else
{
if(w.getBlockMetadata(x, y, z) != 0)
{
int meta = w.getBlockMetadata(x, y, z);
ItemStack stk = new ItemStack(TBItems.nodeFoci,1,meta-1);
EntityItem itm = new EntityItem(w,x+0.5D,y,z+0.5D,stk);
if(!w.isRemote)
w.spawnEntityInWorld(itm);
}
w.setBlockMetadataWithNotify(x, y, z, 0, 3);
}
return true;
}
@Override
public void breakBlock(World w, int x, int y, int z, Block b, int meta)
{
if(meta > 0) //Fix for the manipulator not dropping the foci.
{
ItemStack foci = new ItemStack(TBItems.nodeFoci,1,meta-1);
EntityItem itm = new EntityItem(w,x+0.5D,y+0.5D,z+0.5D,foci);
if(!w.isRemote)
w.spawnEntityInWorld(itm);
}
super.breakBlock(w, x, y, z, b, meta);
}
}<|fim▁end|> | w.spawnEntityInWorld(itm);
} |
<|file_name|>task-comm-7.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>#![allow(unused_must_use)]
#![allow(unused_assignments)]
// ignore-emscripten no threads support
use std::sync::mpsc::{channel, Sender};
use std::thread;
pub fn main() { test00(); }
fn test00_start(c: &Sender<isize>, start: isize,
number_of_messages: isize) {
let mut i: isize = 0;
while i < number_of_messages { c.send(start + i).unwrap(); i += 1; }
}
fn test00() {
let mut r: isize = 0;
let mut sum: isize = 0;
let (tx, rx) = channel();
let number_of_messages: isize = 10;
let tx2 = tx.clone();
let t1 = thread::spawn(move|| {
test00_start(&tx2, number_of_messages * 0, number_of_messages);
});
let tx2 = tx.clone();
let t2 = thread::spawn(move|| {
test00_start(&tx2, number_of_messages * 1, number_of_messages);
});
let tx2 = tx.clone();
let t3 = thread::spawn(move|| {
test00_start(&tx2, number_of_messages * 2, number_of_messages);
});
let tx2 = tx.clone();
let t4 = thread::spawn(move|| {
test00_start(&tx2, number_of_messages * 3, number_of_messages);
});
let mut i: isize = 0;
while i < number_of_messages {
r = rx.recv().unwrap();
sum += r;
r = rx.recv().unwrap();
sum += r;
r = rx.recv().unwrap();
sum += r;
r = rx.recv().unwrap();
sum += r;
i += 1;
}
assert_eq!(sum, number_of_messages * 4 * (number_of_messages * 4 - 1) / 2);
t1.join();
t2.join();
t3.join();
t4.join();
}<|fim▁end|> | // run-pass |
<|file_name|>Input_Value_Line.cpp<|end_file_name|><|fim▁begin|>//-----------------------------------------------------------------------------
// Class implementation: Input_Value::Line
//-----------------------------------------------------------------------------
#include "../pragmas.h"
#include "Input_Value_Line.h"
#include "Input_Text_File.h"
#include "Assert_That.h"
//----------------------------------------------------------------------------
<|fim▁hole|> name_(""),
name_pos_(0),
unmatched_()
{
}
//----------------------------------------------------------------------------
bool Input_Value::Line::name_matches(const std::string & name,
Str_Equal_Func str_equal_func,
std::string & rest_of_line)
{
ASSERT_THAT(str_equal_func != 0)
if (str_equal_func(name, name_)) {
rest_of_line = str_.substr(name_pos_ + name_.size());
return true;
} else {
unmatched_.push_back(name);
return false;
}
}
//----------------------------------------------------------------------------
bool Input_Value::Line::read(Input::Text_File & file)
{
if (file.read_line(str_)) {
std::istringstream strm(str_);
if (strm >> name_) {
name_pos_ = str_.find(name_);
} else {
name_ = "";
name_pos_ = 0;
}
unmatched_.clear();
return true;
} else {
str_ = "";
name_ = "";
name_pos_ = 0;
return false;
}
}<|fim▁end|> | Input_Value::Line::Line()
: str_(""),
|
<|file_name|>pageVisibility.js<|end_file_name|><|fim▁begin|>var
utils = require('./utils'),
Signals = require('./Signals');
/**
* Support for the W3C Page Visibility API - http://www.w3.org/TR/page-visibility
*
* {@link module:enyo/pageVisibility.hidden} and {@link module:enyo/pageVisibility.visibilityState}
* contain the same information as `document.hidden` and
* `document.visibilityState` in supported browsers. The `visibilitychange`
* event is channelled through the [Signals]{@link module:enyo/Signals~Signals} mechanism.
*
* Partly based on {@linkplain http://stackoverflow.com/a/1060034}.
*
* Example:
*
* ```javascript
* var
* kind = require('enyo/kind'),
* Signals = require('enyo/Signals');
*
* module.exports = kind({
* name: 'App',
* components: [
* {kind: Signals, onvisibilitychange: 'visibilitychanged'}
* ],
* visibilitychanged: function() {
* if(enyo.hidden){
* // page hidden
* } else {
* // page visible
* }
* }
* });
* ```
*
* @module enyo/pageVisibility
* @private
*/
var
doc = global.document,
hidden = 'hidden',
visibilityState = 'visibilityState',
hiddenMap = {};
var pageVisibility = module.exports = {
// set inital values for enyo.hidden and enyo.visibilityState it's probably save to assume
// that the current document is visible when loading the page
/**
* `true` if the document is hidden; otherwise, `false`.
*
* @readonly
* @type {Boolean}
* @default false
* @public
*/
hidden: typeof doc[hidden] !== 'undefined' ? doc[hidden] : false,
/**
* String indicating the document's visibility state.
*
* @readonly
* @type {String}
* @default 'visible'
* @public<|fim▁hole|> */
visibilityState: typeof doc[visibilityState] !== 'undefined' ? doc[visibilityState] : 'visible'
};
// map compatibility events to document.hidden state
hiddenMap.blur = hiddenMap.focusout = hiddenMap.pagehide = true;
hiddenMap.focus = hiddenMap.focusin = hiddenMap.pageshow = false;
function onchange (event) {
event = event || global.event;
pageVisibility.hidden = (event.type in hiddenMap) ? hiddenMap[event.type] : doc[hidden];
pageVisibility.visibilityState = (event.type in hiddenMap) ? (hiddenMap[event.type] ? 'hidden' : 'visible' ) : doc[visibilityState];
Signals.send('onvisibilitychange', utils.mixin(event, {hidden: pageVisibility.hidden}));
}
// Standards:
if (hidden in doc) {
doc.addEventListener('visibilitychange', onchange);
} else if ((hidden = 'mozHidden') in doc) {
doc.addEventListener('mozvisibilitychange', onchange);
visibilityState = 'mozVisibilityState';
} else if ((hidden = 'webkitHidden') in doc) {
doc.addEventListener('webkitvisibilitychange', onchange);
visibilityState = 'webkitVisibilityState';
} else if ((hidden = 'msHidden') in doc) {
doc.addEventListener('msvisibilitychange', onchange);
visibilityState = 'msVisibilityState';
} else if ('onfocusin' in doc) { // IE 9 and lower:
doc.onfocusin = doc.onfocusout = onchange;
} else { // All others:
global.onpageshow = global.onpagehide = global.onfocus = global.onblur = onchange;
}<|fim▁end|> | |
<|file_name|>get_all_audience_segments.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example gets all audience segments.
To create audience segments, run create_audience_segments.py.
"""
# Import appropriate modules from the client library.
from googleads import dfp
def main(client):<|fim▁hole|> client = dfp.DfpClient.LoadFromStorage()
# Initialize appropriate service.
audience_segment_service = client.GetService(
'AudienceSegmentService', version='v201411')
# Create statement object to select all audience segments.
statement = dfp.FilterStatement()
# Get audience segments by statement.
while True:
response = audience_segment_service.getAudienceSegmentsByStatement(
statement.ToStatement())
if 'results' in response:
# Display results.
for segment in response['results']:
print ('Audience segment with id \'%s\' and name '
'\'%s\' of size %s was found.' %
(segment['id'], segment['name'], segment['size']))
statement.offset += dfp.SUGGESTED_PAGE_LIMIT
else:
break
print '\nNumber of results found: %s' % response['totalResultSetSize']
if __name__ == '__main__':
# Initialize client object.
dfp_client = dfp.DfpClient.LoadFromStorage()
main(dfp_client)<|fim▁end|> | # Initialize client object. |
<|file_name|>ShowPrice.java<|end_file_name|><|fim▁begin|>package pricing;
import org.configureme.ConfigurationManager;
import org.configureme.Environment;
import org.configureme.GlobalEnvironment;
import org.configureme.environments.DynamicEnvironment;
public class ShowPrice {
public static void main(String a[]){
showPrice();
showPriceIn("USA", GlobalEnvironment.INSTANCE);
showPriceIn("United Kingdom", new DynamicEnvironment("europe", "uk"));
showPriceIn("Germany", new DynamicEnvironment("europe", "de"));
showPriceIn("Austria", new DynamicEnvironment("europe", "at"));
}
private static void showPriceIn(String description, Environment environment){
Pricing pricing = new Pricing();
ConfigurationManager.INSTANCE.configure(pricing, environment);
System.out.println("Price in "+description+" is "+pricing.getProductPrice());
}
private static void showPrice(){
Pricing pricing = new Pricing();<|fim▁hole|> }
}<|fim▁end|> | ConfigurationManager.INSTANCE.configure(pricing);
System.out.println("Please pay "+pricing.getProductPrice()); |
<|file_name|>_xsrc.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators
class XsrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(self, plotly_name="xsrc", parent_name="histogram2d", **kwargs):
super(XsrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,<|fim▁hole|><|fim▁end|> | edit_type=kwargs.pop("edit_type", "none"),
**kwargs
) |
<|file_name|>0003_auto__add_unique_category_name.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding unique constraint on 'Category', fields ['name']
db.create_unique(u'website_category', ['name'])
<|fim▁hole|>
models = {
u'website.category': {
'Meta': {'object_name': 'Category'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '150'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '100', 'blank': 'True'}),
'typo': ('django.db.models.fields.CharField', [], {'max_length': '20'})
},
u'website.keyword': {
'Meta': {'ordering': "['codname']", 'unique_together': "(('codname', 'category'),)", 'object_name': 'Keyword'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'keywords'", 'to': u"orm['website.Category']"}),
'codname': ('django.db.models.fields.CharField', [], {'max_length': '150'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '100', 'blank': 'True'})
}
}
complete_apps = ['website']<|fim▁end|> | def backwards(self, orm):
# Removing unique constraint on 'Category', fields ['name']
db.delete_unique(u'website_category', ['name']) |
<|file_name|>ServletInitializer.java<|end_file_name|><|fim▁begin|>package yaycrawler.admin;
import org.springframework.boot.builder.SpringApplicationBuilder;<|fim▁hole|>public class ServletInitializer extends SpringBootServletInitializer {
@Override
protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
return application.sources(Application.class);
}
}<|fim▁end|> | import org.springframework.boot.context.web.SpringBootServletInitializer;
|
<|file_name|>ManageCoursePage.js<|end_file_name|><|fim▁begin|>import React, {PropTypes} from 'react';
import {connect} from 'react-redux';
import {bindActionCreators} from 'redux';
import * as courseActions from '../../actions/courseActions';
import CourseForm from './CourseForm';
import {authorsFormattedForDropdown} from '../../selectors/selectors';
import toastr from 'toastr';
export class ManageCoursePage extends React.Component {
constructor(props, context) {
super(props, context);
this.state = {
course: Object.assign({}, props.course),
errors: {},
saving: false
};
this.updateCourseState = this.updateCourseState.bind(this);
this.saveCourse = this.saveCourse.bind(this);
}
componentWillReceiveProps(nextProps) {
if (this.props.course.id != nextProps.course.id) {
// Necessary to populate form when existing course is loaded directly.
this.setState({course: Object.assign({}, nextProps.course)});
}
}
updateCourseState(event) { // handler for each form field
const field = event.target.name;
let course = this.state.course;
course[field] = event.target.value;
return this.setState({course: course});
}
courseFormValid() {
let formIsValid = true;
let errors = {};
if (this.state.course.title.length < 5) {
errors.title = 'Title must be at least 5 characters.';
formIsValid = false;
}
this.setState({errors: errors});
return formIsValid;
}
saveCourse(event) {
event.preventDefault();
if (!this.courseFormValid()) {
return;
}
this.setState({saving: true});
this.props.actions.saveCourse(this.state.course)
.then(() => this.redirect())
.catch(error => {
toastr.error(error);
this.setState({saving: false});
});
}
redirect() {
// redirect to courses route
this.setState({saving: false});
toastr.success('Course saved!');
this.context.router.push('/courses');
}
render() {
return (
<CourseForm allAuthors={this.props.authors}
onChange={this.updateCourseState}
onSave={this.saveCourse}
errors={this.state.errors}
course={this.state.course}
saving={this.state.saving}/>
);
}
}
ManageCoursePage.propTypes = {
course: PropTypes.object.isRequired,
authors: PropTypes.array.isRequired,
actions: PropTypes.object.isRequired
};
//Pull in the React Router context so router is available on this.context.router.
ManageCoursePage.contextTypes = {
router: PropTypes.object
};
function getCourseById(courses, id) {
const course = courses.filter(course => course.id == id);
if (course.length) return course[0]; //since filter returns an array, have to grab the first.
return null;
}
function mapStateToProps(state, ownProps) {
let course = {
id: "",
title: "",
watchHref: "",
authorId: "",
length: "23",
category: ""
};
const courseId = ownProps.params.id; // from the path `/course/:id`
if (courseId && state.courses.length > 0) {
course = getCourseById(state.courses, courseId);
}
return {
course: course,
authors: authorsFormattedForDropdown(state.authors)<|fim▁hole|> }
}
function mapDispatchToProps(dispatch) {
return {
actions: bindActionCreators(courseActions, dispatch)
};
}
export default connect(mapStateToProps, mapDispatchToProps)(ManageCoursePage);<|fim▁end|> | |
<|file_name|>ExcludeRegexpFilter.java<|end_file_name|><|fim▁begin|>/*
No-Babylon a job search engine with filtering ability
Copyright (C) 2012-2014 [email protected]
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
*/
package org.laatusys.nobabylon.support;
import java.util.regex.Pattern;
<|fim▁hole|>
private final Pattern pattern;
public ExcludeRegexpFilter(String regexp, boolean caseSensitive) {
pattern = caseSensitive ? Pattern.compile(regexp) : Pattern.compile(regexp, Pattern.CASE_INSENSITIVE);
}
public ExcludeRegexpFilter(String regexp) {
this(regexp, false);
}
@Override
public boolean accept(String description) {
return !pattern.matcher(description).find();
}
}<|fim▁end|> | public class ExcludeRegexpFilter implements Filter { |
<|file_name|>db.rs<|end_file_name|><|fim▁begin|>use std::path::Path;
use std::fs::File;
use std::io::{Write, Read, self, Error, ErrorKind};
use db::Entry;
use nacl::secretbox::{SecretKey, SecretMsg};
use rand::{ Rng, OsRng };
use crypto::bcrypt::bcrypt;
use serde_json;
const DB_VERSION: u8 = 1u8;
const SALT_SIZE: usize = 16;
const PASS_SIZE: usize = 24;
const BCRYPT_COST: u32 = 10;
pub struct DatabaseInFile {
pub db: Database,
pub filepath: String
}
impl DatabaseInFile {
pub fn save(&self) -> io::Result<()>{
self.db.save_to_file(Path::new(&self.filepath))
}
}
pub struct Database {
bcrypt_salt: [u8; SALT_SIZE],
bcrypt_pass: [u8; PASS_SIZE],
pub entries: Vec<Entry>
}
impl Database {
pub fn empty(password: &str) -> Database {
let mut salt = [0u8; SALT_SIZE]; // 16bytes of salt bcrypt
let mut bcrypt_output = [0u8; PASS_SIZE]; // output 24 bytes
OsRng::new().unwrap().fill_bytes(&mut salt);
// TODO take only first 72 characters of input
bcrypt(BCRYPT_COST, &salt, password.as_bytes(), &mut bcrypt_output);
Database {
bcrypt_salt: salt,
bcrypt_pass: bcrypt_output,
entries: Vec::new()
}
}
pub fn open_from_file(path: &Path, password: &str) -> io::Result<Database> {
// let mut file = try!(File::open(Path::new(file_path)));
let mut file = try!(File::open(path));
Database::open(password, &mut file)
}
pub fn save_to_file(&self, path: &Path) -> io::Result<()> {
// Open the file in write-only mode
let mut file = try!(File::create(path));
self.save(&mut file)
}
    /// Reads and decrypts a database from `src`.
    ///
    /// On-disk layout: 1 version byte, `SALT_SIZE` bytes of bcrypt salt, then
    /// the secret-box payload (nonce + ciphertext) holding JSON-encoded entries.
    pub fn open<T: Read>(password: &str, src: &mut T) -> io::Result<Database> {
        let mut salt = [0u8; SALT_SIZE]; // 16bytes of salt bcrypt
        let mut bcrypt_output = [0u8; PASS_SIZE]; // output 24 bytes
        let mut version_buffer = [0u8; 1];
        // First byte is the on-disk format version.
        match src.read(&mut version_buffer){
            Ok(_) => (),
            Err(why) => return Err(why)
        };
        if version_buffer[0] != DB_VERSION {
            return Database::invalid_data_error(format!("Cannot process DB version {}", version_buffer[0]));
        }
        // A short read (fewer than SALT_SIZE bytes) is rejected as invalid data.
        match src.read(&mut salt){
            Ok(SALT_SIZE) => (),
            Ok(count) => return Database::invalid_data_error(format!("Bad number of bytes {} read for salt.", count)),
            Err(why) => return Err(why)
        }
        // Read the rest
        let mut buffer = Vec::new();
        try!(src.read_to_end(&mut buffer));
        // Run Bcrypt
        bcrypt(BCRYPT_COST, &salt, password.as_bytes(), &mut bcrypt_output);
        // Decrypt
        let secret = match SecretMsg::from_bytes(&buffer) {
            Some(msg) => msg,
            None => return Database::invalid_data_error("Too few bytes (less than NONCE + ZERO bytes of SecretMsg).".to_string())
        };
        let key = SecretKey::from_slice(&bcrypt_output);
        // NOTE(review): decryption failure (e.g. wrong password) panics via unwrap
        // instead of returning an error — confirm this is intended.
        let dec = key.decrypt(&secret).unwrap();
        let deserialized_entries: Vec<Entry> = serde_json::from_slice(&dec).unwrap();
        Ok(Database{
            bcrypt_salt: salt,
            bcrypt_pass: bcrypt_output,
            entries: deserialized_entries
        })
    }
    /// Serializes the entries to JSON, encrypts them with the stored key and
    /// writes version byte, salt, nonce and ciphertext to `dest`.
    // NOTE(review): uses write(), which may write fewer bytes than requested;
    // write_all() would be safer — confirm before relying on this for large DBs.
    pub fn save<T: Write>(&self, dest: &mut T) -> io::Result<()>{
        let serialized = serde_json::to_string(&self.entries).unwrap();
        let key = SecretKey::from_slice(&self.bcrypt_pass);
        let enc: SecretMsg = key.encrypt(serialized.as_bytes());
        // write version
        try!(dest.write(&[DB_VERSION]));
        // write salt first
        try!(dest.write(&self.bcrypt_salt));
        try!(dest.flush());
        // write nonce + encrypted data
        try!(dest.write(&enc.nonce));
        try!(dest.flush());
        try!(dest.write(&enc.cipher));
        try!(dest.flush());
        Ok(())
    }
    /// Appends `entry` to the database (no uniqueness check on titles).
    pub fn add(&mut self, entry: Entry){
        self.entries.push(entry);
    }
pub fn get(&self, entry_title: &str) -> Option<&Entry> {
self.entries.iter().find(|entry| entry.title.eq(entry_title))
}
pub fn remove(&mut self, entry_title: &str) -> bool{
let pos = self.entries
.iter()
.position(|entry| entry.title.eq(entry_title));
return match pos {
Some(index) => {
self.entries.remove(index);
true
}
None => false
}
}
fn invalid_data_error(text: String) -> io::Result<Database>{<|fim▁hole|>
#[cfg(test)]
mod tests {
    use db::Entry;
    use db::Database;
    use std::io::Cursor;
    use std::io::Read;
    // Round-trip: save three entries into an in-memory buffer, reload, and
    // verify the entry count survives the encrypt/decrypt cycle.
    #[test]
    fn test_save_and_load() {
        let mut buff: Cursor<Vec<u8>> = Cursor::new(vec![]);
        {
            let mut db = Database::empty("test");
            db.add(Entry::new("service_a", "name_a", "pass_a"));
            db.add(Entry::new("service_b", "name_b", "pass_b"));
            db.add(Entry::new("service_c", "name_c", "pass_c"));
            db.save(&mut buff);
        }
        // Cursor position has to be reset before reading
        buff.set_position(0);
        let db = Database::open("test", &mut buff).unwrap();
        assert_eq!(db.entries.len(), 3);
    }
}
}
} |
<|file_name|>cmd_init.py<|end_file_name|><|fim▁begin|># coding: utf-8
import os
import click
from chado import ChadoInstance
from chakin.cli import pass_context
from chakin import config
from chakin.io import warn, info
CONFIG_TEMPLATE = """## Chado's chakin: Global Configuration File.
# Each stanza should contain a single chado server to control.
#
# You can set the key __default to the name of a default instance
__default: local
local:
dbhost: "%(dbhost)s"<|fim▁hole|> dbport: "%(dbport)s"
dbschema: "%(schema)s"
"""
SUCCESS_MESSAGE = (
"Ready to go! Type `chakin` to get a list of commands you can execute."
)
@click.command("config_init")
@pass_context
def cli(ctx, url=None, api_key=None, admin=False, **kwds):
    """Help initialize global configuration (in home directory)
    """
    # NOTE(review): url/api_key/admin are accepted but never used here —
    # presumably kept for CLI-signature compatibility; confirm.
    click.echo("""Welcome to Chado's Chakin! (茶巾)""")
    if os.path.exists(config.global_config_path()):
        info("Your chakin configuration already exists. Please edit it instead: %s" % config.global_config_path())
        return 0
    while True:
        # Check environment
        dbhost = click.prompt("PGHOST")
        dbname = click.prompt("PGDATABASE")
        dbuser = click.prompt("PGUSER")
        dbpass = click.prompt("PGPASS", hide_input=True)
        dbport = click.prompt("PGPORT")
        schema = click.prompt("PGSCHEMA")
        info("Testing connection...")
        try:
            # Constructing the instance performs the connection test.
            instance = ChadoInstance(dbhost=dbhost, dbname=dbname, dbuser=dbuser, dbpass=dbpass, dbport=dbport, dbschema=schema)
            # We do a connection test during startup.
            info("Ok! Everything looks good.")
            break
        except Exception as e:
            warn("Error, we could not access the configuration data for your instance: %s", e)
            should_break = click.prompt("Continue despite inability to contact this instance? [y/n]")
            if should_break in ('Y', 'y'):
                break
    config_path = config.global_config_path()
    if os.path.exists(config_path):
        # Re-checked: the file may have appeared while we were prompting.
        warn("File %s already exists, refusing to overwrite." % config_path)
        return -1
    with open(config_path, "w") as f:
        f.write(CONFIG_TEMPLATE % {
            'dbhost': dbhost,
            'dbname': dbname,
            'dbuser': dbuser,
            'dbpass': dbpass,
            'dbport': dbport,
            'schema': schema,
        })
    info(SUCCESS_MESSAGE)
dbuser: "%(dbuser)s"
dbpass: "%(dbpass)s" |
<|file_name|>calculate_distance.go<|end_file_name|><|fim▁begin|>package main
import (
"bufio"
"fmt"
"log"
"math"
"os"
)
// sqrt returns the integer part of the square root of a (truncated toward zero).
func sqrt(a int) int {
	root := math.Sqrt(float64(a))
	return int(root)
}
<|fim▁hole|>func main() {
var x1, y1, x2, y2 int
data, err := os.Open(os.Args[1])
if err != nil {
log.Fatal(err)
}
defer data.Close()
scanner := bufio.NewScanner(data)
for scanner.Scan() {
fmt.Sscanf(scanner.Text(), "(%d, %d) (%d, %d)", &x1, &y1, &x2, &y2)
x, y := x1-x2, y1-y2
fmt.Println(sqrt(x*x + y*y))
}
}<|fim▁end|> | |
<|file_name|>stat.go<|end_file_name|><|fim▁begin|>package stat
import (
"fmt"
"time"
// "encoding/json"
)
// RevStat summarizes a single revision of a document: author, word count,
// modification timestamp and word-frequency pairs.
type RevStat struct {
	RevId     string     `json:"RevId"`
	UserName  string     `json:"UserName"`
	WordCount int        `json:"WordCount"`
	ModDate   string     `json:"ModDate"`
	WordFreq  []WordPair `json:"WordFreq"`
}
// DocStat aggregates document metadata with its full revision history.
type DocStat struct {
	FileId  string    `json:"FileId"`
	Title   string    `json:"Title"`
	LastMod string    `json:"LastMod"`
	RevList []RevStat `json:"RevList"`
}
// GetTime parses ModDate (layout "2006-01-02T15:04:05.000Z") and renders it
// as "HH:MM".
// NOTE(review): the parse error is discarded — a malformed ModDate silently
// formats the zero time ("00:00"); confirm that is acceptable.
func (rev RevStat) GetTime() string {
	x, _ := time.Parse("2006-01-02T15:04:05.000Z", rev.ModDate)
	return x.Format("15:04")
}
<|fim▁hole|> return fmt.Sprintf("[%s %s] %d words by %s. \n\t Words [%s]", rev.ModDate, rev.RevId, rev.WordCount, rev.UserName, rev.WordFreq)
}
func (doc DocStat) String() string {
s := fmt.Sprintf("[%s] '%s' last mod on %s with revs\n", doc.FileId, doc.Title, doc.LastMod)
for i, v := range doc.RevList {
s += fmt.Sprintf("\t %d:%s\n", i, v)
}
return s
}<|fim▁end|> | func (rev RevStat) String() string { |
<|file_name|>dbrp_mapping.go<|end_file_name|><|fim▁begin|>package influxdb
import (
"context"
"strconv"
"strings"
"unicode"
)
// DBRPMappingServiceV2 provides CRUD to DBRPMappingV2s.
// All operations are org-scoped; callers must supply the owning orgID where noted.
type DBRPMappingServiceV2 interface {
	// FindBy returns the dbrp mapping for the specified ID.
	// Requires orgID because every resource will be org-scoped.
	FindByID(ctx context.Context, orgID, id ID) (*DBRPMappingV2, error)
	// FindMany returns a list of dbrp mappings that match filter and the total count of matching dbrp mappings.
	FindMany(ctx context.Context, dbrp DBRPMappingFilterV2, opts ...FindOptions) ([]*DBRPMappingV2, int, error)
	// Create creates a new dbrp mapping, if a different mapping exists an error is returned.
	Create(ctx context.Context, dbrp *DBRPMappingV2) error
	// Update a new dbrp mapping
	Update(ctx context.Context, dbrp *DBRPMappingV2) error
	// Delete removes a dbrp mapping.
	// Deleting a mapping that does not exists is not an error.
	// Requires orgID because every resource will be org-scoped.
	Delete(ctx context.Context, orgID, id ID) error
}
// DBRPMappingV2 represents a mapping of a database and retention policy to an organization ID and bucket ID.
type DBRPMappingV2 struct {
	ID              ID     `json:"id"`
	Database        string `json:"database"`
	RetentionPolicy string `json:"retention_policy"`
	// Default indicates if this mapping is the default for the cluster and database.
	Default bool `json:"default"`
	OrganizationID ID `json:"organization_id"`
	BucketID       ID `json:"bucket_id"`
}
// Validate reports any validation errors for the mapping.
// Database and retention policy names must pass validName; both IDs must be set.
func (m DBRPMappingV2) Validate() error {
	invalid := func(msg string) error {
		return &Error{
			Code: EInvalid,
			Msg:  msg,
		}
	}
	if !validName(m.Database) {
		return invalid("database must contain at least one character and only be letters, numbers, '_', '-', and '.'")
	}
	if !validName(m.RetentionPolicy) {
		return invalid("retentionPolicy must contain at least one character and only be letters, numbers, '_', '-', and '.'")
	}
	if !m.OrganizationID.Valid() {
		return invalid("organizationID is required")
	}
	if !m.BucketID.Valid() {
		return invalid("bucketID is required")
	}
	return nil
}
// Equal checks if the two mappings are identical.
// Pointer identity short-circuits to true; otherwise every field must match
// AND both sides' ID, OrganizationID and BucketID must be valid (non-zero) —
// two mappings with invalid IDs are never considered equal.
func (m *DBRPMappingV2) Equal(o *DBRPMappingV2) bool {
	if m == o {
		return true
	}
	if m == nil || o == nil {
		return false
	}
	return m.Database == o.Database &&
		m.RetentionPolicy == o.RetentionPolicy &&
		m.Default == o.Default &&
		m.OrganizationID.Valid() &&
		o.OrganizationID.Valid() &&
		m.BucketID.Valid() &&
		o.BucketID.Valid() &&
		o.ID.Valid() &&
		m.ID == o.ID &&
		m.OrganizationID == o.OrganizationID &&
		m.BucketID == o.BucketID
}
// DBRPMappingFilterV2 represents a set of filters that restrict the returned results.
// A nil field means "do not filter on this attribute".
type DBRPMappingFilterV2 struct {
	ID       *ID
	OrgID    *ID
	BucketID *ID
	Database        *string
	RetentionPolicy *string
	Default         *bool
}
func (f DBRPMappingFilterV2) String() string {
var s strings.Builder
s.WriteString("{ id:")
if f.ID != nil {
s.WriteString(f.ID.String())
} else {
s.WriteString("<nil>")<|fim▁hole|> s.WriteString(" org_id:")
if f.ID != nil {
s.WriteString(f.OrgID.String())
} else {
s.WriteString("<nil>")
}
s.WriteString(" bucket_id:")
if f.ID != nil {
s.WriteString(f.OrgID.String())
} else {
s.WriteString("<nil>")
}
s.WriteString(" db:")
if f.Database != nil {
s.WriteString(*f.Database)
} else {
s.WriteString("<nil>")
}
s.WriteString(" rp:")
if f.RetentionPolicy != nil {
s.WriteString(*f.RetentionPolicy)
} else {
s.WriteString("<nil>")
}
s.WriteString(" default:")
if f.Default != nil {
s.WriteString(strconv.FormatBool(*f.Default))
} else {
s.WriteString("<nil>")
}
s.WriteString("}")
return s.String()
}
// DBRPMappingService provides a mapping of cluster, database and retention policy to an organization ID and bucket ID.
// This is the v1 (cluster-keyed) counterpart of DBRPMappingServiceV2.
type DBRPMappingService interface {
	// FindBy returns the dbrp mapping the for cluster, db and rp.
	FindBy(ctx context.Context, cluster, db, rp string) (*DBRPMapping, error)
	// Find returns the first dbrp mapping the matches the filter.
	Find(ctx context.Context, filter DBRPMappingFilter) (*DBRPMapping, error)
	// FindMany returns a list of dbrp mappings that match filter and the total count of matching dbrp mappings.
	FindMany(ctx context.Context, filter DBRPMappingFilter, opt ...FindOptions) ([]*DBRPMapping, int, error)
	// Create creates a new dbrp mapping, if a different mapping exists an error is returned.
	Create(ctx context.Context, dbrpMap *DBRPMapping) error
	// Delete removes a dbrp mapping.
	// Deleting a mapping that does not exists is not an error.
	Delete(ctx context.Context, cluster, db, rp string) error
}
// DBRPMapping represents a mapping of a cluster, database and retention policy to an organization ID and bucket ID.
type DBRPMapping struct {
	Cluster         string `json:"cluster"`
	Database        string `json:"database"`
	RetentionPolicy string `json:"retention_policy"`
	// Default indicates if this mapping is the default for the cluster and database.
	Default bool `json:"default"`
	OrganizationID ID `json:"organization_id"`
	BucketID       ID `json:"bucket_id"`
}
// Validate reports any validation errors for the mapping.
// Cluster, database and retention policy names must pass validName; both IDs
// must be set.
func (m DBRPMapping) Validate() error {
	invalid := func(msg string) error {
		return &Error{
			Code: EInvalid,
			Msg:  msg,
		}
	}
	if !validName(m.Cluster) {
		return invalid("cluster must contain at least one character and only be letters, numbers, '_', '-', and '.'")
	}
	if !validName(m.Database) {
		return invalid("database must contain at least one character and only be letters, numbers, '_', '-', and '.'")
	}
	if !validName(m.RetentionPolicy) {
		return invalid("retentionPolicy must contain at least one character and only be letters, numbers, '_', '-', and '.'")
	}
	if !m.OrganizationID.Valid() {
		return invalid("organizationID is required")
	}
	if !m.BucketID.Valid() {
		return invalid("bucketID is required")
	}
	return nil
}
// validName reports whether name is acceptable as a DB/RP name: non-empty,
// not "." or "..", free of path separators, and made of printable runes only.
func validName(name string) bool {
	if name == "" || name == "." || name == ".." {
		return false
	}
	if strings.ContainsAny(name, `/\`) {
		return false
	}
	for _, r := range name {
		if !unicode.IsPrint(r) {
			return false
		}
	}
	return true
}
// Equal checks if the two mappings are identical.
// Pointer identity short-circuits to true; otherwise every field must match
// AND both sides' OrganizationID and BucketID must be valid (non-zero).
func (m *DBRPMapping) Equal(o *DBRPMapping) bool {
	if m == o {
		return true
	}
	if m == nil || o == nil {
		return false
	}
	return m.Cluster == o.Cluster &&
		m.Database == o.Database &&
		m.RetentionPolicy == o.RetentionPolicy &&
		m.Default == o.Default &&
		m.OrganizationID.Valid() &&
		o.OrganizationID.Valid() &&
		m.BucketID.Valid() &&
		o.BucketID.Valid() &&
		m.OrganizationID == o.OrganizationID &&
		m.BucketID == o.BucketID
}
// DBRPMappingFilter represents a set of filters that restrict the returned results by cluster, database and retention policy.
// A nil field means "do not filter on this attribute".
type DBRPMappingFilter struct {
	Cluster         *string
	Database        *string
	RetentionPolicy *string
	Default         *bool
}
func (f DBRPMappingFilter) String() string {
var s strings.Builder
s.WriteString("{")
s.WriteString("cluster:")
if f.Cluster != nil {
s.WriteString(*f.Cluster)
} else {
s.WriteString("<nil>")
}
s.WriteString(" db:")
if f.Database != nil {
s.WriteString(*f.Database)
} else {
s.WriteString("<nil>")
}
s.WriteString(" rp:")
if f.RetentionPolicy != nil {
s.WriteString(*f.RetentionPolicy)
} else {
s.WriteString("<nil>")
}
s.WriteString(" default:")
if f.Default != nil {
s.WriteString(strconv.FormatBool(*f.Default))
} else {
s.WriteString("<nil>")
}
s.WriteString("}")
return s.String()
}<|fim▁end|> | }
|
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.9.12 on 2018-12-04 15:13
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the survey app: Question, Survey and SurveyResult.

    SurveyResult links a Persona (from 'anagrafica') to a question/survey pair
    with a free-text response.
    """

    initial = True

    dependencies = [
        # Persona FK below requires this anagrafica migration to be applied first.
        ('anagrafica', '0049_auto_20181028_1639'),
    ]

    operations = [
        migrations.CreateModel(
            name='Question',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('text', models.CharField(max_length=255)),
                ('is_active', models.BooleanField(default=True)),
                ('required', models.BooleanField(default=True, verbose_name='Obbligatorio')),
            ],
            options={
                'verbose_name': 'Domanda',
                'verbose_name_plural': 'Domande',
            },
        ),
        migrations.CreateModel(
            name='Survey',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('is_active', models.BooleanField(default=True)),
                ('text', models.CharField(max_length=255)),
            ],
            options={
                'verbose_name': 'Questionario di gradimento',
                'verbose_name_plural': 'Questionari di gradimento',
            },  # restored: the options dict and CreateModel call were left unterminated
        ),
        migrations.CreateModel(
            name='SurveyResult',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('response', models.TextField(blank=True, max_length=1000, null=True)),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='survey.Question')),
                ('survey', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='survey.Survey')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='anagrafica.Persona')),
            ],
            options={
                'verbose_name': "Risposta dell'utente",
                'verbose_name_plural': 'Risposte degli utenti',
            },
        ),
        migrations.AddField(
            model_name='question',
            name='survey',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='survey.Survey'),
        ),
    ]
), |
<|file_name|>MainActivity.java<|end_file_name|><|fim▁begin|>package com.rarnu.tools.neo.activity;
import android.Manifest;
import android.app.AlertDialog;
import android.app.Fragment;
import android.content.pm.PackageManager;
import android.os.Build;
import android.os.Bundle;
import com.rarnu.tools.neo.R;
import com.rarnu.tools.neo.api.NativeAPI;
import com.rarnu.tools.neo.base.BaseActivity;
import com.rarnu.tools.neo.fragment.MainFragment;
import com.rarnu.tools.neo.utils.UIUtils;
import com.rarnu.tools.neo.xposed.XpStatus;
/**
 * Main entry activity: initializes display metrics, mounts the native layer,
 * warns the user when Xposed is disabled, the Android version is unsupported
 * (API 24+) or root was rejected, and finally requests storage permissions.
 */
public class MainActivity extends BaseActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        UIUtils.initDisplayMetrics(this, getWindowManager(), false);
        super.onCreate(savedInstanceState);
        // mount() returning false means the native layer has no privileged access.
        NativeAPI.isRejected = !NativeAPI.mount();
        if (!XpStatus.isEnable()) {
            // Xposed module not active: warn the user.
            new AlertDialog.Builder(this)
                    .setTitle(R.string.alert_hint)
                    .setMessage(R.string.alert_xposed)
                    .setPositiveButton(R.string.alert_ok, null)
                    .show();
        }
        if (Build.VERSION.SDK_INT >= 24) {
            // Android N and later are not fully supported yet.
            new AlertDialog.Builder(this)
                    .setTitle(R.string.alert_hint)
                    .setMessage(R.string.alert_androidn_pending)
                    .setPositiveButton(R.string.alert_ok, null)
                    .show();
        } else {
            if (NativeAPI.isRejected) {
                // Root/mount was denied on a supported Android version.
                new AlertDialog.Builder(this)
                        .setTitle(R.string.alert_hint)
                        .setMessage(R.string.alert_root)
                        .setCancelable(false)
                        .setPositiveButton(R.string.alert_ok, null)
                        .show();
            }
        }
        requirePermission();
    }
    @Override
    public int getIcon() {
        return R.drawable.ic_launcher;
    }
    @Override
    public Fragment replaceFragment() {
        return new MainFragment();
    }
    @Override
    public int customTheme() {
        // 0 means: use the default theme.
        return 0;
    }
    @Override
    public boolean getActionBarCanBack() {
        return false;
    }
    /** Requests external-storage permissions on M+; earlier versions are granted at install time. */
    private void requirePermission() {
        if (Build.VERSION.SDK_INT >= 23) {
            if (checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
                requestPermissions(new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE, Manifest.permission.READ_EXTERNAL_STORAGE}, 0);
            } else {
                XpStatus.canWriteSdcard = true;
            }
        } else {
            XpStatus.canWriteSdcard = true;
        }
    }
    // No override here for compact with 5.0
    // @Override
    public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        // Record whether the write permission was granted; read is implied by the same group.
        for (int i = 0; i < permissions.length; i++) {
            if (permissions[i].equals(Manifest.permission.WRITE_EXTERNAL_STORAGE)) {
                XpStatus.canWriteSdcard = grantResults[i] == PackageManager.PERMISSION_GRANTED;
                break;
            }
        }
    }
}
<|file_name|>connection.js<|end_file_name|><|fim▁begin|>"use strict";
var net = require('net');
var events = require('events');
var util = require('util');
var async = require('async');
var tls = require('tls');
var Encoder = require('./encoder.js');
var writers = require('./writers');
var requests = require('./requests');
var streams = require('./streams');
var utils = require('./utils');
var types = require('./types');
var errors = require('./errors');
var StreamIdStack = require('./stream-id-stack');
/** @const */
var idleQuery = 'SELECT key from system.local';
/** @const */
var maxProtocolVersion = 4;
/**
 * Represents a connection to a Cassandra node
 * @param {String} endPoint An string containing ip address and port of the host
 * @param {Number} protocolVersion Protocol version to use, or null to negotiate downwards from the maximum supported.
 * @param {ClientOptions} options
 * @constructor
 */
function Connection(endPoint, protocolVersion, options) {
  events.EventEmitter.call(this);
  this.setMaxListeners(0);
  if (!endPoint || endPoint.indexOf(':') <= 0) {
    throw new Error('EndPoint must contain the ip address and port separated by : symbol');
  }
  this.endPoint = endPoint;
  var hostAndPort = endPoint.split(':');
  this.address = hostAndPort[0];
  this.port = hostAndPort[1];
  Object.defineProperty(this, "options", { value: options, enumerable: false, writable: false});
  if (protocolVersion === null) {
    //Set initial protocol version
    protocolVersion = maxProtocolVersion;
    if (options.protocolOptions.maxVersion > 0 && options.protocolOptions.maxVersion < maxProtocolVersion) {
      //limit the protocol version
      protocolVersion = options.protocolOptions.maxVersion;
    }
    //Allow to check version using this connection instance
    this.checkingVersion = true;
  }
  this.protocolVersion = protocolVersion;
  //in-flight request handlers, keyed by stream id
  this.streamHandlers = {};
  //writes queued while no stream id was available
  this.pendingWrites = [];
  //PREPARE requests in flight, keyed by keyspace+query
  this.preparing = {};
  /**
   * The timeout state for the idle request (heartbeat)
   */
  this.idleTimeout = null;
  this.timedOutHandlers = 0;
  this.streamIds = new StreamIdStack(this.protocolVersion);
  this.encoder = new Encoder(protocolVersion, options);
}
util.inherits(Connection, events.EventEmitter);
Connection.prototype.log = utils.log;
/**
 * Binds the necessary event listeners for the socket and wires the
 * frame-parsing pipeline: socket -> protocol -> parser -> result emitter.
 */
Connection.prototype.bindSocketListeners = function() {
  //Remove listeners that were used for connecting
  this.netClient.removeAllListeners('connect');
  this.netClient.removeAllListeners('timeout');
  var self = this;
  this.netClient.on('close', function() {
    self.log('info', 'Connection to ' + self.address + ':' + self.port + ' closed');
    self.connected = false;
    self.connecting = false;
    //fail every in-flight and queued request
    self.clearAndInvokePending();
  });
  var protocol = new streams.Protocol({objectMode: true}, this.protocolVersion);
  this.parser = new streams.Parser({objectMode: true}, this.encoder);
  var resultEmitter = new streams.ResultEmitter({objectMode: true});
  resultEmitter.on('result', this.handleResult.bind(this));
  resultEmitter.on('row', this.handleRow.bind(this));
  resultEmitter.on('frameEnded', this.freeStreamId.bind(this));
  resultEmitter.on('nodeEvent', this.handleNodeEvent.bind(this));
  this.netClient
    .pipe(protocol)
    .pipe(this.parser)
    .pipe(resultEmitter);
  this.writeQueue = new writers.WriteQueue(this.netClient, this.encoder, this.options);
};
/**
 * Connects a socket and sends the startup protocol messages.
 * Uses TLS when sslOptions is set; applies connect timeout, TCP keep-alive
 * and no-delay from socketOptions.
 */
Connection.prototype.open = function (callback) {
  var self = this;
  this.log('info', 'Connecting to ' + this.address + ':' + this.port);
  this.connecting = true;
  if (!this.options.sslOptions) {
    this.netClient = new net.Socket();
    this.netClient.connect(this.port, this.address, function connectCallback() {
      self.log('verbose', 'Socket connected to ' + self.address + ':' + self.port);
      self.bindSocketListeners();
      self.startup(callback);
    });
  }
  else {
    //use TLS
    var sslOptions = utils.extend({rejectUnauthorized: false}, this.options.sslOptions);
    this.netClient = tls.connect(this.port, this.address, sslOptions, function tlsConnectCallback() {
      self.log('verbose', 'Secure socket connected to ' + self.address + ':' + self.port);
      self.bindSocketListeners();
      self.startup(callback);
    });
  }
  //connect-phase error/timeout handlers; replaced once the connection is ready
  this.netClient.once('error', function (err) {
    self.errorConnecting(err, false, callback);
  });
  this.netClient.once('timeout', function connectTimedOut() {
    var err = new types.DriverError('Connection timeout');
    self.errorConnecting(err, true, callback);
  });
  this.netClient.setTimeout(this.options.socketOptions.connectTimeout);
  // Improve failure detection with TCP keep-alives
  if (this.options.socketOptions.keepAlive) {
    this.netClient.setKeepAlive(true, this.options.socketOptions.keepAliveDelay);
  }
  this.netClient.setNoDelay(!!this.options.socketOptions.tcpNoDelay);
};
/**
 * Determines the protocol version to use and sends the STARTUP request.
 * When the server rejects the protocol version, closes the socket, decreases
 * the version and retries the whole open sequence.
 * @param {Function} callback
 */
Connection.prototype.startup = function (callback) {
  if (this.checkingVersion) {
    this.log('info', 'Trying to use protocol version ' + this.protocolVersion);
  }
  var self = this;
  this.sendStream(new requests.StartupRequest(), null, function (err, response) {
    if (err && self.checkingVersion && self.protocolVersion > 1) {
      var invalidProtocol = (err instanceof errors.ResponseError &&
        err.code === types.responseErrorCodes.protocolError &&
        err.message.indexOf('Invalid or unsupported protocol version') >= 0);
      if (!invalidProtocol && self.protocolVersion > 3) {
        //For some versions of Cassandra, the error is wrapped into a server error
        //See CASSANDRA-9451
        invalidProtocol = (err instanceof errors.ResponseError &&
          err.code === types.responseErrorCodes.serverError &&
          err.message.indexOf('ProtocolException: Invalid or unsupported protocol version') > 0);
      }
      if (invalidProtocol) {
        self.log('info', 'Protocol v' + self.protocolVersion + ' not supported, using v' + (self.protocolVersion-1));
        self.decreaseVersion();
        //The host closed the connection, close the socket
        setImmediate(function () {
          self.close(function () {
            //Retry
            self.open(callback);
          });
        });
        return;
      }
    }
    if (response && response.mustAuthenticate) {
      //server requires authentication before the connection is usable
      return self.authenticate(null, null, startupCallback);
    }
    startupCallback(err);
  });
  function startupCallback(err) {
    if (err) {
      return self.errorConnecting(err, true, callback);
    }
    //The socket is connected and the connection is authenticated
    return self.connectionReady(callback);
  }
};
/**
 * Marks the connect attempt as failed: logs the error, optionally destroys
 * the half-open socket, and forwards the error to the callback.
 */
Connection.prototype.errorConnecting = function (err, destroy, callback) {
  this.connecting = false;
  this.log('warning', 'There was an error when trying to connect to the host ' + this.address, err);
  if (destroy) {
    //there is a TCP connection that should be killed.
    this.netClient.destroy();
  }
  callback(err);
};
/**
 * Sets the connection to ready/connected status, swapping the connect-phase
 * error handler for the steady-state one.
 */
Connection.prototype.connectionReady = function (callback) {
  this.emit('connected');
  this.connected = true;
  this.connecting = false;
  // Remove existing error handlers as the connection is now ready.
  this.netClient.removeAllListeners('error');
  this.netClient.on('error', this.handleSocketError.bind(this));
  callback();
};
/**
 * Steps the protocol down one version and propagates the new value to the
 * encoder and the stream id pool.
 */
Connection.prototype.decreaseVersion = function () {
  var newVersion = this.protocolVersion - 1;
  this.protocolVersion = newVersion;
  this.encoder.setProtocolVersion(newVersion);
  this.streamIds.setVersion(newVersion);
};
/**
 * Handle socket errors, if the socket is not readable invoke all pending callbacks
 * (delegates the cleanup to clearAndInvokePending, attaching the socket error).
 */
Connection.prototype.handleSocketError = function (err) {
  this.clearAndInvokePending(err);
};
/**
 * Cleans all internal state and invokes all pending callbacks of sent streams
 * with a "Socket was closed" error.
 * @param {Error} [innerError] Optional underlying socket error attached to the error handed to callbacks.
 */
Connection.prototype.clearAndInvokePending = function (innerError) {
  if (this.idleTimeout) {
    //Remove the idle request
    clearTimeout(this.idleTimeout);
    this.idleTimeout = null;
  }
  this.streamIds.clear();
  var err = new types.DriverError('Socket was closed');
  //restored guard: the original had an unmatched closing brace here, assigning
  //innerError unconditionally and breaking the function body
  if (innerError) {
    err.innerError = innerError;
  }
  //copy all handlers
  var handlers = utils.objectValues(this.streamHandlers);
  //remove it from the map
  this.streamHandlers = {};
  if (handlers.length > 0) {
    this.log('info', 'Invoking ' + handlers.length + ' pending callbacks');
  }
  var self = this;
  //invoke all handlers
  async.each(handlers, function (item, next) {
    self.invokeCallback(item, err);
    next();
  });
  var pendingWritesCopy = this.pendingWrites;
  this.pendingWrites = [];
  async.each(pendingWritesCopy, function (item, next) {
    if (!item.callback) return;
    item.callback(err);
    next();
  });
};
/**
 * Handles authentication requests and responses (SASL exchange, or
 * CREDENTIALS under protocol v1).
 * @param {Authenticator} authenticator Current authenticator, or null to obtain one from the auth provider.
 * @param {Buffer} token Token to send in the AUTH_RESPONSE.
 * @param {Function} callback
 */
Connection.prototype.authenticate = function(authenticator, token, callback) {
  var self = this;
  if (authenticator === null) {
    //initial token
    if (!this.options.authProvider) {
      return callback(new errors.AuthenticationError('Authentication provider not set'));
    }
    authenticator = this.options.authProvider.newAuthenticator();
    authenticator.initialResponse(function (err, t) {
      //let's start again with the correct args
      if (err) return callback(err);
      self.authenticate(authenticator, t, callback);
    });
    return;
  }
  var request = new requests.AuthResponseRequest(token);
  if (this.protocolVersion === 1) {
    //No Sasl support, use CREDENTIALS
    //noinspection JSUnresolvedVariable
    if (!authenticator.username) {
      return callback(new errors.AuthenticationError('Only plain text authenticator providers allowed under protocol v1'));
    }
    //noinspection JSUnresolvedVariable
    request = new requests.CredentialsRequest(authenticator.username, authenticator.password);
  }
  this.sendStream(request, null, function (err, result) {
    if (err) {
      if (err instanceof errors.ResponseError && err.code === types.responseErrorCodes.badCredentials) {
        var authError = new errors.AuthenticationError(err.message);
        authError.additionalInfo = err;
        err = authError;
      }
      return callback(err);
    }
    if (result.ready) {
      authenticator.onAuthenticationSuccess();
      return callback();
    }
    if (result.authChallenge) {
      //fix: return here — previously execution fell through and ALSO invoked
      //the callback below with a DriverInternalError (double callback)
      return authenticator.evaluateChallenge(result.token, function (err, t) {
        if (err) {
          return callback(err);
        }
        //here we go again
        self.authenticate(authenticator, t, callback);
      });
    }
    callback(new errors.DriverInternalError('Unexpected response from Cassandra: ' + util.inspect(result)))
  });
};
/**
 * Executes a 'USE ' query, if keyspace is provided and it is different from the current keyspace
 * @param {?String} keyspace
 * @param {Function} callback
 */
Connection.prototype.changeKeyspace = function (keyspace, callback) {
  if (!keyspace || this.keyspace === keyspace) {
    //nothing to do: no keyspace requested or already using it
    return callback();
  }
  if (this.toBeKeyspace === keyspace) {
    //a USE for this keyspace is already in flight: wait for it to finish
    return this.once('keyspaceChanged', callback);
  }
  this.toBeKeyspace = keyspace;
  var query = util.format('USE "%s"', keyspace);
  var self = this;
  this.sendStream(
    new requests.QueryRequest(query, null, null),
    null,
    function (err) {
      if (!err) {
        self.keyspace = keyspace;
      }
      callback(err);
      //wake up any callers queued on the in-flight USE
      self.emit('keyspaceChanged', err, keyspace);
    });
};
/**
 * Prepares a query on a given connection. If its already being prepared, it queues the callback.
 * @param {String} query
 * @param {function} callback
 */
Connection.prototype.prepareOnce = function (query, callback) {
  //the same text prepared under different keyspaces yields different ids
  var key = ( this.keyspace || '' ) + query;
  var pending = this.preparing[key];
  if (pending) {
    //a PREPARE for this query is already in flight: piggyback on it
    return pending.once('prepared', callback);
  }
  pending = new events.EventEmitter();
  pending.setMaxListeners(0);
  pending.once('prepared', callback);
  this.preparing[key] = pending;
  var self = this;
  this.sendStream(new requests.PrepareRequest(query), null, function (err, response) {
    pending.emit('prepared', err, response);
    delete self.preparing[key];
  });
};
/**
 * Uses the frame writer to write into the wire
 * @param request
 * @param options
 * @param {function} callback Function to be called once the response has been received
 */
Connection.prototype.sendStream = function (request, options, callback) {
  var self = this;
  var streamId = this.getStreamId();
  if (streamId === null) {
    //no free stream id: queue the write until one is released
    self.log('info',
      'Enqueuing ' +
      this.pendingWrites.length +
      ', if this message is recurrent consider configuring more connections per host or lowering the pressure');
    return this.pendingWrites.push({request: request, options: options, callback: callback});
  }
  if (!callback) {
    callback = function noop () {};
  }
  this.log('verbose', 'Sending stream #' + streamId);
  request.streamId = streamId;
  request.version = this.protocolVersion;
  this.writeQueue.push(request, this.getWriteCallback(request, options, callback));
};
/**
 * Builds the callback invoked by the write queue once the request bytes hit
 * the socket: on success it arms the read timeout, the idle-heartbeat timer
 * and registers the stream handler for the response.
 */
Connection.prototype.getWriteCallback = function (request, options, callback) {
  var self = this;
  return (function writeCallback (err) {
    if (err) {
      if (!(err instanceof TypeError)) {
        //TypeError is raised when there is a serialization issue
        //If it is not a serialization issue is a socket issue
        err.isServerUnhealthy = true;
      }
      return callback(err);
    }
    self.log('verbose', 'Sent stream #' + request.streamId + ' to ' + self.endPoint);
    //the request was successfully written, use a timer to set the readTimeout
    var timeout;
    if (self.options.socketOptions.readTimeout > 0) {
      timeout = setTimeout(function () {
        self.onTimeout(request.streamId);
      }, self.options.socketOptions.readTimeout);
    }
    if (request instanceof requests.ExecuteRequest || request instanceof requests.QueryRequest) {
      if (options && options.byRow) {
        //stream rows to the caller as they arrive instead of buffering the result
        self.parser.setOptions(request.streamId, { byRow: true });
      }
    }
    if (self.options.pooling.heartBeatInterval) {
      if (self.idleTimeout) {
        //remove the previous timeout for the idle request
        clearTimeout(self.idleTimeout);
      }
      self.idleTimeout = setTimeout(function () {
        self.idleTimeoutHandler();
      }, self.options.pooling.heartBeatInterval);
    }
    //track the in-flight request so the response (or a timeout) can find it
    self.streamHandlers[request.streamId] = {
      callback: callback,
      options: options,
      timeout: timeout
    };
  });
};
/**
 * Function that gets executed once the idle timeout has passed to issue a request to keep the connection alive
 * (a lightweight SELECT against system.local used as a heartbeat).
 */
Connection.prototype.idleTimeoutHandler = function () {
  var self = this;
  if (this.sendingIdleQuery) {
    //don't issue another
    //schedule for next time
    this.idleTimeout = setTimeout(function () {
      self.idleTimeoutHandler();
    }, this.options.pooling.heartBeatInterval);
    return;
  }
  this.log('verbose', 'Connection idling, issuing a Request to prevent idle disconnects');
  this.sendingIdleQuery = true;
  this.sendStream(new requests.QueryRequest(idleQuery), utils.emptyObject, function (err) {
    self.sendingIdleQuery = false;
    if (!err) {
      //The sending succeeded
      //There is a valid response but we don't care about the response
      return;
    }
    self.log('warning', 'Received heartbeat request error', err);
    self.emit('idleRequestError', err);
  });
};
/**
 * Returns an available streamId or null if there isn't any available
 * (stream ids are pooled and reused as frames complete).
 * @returns {Number}
 */
Connection.prototype.getStreamId = function() {
  return this.streamIds.pop();
};
/**
 * Releases the stream id of a finished frame back to the pool and pumps the
 * pending-write queue. Negative ids belong to server-pushed events.
 */
Connection.prototype.freeStreamId = function(header) {
  var id = header.streamId;
  if (id < 0) {
    return;
  }
  delete this.streamHandlers[id];
  this.streamIds.push(id);
  this.writeNext();
  this.log('verbose', 'Done receiving frame #' + id);
};
Connection.prototype.writeNext = function () {
  var conn = this;
  // Defer to the next tick so the current frame handling finishes first.
  setImmediate(function writeNextPending() {
    var queued = conn.pendingWrites.shift();
    if (queued) {
      conn.sendStream(queued.request, queued.options, queued.callback);
    }
  });
};
/**
* Returns the number of requests waiting for response
* @returns {Number}
*/
Connection.prototype.getInFlight = function () {
  // streamIds.inUse counts ids currently handed out, i.e. requests that have
  // been written but whose response has not yet been fully received.
  return this.streamIds.inUse;
};
/**
* Handles a result and error response
*/
Connection.prototype.handleResult = function (header, err, result) {
  var id = header.streamId;
  if (id < 0) {
    // Server-pushed event frame: no handler of ours is registered for it.
    return this.log('verbose', 'event received', header);
  }
  var pending = this.streamHandlers[id];
  if (!pending) {
    return this.log('error', 'The server replied with a wrong streamId #' + id);
  }
  this.log('verbose', 'Received frame #' + id + ' from ' + this.endPoint);
  this.invokeCallback(pending, err, result);
};
Connection.prototype.handleNodeEvent = function (header, event) {
  // Re-emit protocol-level cluster events under driver-level event names.
  var eventType = event.eventType;
  if (eventType === types.protocolEvents.schemaChange) {
    this.emit('nodeSchemaChange', event);
  }
  else if (eventType === types.protocolEvents.topologyChange) {
    this.emit('nodeTopologyChange', event);
  }
  else if (eventType === types.protocolEvents.statusChange) {
    this.emit('nodeStatusChange', event);
  }
  // Unknown event types are deliberately ignored (no default case before).
};
/**
* Handles a row response
*/
Connection.prototype.handleRow = function (header, row, meta, rowLength, flags) {
  var streamId = header.streamId;
  if(streamId < 0) {
    // Negative stream ids are server-pushed events, not replies to our requests.
    return this.log('verbose', 'Event received', header);
  }
  var handler = this.streamHandlers[streamId];
  if (!handler) {
    return this.log('error', 'The server replied with a wrong streamId #' + streamId);
  }
  this.log('verbose', 'Received streaming frame #' + streamId);
  if (handler.timeout) {
    //It started receiving, clear the read timeout
    clearTimeout(handler.timeout);
    handler.timeout = null;
  }
  // Lazily initialize the per-request row counter on first row.
  handler.rowIndex = handler.rowIndex || 0;
  var rowCallback = handler.options && handler.options.rowCallback;
  if (rowCallback) {
    // Deliver each row as it is parsed (byRow streaming mode).
    rowCallback(handler.rowIndex++, row, rowLength);
  }
  if (handler.rowIndex === rowLength) {
    // Last row received: complete the request with the result metadata.
    this.invokeCallback(handler, null, { rowLength: rowLength, meta: meta, flags: flags });
  }
};
/**
* Invokes the handler callback and clears the callback and timers
* @param {{callback, timeout}} handler
* @param {Error} err
* @param [response]
*/
Connection.prototype.invokeCallback = function (handler, err, response) {
  var cb = handler.callback;
  // Detach first so a re-entrant or duplicate frame can't fire it twice.
  handler.callback = null;
  if (handler.timeout) {
    clearTimeout(handler.timeout);
    handler.timeout = null;
  }
  if (cb) {
    cb(err, response);
  }
};
/**
* Executed on request timeout, it callbacks with OperationTimedOutError and clears the closures
*/
Connection.prototype.onTimeout = function (streamId) {
  var handler = this.streamHandlers[streamId];
  if (!handler || !handler.callback) {
    //it's being cleared, don't mind
    return;
  }
  this.timedOutHandlers++;
  var originalCallback = handler.callback;
  var self = this;
  //clear callback closures
  // A late reply will invoke this stub instead of the user's callback,
  // so the only effect is decrementing the timed-out counter.
  handler.callback = function () {
    //if replies, remove from timedOutQueries
    self.timedOutHandlers--;
  };
  if (handler.options && handler.options.rowCallback) {
    // Also mute row streaming: late rows must not surface to the caller.
    handler.options.rowCallback = function noop() {};
  }
  var message = util.format('The host %s did not reply before timeout %d ms', this.endPoint, this.options.socketOptions.readTimeout);
  originalCallback(new errors.OperationTimedOutError(message));
};
/**
* @param {Function} [callback]
*/
Connection.prototype.close = function (callback) {
  this.log('verbose', 'disconnecting');
  this.clearAndInvokePending();
  if(!callback) {
    callback = function () {};
  }
  if (!this.netClient) {
    // Never connected (or already torn down): nothing to close.
    callback();
    return;
  }
  if (!this.connected) {
    // Socket exists but the connection never completed: tear it down hard.
    this.netClient.destroy();
    setImmediate(callback);
    return;
  }
  var self = this;
  this.netClient.once('close', function (hadError) {
    if (hadError) {
      self.log('info', 'The socket closed with a transmission error');
    }
    setImmediate(callback);
  });
  // Graceful shutdown: half-close and wait for the 'close' event above.
  this.netClient.end();
  this.streamHandlers = {};
};
module.exports = Connection;<|fim▁end|> | err.isServerUnhealthy = true;
if (innerError) { |
<|file_name|>bitor.rs<|end_file_name|><|fim▁begin|>#![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
use core::num::Wrapping;
use core::ops::BitOr;
// macro_rules! wrapping_impl {
// ($($t:ty)*) => ($(
// #[stable(feature = "rust1", since = "1.0.0")]
// impl Add for Wrapping<$t> {
// type Output = Wrapping<$t>;
//
// #[inline(always)]
// fn add(self, other: Wrapping<$t>) -> Wrapping<$t> {
// Wrapping(self.0.wrapping_add(other.0))
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl Sub for Wrapping<$t> {
// type Output = Wrapping<$t>;
//
// #[inline(always)]
// fn sub(self, other: Wrapping<$t>) -> Wrapping<$t> {
// Wrapping(self.0.wrapping_sub(other.0))
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl Mul for Wrapping<$t> {
// type Output = Wrapping<$t>;
//
// #[inline(always)]
// fn mul(self, other: Wrapping<$t>) -> Wrapping<$t> {
// Wrapping(self.0.wrapping_mul(other.0))
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl Not for Wrapping<$t> {
// type Output = Wrapping<$t>;
//
// #[inline(always)]
// fn not(self) -> Wrapping<$t> {
// Wrapping(!self.0)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl BitXor for Wrapping<$t> {
// type Output = Wrapping<$t>;
//
// #[inline(always)]
// fn bitxor(self, other: Wrapping<$t>) -> Wrapping<$t> {
// Wrapping(self.0 ^ other.0)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl BitOr for Wrapping<$t> {
// type Output = Wrapping<$t>;
//
// #[inline(always)]
// fn bitor(self, other: Wrapping<$t>) -> Wrapping<$t> {
// Wrapping(self.0 | other.0)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl BitAnd for Wrapping<$t> {
// type Output = Wrapping<$t>;
//
// #[inline(always)]
// fn bitand(self, other: Wrapping<$t>) -> Wrapping<$t> {
// Wrapping(self.0 & other.0)
// }
// }
// )*)
// }<|fim▁hole|>
macro_rules! bitor_test {
($T:ty, $value:expr, $other:expr, $result:expr) => ({
let value: $T = $value;
let wrapping_value: Wrapping<$T> = Wrapping::<$T>(value);
let other: $T = $other;
let wrapping_other: Wrapping<$T> = Wrapping::<$T>(other);
let result: Wrapping<$T> = wrapping_value.bitor(wrapping_other);
assert_eq!(result.0, $result);
let result: Wrapping<$T> = wrapping_value | wrapping_other;
assert_eq!(result.0, $result);
})
}
#[test]
fn bitor_test1() {
bitor_test!( u32, 0xe7e7e7e7, 0x7e7e7e7e, 0xffffffff );
}
}<|fim▁end|> |
// wrapping_impl! { usize u8 u16 u32 u64 isize i8 i16 i64 i64 } |
<|file_name|>chef.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import chef
import datetime
from errbot import BotPlugin, botcmd
from time import time
STALE_TIME = 60 * 30 # 30 minutes
class Chef(BotPlugin):
def pretty_time(self, time):
return datetime.datetime.fromtimestamp(int(time)).strftime('%Y-%m-%d %H:%M:%S')
def search_node (self ,args):
api = chef.autoconfigure()
if not args:
raise Exception("No Search Query")
return chef.Search('node', args)
@botcmd
def search (self, mess, args):
""" Search and return nodes """
list = "Search results for query : %s\n" % args
for row in self.search_node(args):
list += "%s\n" % row.object.name
return(list)
@botcmd
def roles (self, mess, args):
""" Search and return roles """
api = chef.autoconfigure()
roles = ''
for row in chef.Search('role', 'name:*' + args + '*'):
roles += "%s\n" % row.object.name
return(roles)
@botcmd<|fim▁hole|> if row.object.attributes['ohai_time']:
ago = int(time() - row.object.attributes['ohai_time'])
pretty_ohai_time = self.pretty_time(row.object.attributes['ohai_time'])
if ago >= STALE_TIME:
list += "%s ran %s seconds ago ( %s )\n" % (row.object.name, ago, pretty_ohai_time)
return(list)
	@botcmd
	def dpkg (self, mess, args):
		""" Search installed pacakge versions via Chef API ( requires ohai-dpkg) """
		# args is expected as exactly two tokens: "<node-query> <package-name>".
		# NOTE(review): a one-token or three-token args raises ValueError here,
		# so the "No package" guard below can never actually fire.
		(search, package) = args.split()
		if not package:
			raise Exception("No package")
		pacakges = ''
		for row in self.search_node(search):
			# skip nodes with no ohai-dpkg data at all
			if not row.object.attributes['dpkg']:
				continue
			# skip nodes where this particular package is absent/empty
			if not row.object.attributes['dpkg'][package]:
				continue
			pacakges += "%s\t%s\n" % ( row.object.name , row.object.attributes['dpkg'][package]['version'] )
		return(pacakges)<|fim▁end|>
""" Search for stale nodes """
list = "Stale nodes for query : %s ( stale time %s seconds )\n" % (args, STALE_TIME)
for row in self.search_node(args): |
<|file_name|>FloatTag.java<|end_file_name|><|fim▁begin|>package org.jnbt;
/*
* JNBT License
*
* Copyright (c) 2010 Graham Edgecombe
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* * Neither the name of the JNBT team nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
<|fim▁hole|> */
public final class FloatTag extends Tag {
/**
* The value.
*/
private final float value;
/**
* Creates the tag.
* @param name The name.
* @param value The value.
*/
	public FloatTag(String name, float value) {
		super(name);
		// The value is fixed at construction time (the field is final).
		this.value = value;
	}
	@Override
	public Float getValue() {
		// Autoboxes the primitive float into the Float return type.
		return value;
	}
@Override
public String toString() {
String name = getName();
String append = "";
if(name != null && !name.equals("")) {
append = "(\"" + this.getName() + "\")";
}
return "TAG_Float" + append + ": " + value;
}
}<|fim▁end|> | /**
* The <code>TAG_Float</code> tag.
* @author Graham Edgecombe
* |
<|file_name|>phpcomposer.py<|end_file_name|><|fim▁begin|>#
# Copyright (c) 2015 nexB Inc. and others. All rights reserved.
# http://nexb.com and https://github.com/nexB/scancode-toolkit/
# The ScanCode software is licensed under the Apache License version 2.0.
# Data generated with ScanCode require an acknowledgment.
# ScanCode is a trademark of nexB Inc.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# When you publish or redistribute any data created with ScanCode or any ScanCode
# derivative work, you must accompany this data with the following acknowledgment:
#
# Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# ScanCode should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
# ScanCode is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/scancode-toolkit/ for support and download.
from __future__ import absolute_import
from __future__ import print_function
import codecs
import logging
import json
from collections import OrderedDict
from functools import partial
from commoncode import filetype
from commoncode import fileutils
from packagedcode import models
from packagedcode.utils import parse_repo_url
"""
Handle PHP composer packages, refer to https://getcomposer.org/
"""
logger = logging.getLogger(__name__)
# import sys
# logging.basicConfig(level=logging.DEBUG, stream=sys.stdout)
# logger.setLevel(logging.DEBUG)
class PHPComposerPackage(models.Package):
    # NOTE(review): metafiles and mimetypes are missing trailing commas, so
    # they are plain strings rather than one-element tuples like filetypes.
    metafiles = ('composer.json')
    filetypes = ('.json',)
    mimetypes = ('application/json')
    repo_types = (models.repo_phpcomposer,)
    # Fixed package type / language for every composer package.
    type = models.StringType(default='phpcomposer')
    primary_language = models.StringType(default='PHP')

    @classmethod
    def recognize(cls, location):
        # Returns a PHPComposerPackage parsed from the manifest at location, or None.
        return parse(location)
def is_phpcomposer_json(location):
    # A composer manifest must be an existing regular file literally named
    # 'composer.json' (case-insensitive).
    return (filetype.is_file(location)
            and fileutils.file_name(location).lower() == 'composer.json')<|fim▁hole|>
def parse(location):
    """
    Return a Package object from a composer.json file or None.
    """
    if not is_phpcomposer_json(location):
        return
    # mapping of top level composer.json items to the Package object field name
    plain_fields = OrderedDict([
        ('name', 'name'),
        ('description', 'summary'),
        ('keywords', 'keywords'),
        ('version', 'version'),
        ('homepage', 'homepage_url'),
    ])
    # mapping of top level composer.json items to a function accepting as arguments
    # the composer.json element value and returning an iterable of key, values Package Object to update
    field_mappers = OrderedDict([
        ('authors', author_mapper),
        ('license', licensing_mapper),
        ('require', dependencies_mapper),
        ('require-dev', dev_dependencies_mapper),
        ('repositories', repository_mapper),
        ('support', support_mapper),
    ])
    # OrderedDict hook preserves the manifest's key order for stable output.
    with codecs.open(location, encoding='utf-8') as loc:
        data = json.load(loc, object_pairs_hook=OrderedDict)
    if not data.get('name') or not data.get('description'):
        # a composer.json without name and description is not a usable PHP composer package
        # name and description fields are required: https://getcomposer.org/doc/04-schema.md#name
        return
    package = PHPComposerPackage()
    # a composer.json is at the root of a PHP composer package
    base_dir = fileutils.parent_directory(location)
    package.location = base_dir
    package.metafile_locations = [location]
    # copy simple scalar/list values straight onto the package fields
    for source, target in plain_fields.items():
        value = data.get(source)
        if value:
            if isinstance(value, basestring):
                value = value.strip()
            if value:
                setattr(package, target, value)
    # delegate structured values to their dedicated mapper functions
    for source, func in field_mappers.items():
        logger.debug('parse: %(source)r, %(func)r' % locals())
        value = data.get(source)
        if value:
            if isinstance(value, basestring):
                value = value.strip()
            if value:
                func(value, package)
    vendor_mapper(package)  # Parse vendor from name value
    return package
def licensing_mapper(licenses, package):
    """
    Append asserted license entries parsed from composer's ``license`` field
    and return the updated package.

    The field may be a single SPDX id/expression string or a list of strings;
    any other non-empty value is recorded via its repr().
    https://getcomposer.org/doc/04-schema.md#license
    """
    if not licenses:
        return package
    asserted = package.asserted_licenses
    if isinstance(licenses, basestring):
        asserted.append(models.AssertedLicense(license=licenses))
    elif isinstance(licenses, list):
        for entry in licenses:
            if isinstance(entry, basestring):
                asserted.append(models.AssertedLicense(license=entry))
            elif entry:
                # non-string, non-empty element: keep its raw representation
                asserted.append(models.AssertedLicense(license=repr(entry)))
    else:
        # unexpected type: keep the raw representation
        asserted.append(models.AssertedLicense(license=repr(licenses)))
    return package
def author_mapper(authors_content, package):
    """
    Set package.authors from composer's ``authors`` list and return package.
    https://getcomposer.org/doc/04-schema.md#authors
    """
    package.authors = [
        models.Party(type=models.party_person, name=name, email=email, url=url)
        for name, email, url in parse_person(authors_content)
    ]
    return package
def support_mapper(support, package):
    """
    Copy the support contact email and the issue/source URLs onto the package.
    https://getcomposer.org/doc/04-schema.md#support
    """
    contact_email = support.get('email')
    issues_url = support.get('issues')
    source_url = support.get('source')
    package.support_contacts = [contact_email]
    package.bug_tracking_url = issues_url
    package.code_view_url = source_url
    return package
def vendor_mapper(package):
    """
    Derive the vendor party from a 'vendor/project' composer package name.
    https://getcomposer.org/doc/04-schema.md#name
    """
    full_name = package.name
    if full_name and '/' in full_name:
        vendor_name = full_name.split('/')[0]
        if vendor_name:
            package.vendors = [models.Party(name=vendor_name)]
    return package
def repository_mapper(repos, package):
    """
    Update package.vcs_tool / package.vcs_repository from composer's
    ``repositories`` field and return package.

    ``repos`` may be a plain URL string or a list of repository mappings;
    only list entries of type 'vcs' are considered. The VCS tool is guessed
    from URL markers (svn/hg/fossil), defaulting to git.
    https://getcomposer.org/doc/04-schema.md#repositories

    Example list entries include::

        {"type": "composer", "url": "http://packages.example.com"},
        {"type": "vcs", "url": "https://github.com/Seldaek/monolog"},
        {"type": "pear", "url": "https://pear2.php.net"},
        {"type": "package", "package": {...inline dist/source definition...}}
    """
    if not repos:
        return package
    if isinstance(repos, basestring):
        package.vcs_repository = parse_repo_url(repos)
    elif isinstance(repos, list):
        for repo in repos:
            if repo.get('type') == 'vcs':
                # vcs type includes git, svn, fossil or hg.
                # refer to https://getcomposer.org/doc/05-repositories.md#vcs
                repo_url = repo.get('url')
                if repo_url.startswith('svn') or 'subversion.apache.org' in repo_url:
                    package.vcs_tool = 'svn'
                elif repo_url.startswith('hg') or 'mercurial.selenic.com' in repo_url:
                    package.vcs_tool = 'hg'
                elif repo_url.startswith('fossil') or 'fossil-scm.org' in repo_url:
                    package.vcs_tool = 'fossil'
                else:
                    package.vcs_tool = 'git'
                # NOTE(review): later 'vcs' entries overwrite earlier ones;
                # only the last vcs repository in the list is kept.
                package.vcs_repository = parse_repo_url(repo.get('url'))
    return package
def deps_mapper(deps, package, field_name):
    """
    Append Dependency objects built from a composer require/require-dev
    mapping of {name: version_constraint} under the resolved dependency type.
    https://getcomposer.org/doc/04-schema.md#package-links
    """
    dep_types = {
        'dependencies': models.dep_runtime,
        'devDependencies': models.dep_dev,
    }
    resolved_type = dep_types[field_name]
    parsed = [
        models.Dependency(name=dep_name, version_constraint=constraint)
        for dep_name, constraint in deps.items()
    ]
    package.dependencies.setdefault(resolved_type, []).extend(parsed)
    return package
# Pre-bound mappers for composer's runtime vs. development dependency maps.
dependencies_mapper = partial(deps_mapper, field_name='dependencies')
dev_dependencies_mapper = partial(deps_mapper, field_name='devDependencies')
def parse_person(persons):
    """
    Yield a (name, email, url) triple for each person mapping in a composer
    ``authors`` list.

    https://getcomposer.org/doc/04-schema.md#authors
    A "person" is an object with a "name" field and optionally "url", "email"
    and "role" (role is ignored here). Raises when ``persons`` is not a list.
    """
    if isinstance(persons, list):
        for person in persons:
            # ensure we have our three values
            name = person.get('name')
            email = person.get('email')
            url = person.get('homepage')
            # strip whitespace; emails may be wrapped in <>, urls in ()
            yield name and name.strip(), email and email.strip('<> '), url and url.strip('() ')
    else:
        # NOTE(review): '%(person)r' references a name never bound in this
        # branch (the loop variable is only set in the list branch), so this
        # raise itself fails with a KeyError; it should be %(persons)r.
        raise Exception('Incorrect PHP composer composer.json person: %(person)r' % locals())<|fim▁end|>
<|file_name|>ClusterUtils.py<|end_file_name|><|fim▁begin|># This file is part of profileNJ
#
# Date: 02/2014
# ClusterUtils contain implementation of nj and upgma clustering algo, and
# required methods
__author__ = "Emmanuel Noutahi"
from TreeClass import TreeClass
import os
import numpy as np
from StringIO import StringIO
import random
try:
from lxml import etree
# should work since etree is used by ete
except ImportError:
try:
import xml.etree.cElementTree as etree
except ImporError:
try:
import xml.etree.ElementTree as etree
except:
pass
np.set_printoptions(precision=3)
numerictypes = np.core.numerictypes.sctype2char
Float = numerictypes(float)
def find_smallest_index(matrice):
    """Return the (i, j) index pair of the smallest strict-lower-triangle entry.

    Only entries below the diagonal are scanned, so the zero diagonal can never
    be selected and the returned pair always satisfies i > j.
    """
    rows, cols = np.tril_indices_from(matrice, -1)
    flat_pos = matrice[rows, cols].argmin()
    return np.array([rows[flat_pos], cols[flat_pos]])
def condense_matrix(matrice, smallest_index, method='upgma'):
    """Merge the two rows/columns named by smallest_index into one entry.

    smallest_index comes from find_smallest_index(), so first_index > second_index.
    A merged distance vector is computed from the two picked rows (plain average
    for UPGMA; the standard NJ update for 'nj'), written into row/column
    second_index, and row/column first_index is then physically removed via
    remove_ij(), shrinking the matrix by one. The diagonal is re-zeroed.

    NOTE(review): the original docstring described the index roles the other
    way around; the code stores the merge at second_index and drops first_index.
    """
    first_index, second_index = smallest_index
    # the two selected columns/rows whose distances are being merged
    rows = np.take(matrice, smallest_index, 1)
    # default we use upgma
    if(method.lower() == 'nj'):
        new_vector = (
            np.sum(rows, 1) - matrice[first_index, second_index]) * 0.5
    else:
        new_vector = np.average(rows, 1)
    # write the merged distances into BOTH row and column second_index
    matrice[second_index] = new_vector
    matrice[:, second_index] = new_vector
    np.fill_diagonal(matrice, 0)
    # drop row/column first_index; remove_ij returns a (n-1, n-1) view
    return remove_ij(matrice, first_index, first_index)
def remove_ij(x, i, j):
    """Return a view of x with row i and column j removed (shape (n-1, n-1)).

    Works in place: surviving entries are shifted up/left inside x's own
    buffer, then the trailing row/column is cropped off via the slice view.
    """
    # Row i and column j divide the array into 4 quadrants
    y = x[:-1, :-1]
    # Shift the quadrants right of column j / below row i into place.
    y[:i, j:] = x[:i, j + 1:]
    y[i:, :j] = x[i + 1:, :j]
    y[i:, j:] = x[i + 1:, j + 1:]
    return y
def calculate_Q_ij(matrice, ind, n):
    """Return the neighbor-joining Q value for the taxa pair at index ``ind``.

    Q(i, j) = (n - 2) * d(i, j) - sum(row i) - sum(row j).
    """
    i, j = ind
    pair_dist = matrice[i, j]
    return (n - 2) * pair_dist - matrice[i].sum() - matrice[j].sum()
def calculate_Q_matrix(matrice):
    """Return the neighbor-joining Q-matrix for a distance matrix.

    Q[i, j] = (n - 2) * d(i, j) - sum(row i) - sum(row j), computed for every
    (i, j) entry including the diagonal, matching the previous element-wise
    implementation. Vectorized with numpy broadcasting instead of the former
    O(n^2) Python-level nditer loop over calculate_Q_ij.
    """
    n = matrice.shape[0]
    # The element-wise formula used ROW sums for both indices
    # (matrice[ind[0]] and matrice[ind[1]] are both rows), so one sum vector
    # broadcast along each axis reproduces it exactly.
    row_sums = np.sum(matrice, axis=1)
    q_matrix = (n - 2) * matrice - row_sums[:, np.newaxis] - row_sums[np.newaxis, :]
    # The previous implementation filled a float array; preserve that dtype.
    return np.asarray(q_matrix, dtype=float)
def paired_node_distance(matrice, smallest_index):
    """Return the branch lengths from the two just-joined taxa to their parent.

    Uses the standard NJ formula
    (http://en.wikipedia.org/wiki/Neighbor_joining#equation_2); when only two
    nodes remain (final join) the pair distance is simply split in half.
    """
    i, j = smallest_index
    n = matrice.shape[0]
    joined_dist = matrice[i, j]
    if n - 2 <= 0:
        # Final join: no other taxa left to correct against.
        return joined_dist / 2.0, joined_dist / 2.0
    # Net divergence term of the NJ branch-length formula.
    divergence = np.sum(matrice[i]) - np.sum(matrice[:, j])
    dist_i = 0.5 * joined_dist + ((0.5 / (n - 2)) * (divergence))
    dist_j = joined_dist - dist_i
    return dist_i, dist_j
def condense_node_order(matrice, smallest_index, node_order, method='upgma'):
    """
    Join the two nodes named by smallest_index into a new parent node.

    Used together with condense_matrix() while collapsing the distance matrix;
    smallest_index comes from find_smallest_index() (so index1 > index2).
    Branch lengths are set on each child (NJ formula for 'nj', half the pair
    distance for 'upgma', 0 otherwise). The combined node is stored at index2
    and the entry at index1 is deleted from the list, mirroring the row/column
    removal done by remove_ij().
    NOTE(review): the original docstring said the second index is replaced
    with None; the code actually deletes it, shrinking the list.
    """
    index1, index2 = smallest_index
    node1 = node_order[index1]
    node2 = node_order[index2]
    # get the distance between the nodes and assign 1/2 the distance to the
    # Length property of each node
    if(method.lower() == 'nj'):
        dist = paired_node_distance(matrice, smallest_index)
    elif(method.lower() == 'upgma'):
        distance = matrice[index1, index2]
        dist = (distance / 2.0, distance / 2.0)
    else:
        dist = (0, 0)
    nodes = [node1, node2]
    pos = [0, 1]
    for ind in pos:
        nodes[ind].add_features(length=dist[ind])
    # combine the two nodes into a new TreeNode object
    new_node = TreeClass()
    new_node.add_child(node1)
    new_node.add_child(node2)
    new_node.add_features(length=sum(dist))
    # store the combined node at index2 (the smaller of the two indices)
    node_order[index2] = new_node
    # delete the slot at index1 (larger index), shrinking the list by one
    del node_order[index1]  # distance at i=index2 || j=index2
    return node_order
def NJ_cluster(matrice, node_order, nj_depth=None):
    """
    Node clustering with NJ
    matrice is a np array.
    node_order is a list of PhyloNode objects corresponding to the matrice.
    WARNING: Changes matrice in-place.
    before this function is called.
    """
    # this is for a test, should made it into one function with upgma
    num_entries = len(node_order)
    if not nj_depth or nj_depth > (num_entries - 1):
        nj_depth = num_entries - 1  # default, do all, same as upgma
    tree = None
    smallest_index = []
    for i in range(nj_depth):
        # recompute Q from the current (already condensed) distance matrix
        Q_matrix = calculate_Q_matrix(matrice)
        index_1, index_2 = find_smallest_index(Q_matrix)
        smallest_index = (index_1, index_2)
        # joins the two nodes inside node_order (the row_order alias is unused)
        row_order = condense_node_order(
            matrice, smallest_index, node_order, method='nj')
        matrice = condense_matrix(matrice, smallest_index, method='nj')
        # the freshly combined node now sits at the smaller index
        tree = node_order[smallest_index[1]]
    return tree, matrice, smallest_index
def UPGMA_cluster(matrice, node_order, upgma_depth=None):
"""cluster with UPGMA
matrice is a np array.
node_order is a list of TreeClass objects corresponding to the matrice.
WARNING: Changes matrice in-place.
before this function is called.
"""
num_entries = len(node_order)
if not upgma_depth or upgma_depth > (num_entries - 1):<|fim▁hole|> tree = None
smallest_index = []
for i in range(upgma_depth):
index_1, index_2 = find_smallest_index(matrice)
smallest_index = (index_1, index_2)
assert(index_1 > index_2)
row_order = condense_node_order(
matrice, smallest_index, node_order, method='upgma')
matrice = condense_matrix(matrice, smallest_index, method='upgma')
tree = node_order[smallest_index[1]]
return tree, matrice, smallest_index
def RAND_cluster(matrice, node_order, rand_depth=None):
    """Random clustering
    matrice is a np array.
    node_order is a list of PhyloNode objects corresponding to the matrice.
    WARNING: Changes matrice in-place.
    before this function is called.
    """
    num_entries = len(node_order)
    if not rand_depth or rand_depth > (num_entries - 1):
        rand_depth = num_entries - 1  # default, do all
    tree = None
    smallest_index = []
    for i in range(rand_depth):
        # NOTE(review): the comprehension reuses `i`, shadowing the loop
        # counter (harmless here since the counter is never read afterwards).
        tochoose = [i for i, t in enumerate(node_order) if t is not None]
        index1, index2 = random.sample(tochoose, 2)
        # order as (larger, smaller) to match find_smallest_index's i > j convention
        smallest_index = (max(index1, index2), min(index1, index2))
        node_order = condense_node_order(
            matrice, smallest_index, node_order, method='rand')
        tree = node_order[smallest_index[1]]
    return tree, matrice, smallest_index
def treeCluster(matrice, node_order, depth=None, method='upgma'):
    """Dispatch to NJ / random / UPGMA clustering; a 2-node input is joined directly."""
    if(len(node_order) == 2):
        # Only one join is possible: pair the two nodes without consulting the matrix.
        smallest_index = (1, 0)
        row_order = condense_node_order(
            matrice, smallest_index, node_order, method='rand')
        tree = node_order[smallest_index[1]]
        return tree, None, smallest_index
    if(method.lower() == 'nj'):
        return NJ_cluster(matrice, node_order, nj_depth=depth)
    elif(method.lower() == 'rand'):
        return RAND_cluster(matrice, node_order, rand_depth=depth)
    else:
        # any other method string falls back to UPGMA
        return UPGMA_cluster(matrice, node_order, upgma_depth=depth)
def distMatProcessor(distances, nFlagVal=1e305, nFlag=False, ignoreNodes=None):
    """Parse a distance matrix plus its node order for UPGMA or NJ joining.

    distances: a file path or the raw matrix content (phylip-like text or
        fastphylo xml).
    nFlagVal / nFlag: when nFlag is true, negative distances are replaced by
        the sentinel nFlagVal.
    ignoreNodes: optional iterable of node labels to drop from the result.
    Returns a (matrix, node_order) tuple.
    """
    # A mutable default argument ([]) was previously shared across calls.
    if ignoreNodes is None:
        ignoreNodes = []
    read_fl = False
    dist_matrix = []
    node_order = []
    matrix = None
    # Read in matrix if a file name is given; the handle is now closed
    # (it was previously leaked).
    if isinstance(distances, basestring) and os.path.exists(distances):
        with open(distances, 'rU') as dist_file:
            distances = dist_file.read()
    distances_lines = distances.splitlines()
    if '<?xml' in distances_lines[0]:
        # fastphylo xml input is parsed separately
        matrix, node_order = parseFastPhyloXml(
            StringIO(distances), nFlagVal, nFlag)
    else:
        x_ind = 0
        for line in distances_lines:
            line = line.strip()
            if(line):
                if not read_fl:
                    # the first non-empty line is the taxa-count header: skip it
                    read_fl = True
                else:
                    x_ind += 1
                    line_list = [getFloatValue(
                        x.strip(), x_ind, y_ind, nFlagVal, nFlag) for y_ind, x in enumerate(line.split())]
                    dist_matrix.append(line_list[1:])
                    node_order.append(line_list[0])
        matrix = np.array(dist_matrix, dtype=np.float)
    if ignoreNodes:
        for n in ignoreNodes:
            # list.index raises ValueError on a missing label, so guard with a
            # membership test first (the old `ind > -1` check could never fail).
            if n in node_order:
                ind = node_order.index(n)
                matrix = remove_ij(matrix, ind, ind)
                node_order.remove(n)
    return matrix, node_order
def makeFakeDstMatrice(n, dmin, dmax):
    """Return a random symmetric n x n distance matrix.

    Off-diagonal values are drawn uniformly from [dmin, dmax) and symmetrized
    by averaging with the transpose; the diagonal is zeroed.
    """
    raw = dmin + (dmax - dmin) * np.random.random_sample((n, n))
    sym = (raw + raw.T) / 2
    np.fill_diagonal(sym, 0)
    return sym
def saveMatrix(filename, matrix, node_order):
    """Write the matrix in a phylip-like layout: a tab + row count header,
    then one label-prefixed, space-separated row per line.

    WARNING: consumes node_order via pop(0) — the caller's list is emptied.
    Always returns True.
    """
    # matrix[np.where(matrix==1e305)]=0
    with open(filename, 'w+') as out:
        out.write("\t%i\n" % len(node_order))
        lines = []
        for entry in matrix.tolist():
            # each row starts with its node label, popped off the order list
            line = node_order.pop(0) + "\t" + " ".join(map(str, entry)) + "\n"
            lines.append(line)
        out.writelines(lines)
    return True
def getFloatValue(number, x_ind, y_ind, nFlagVal, nFlag=False):
    """Validate one distance-matrix token.

    Returns the token itself (a node label) when it is not numeric; otherwise
    returns 0 on the diagonal (x_ind == y_ind), nFlagVal for negatives when
    nFlag is set, and the parsed float in every other case.
    """
    try:
        parsed = float(number)
    except ValueError:
        # non-numeric token: this is the row's node label
        return number
    if nFlag and parsed < 0:
        parsed = nFlagVal
    return 0 if x_ind == y_ind else parsed
def parseFastPhyloXml(infile, nFlagVal, nFlag=False):
    """Parse the fastphylo xml format

    Reads the run dimension, the identity names (node order) and the first
    distance-matrix block; returns (distance_mat, node_order).
    """
    xml = etree.parse(infile)
    run = xml.find('//run')
    dimension = int(run.attrib['dim'])
    identities = run.find('identities')
    node_order = [i.attrib['name'] for i in identities.iter('identity')]
    dm = run.find('dms').find('dm')
    distance_mat = np.zeros(shape=(dimension, dimension), dtype=np.float)
    i = 0
    for node in dm.iter('row'):
        j = 0
        for entry in node.iter('entry'):
            val = float(entry.text)
            # negative distances optionally replaced by the sentinel nFlagVal
            if(val < 0 and nFlag):
                val = nFlagVal
            # mirror into both triangles: the matrix is symmetric
            distance_mat[i, j] = val
            distance_mat[j, i] = val
            j += 1
        i += 1
    return distance_mat, node_order<|fim▁end|>
<|file_name|>top-nav.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core';
<|fim▁hole|>})
export class TopNavComponent implements OnInit {
  // Banner text rendered by the top navigation template.
  title = 'Blogging Application';
  // No injected dependencies are required by this component.
  constructor() { }
  ngOnInit() {
  }
}<|fim▁end|>
selector: 'app-top-nav',
templateUrl: './top-nav.component.html',
styleUrls: ['./top-nav.component.css'] |
<|file_name|>data_resync.go<|end_file_name|><|fim▁begin|>package vpp
import (
"github.com/ligato/vpp-agent/clientv2/vpp"
"github.com/contiv/vpp/mock/localclient/dsl"
"github.com/ligato/vpp-agent/api/models/vpp/abf"
"github.com/ligato/vpp-agent/api/models/vpp/acl"
"github.com/ligato/vpp-agent/api/models/vpp/interfaces"
"github.com/ligato/vpp-agent/api/models/vpp/ipsec"
"github.com/ligato/vpp-agent/api/models/vpp/l2"
"github.com/ligato/vpp-agent/api/models/vpp/l3"
"github.com/ligato/vpp-agent/api/models/vpp/nat"
"github.com/ligato/vpp-agent/api/models/vpp/punt"
"github.com/ligato/vpp-agent/api/models/vpp/stn"
)
// MockDataResyncDSL is mock for DataResyncDSL.
// It records every resync item into the embedded CommonMockDSL key/value map
// instead of talking to a real VPP agent.
type MockDataResyncDSL struct {
	dsl.CommonMockDSL
}
// NewMockDataResyncDSL returns a mock resync DSL that records values and
// commits them through the given commit function.
func NewMockDataResyncDSL(commitFunc dsl.CommitFunc) *MockDataResyncDSL {
	mock := &MockDataResyncDSL{CommonMockDSL: dsl.NewCommonMockDSL(commitFunc)}
	return mock
}
// Interface records the given interface config under its derived key for the RESYNC request.
func (d *MockDataResyncDSL) Interface(val *vpp_interfaces.Interface) vppclient.DataResyncDSL {
	d.Values[vpp_interfaces.InterfaceKey(val.Name)] = val
	return d
}
// ABF records an ACL-based forwarding config under its derived key for the RESYNC request.
func (d *MockDataResyncDSL) ABF(val *vpp_abf.ABF) vppclient.DataResyncDSL {
	d.Values[vpp_abf.Key(val.Index)] = val
	return d
}
// BD records a VPP bridge domain under its derived key for the mock RESYNC request.
func (d *MockDataResyncDSL) BD(val *vpp_l2.BridgeDomain) vppclient.DataResyncDSL {
	d.Values[vpp_l2.BridgeDomainKey(val.Name)] = val
	return d
}
// BDFIB adds VPP L2 FIB to the mock RESYNC request.
func (d *MockDataResyncDSL) BDFIB(val *vpp_l2.FIBEntry) vppclient.DataResyncDSL {
key := vpp_l2.FIBKey(val.BridgeDomain, val.PhysAddress)
d.Values[key] = val
return d
}
// XConnect adds VPP Cross Connect to the mock RESYNC request.
func (d *MockDataResyncDSL) XConnect(val *vpp_l2.XConnectPair) vppclient.DataResyncDSL {
key := vpp_l2.XConnectKey(val.ReceiveInterface)
d.Values[key] = val
return d
}
// StaticRoute adds VPP L3 Static Route to the mock RESYNC request.
func (d *MockDataResyncDSL) StaticRoute(val *vpp_l3.Route) vppclient.DataResyncDSL {
key := vpp_l3.RouteKey(val.OutgoingInterface, val.VrfId, val.DstNetwork, val.NextHopAddr)
d.Values[key] = val
return d
}
// Span adds VPP span to the RESYNC request.<|fim▁hole|> key := vpp_interfaces.SpanKey(val.InterfaceFrom, val.InterfaceTo)
d.Values[key] = val
return d
}
// ACL adds VPP Access Control List to the mock RESYNC request.
func (d *MockDataResyncDSL) ACL(val *vpp_acl.ACL) vppclient.DataResyncDSL {
key := vpp_acl.Key(val.Name)
d.Values[key] = val
return d
}
// Arp adds VPP L3 ARP to the RESYNC request.
func (d *MockDataResyncDSL) Arp(val *vpp_l3.ARPEntry) vppclient.DataResyncDSL {
key := vpp_l3.ArpEntryKey(val.Interface, val.IpAddress)
d.Values[key] = val
return d
}
// ProxyArp adds L3 proxy ARP to the RESYNC request.
func (d *MockDataResyncDSL) ProxyArp(val *vpp_l3.ProxyARP) vppclient.DataResyncDSL {
key := vpp_l3.ProxyARPKey()
d.Values[key] = val
return d
}
// IPScanNeighbor adds L3 IP Scan Neighbor to the RESYNC request.
func (d *MockDataResyncDSL) IPScanNeighbor(val *vpp_l3.IPScanNeighbor) vppclient.DataResyncDSL {
key := vpp_l3.IPScanNeighborKey()
d.Values[key] = val
return d
}
// StnRule adds Stn rule to the RESYNC request.
func (d *MockDataResyncDSL) StnRule(val *vpp_stn.Rule) vppclient.DataResyncDSL {
key := vpp_stn.Key(val.Interface, val.IpAddress)
d.Values[key] = val
return d
}
// NAT44Global adds a request to RESYNC global configuration for NAT44
func (d *MockDataResyncDSL) NAT44Global(val *vpp_nat.Nat44Global) vppclient.DataResyncDSL {
key := vpp_nat.GlobalNAT44Key()
d.Values[key] = val
return d
}
// DNAT44 adds a request to RESYNC a new DNAT configuration
func (d *MockDataResyncDSL) DNAT44(val *vpp_nat.DNat44) vppclient.DataResyncDSL {
key := vpp_nat.DNAT44Key(val.Label)
d.Values[key] = val
return d
}
// IPSecSA adds request to create a new Security Association
func (d *MockDataResyncDSL) IPSecSA(val *vpp_ipsec.SecurityAssociation) vppclient.DataResyncDSL {
key := vpp_ipsec.SAKey(val.Index)
d.Values[key] = val
return d
}
// IPSecSPD adds request to create a new Security Policy Database
func (d *MockDataResyncDSL) IPSecSPD(val *vpp_ipsec.SecurityPolicyDatabase) vppclient.DataResyncDSL {
key := vpp_ipsec.SPDKey(val.Index)
d.Values[key] = val
return d
}
// PuntIPRedirect adds request to RESYNC a rule used to punt L3 traffic via interface.
func (d *MockDataResyncDSL) PuntIPRedirect(val *vpp_punt.IPRedirect) vppclient.DataResyncDSL {
key := vpp_punt.IPRedirectKey(val.L3Protocol, val.TxInterface)
d.Values[key] = val
return d
}
// PuntToHost adds request to RESYNC a rule used to punt L4 traffic to a host.
func (d *MockDataResyncDSL) PuntToHost(val *vpp_punt.ToHost) vppclient.DataResyncDSL {
key := vpp_punt.ToHostKey(val.L3Protocol, val.L4Protocol, val.Port)
d.Values[key] = val
return d
}
// PuntException adds request to RESYNC a rule used to add punt exception in punting traffic to a host.
func (d *MockDataResyncDSL) PuntException(val *vpp_punt.Exception) vppclient.DataResyncDSL {
key := vpp_punt.ExceptionKey(val.Reason)
d.Values[key] = val
return d
}
// VrfTable adds VRF table to the RESYNC request.
func (d *MockDataResyncDSL) VrfTable(val *vpp_l3.VrfTable) vppclient.DataResyncDSL {
key := vpp_l3.VrfTableKey(val.Id, val.Protocol)
d.Values[key] = val
return d
}
// Send commits the transaction into the mock DB.
func (d *MockDataResyncDSL) Send() vppclient.Reply {
err := d.CommitFunc(d.Values)
return &dsl.Reply{Err: err}
}<|fim▁end|> | func (d *MockDataResyncDSL) Span(val *vpp_interfaces.Span) vppclient.DataResyncDSL { |
<|file_name|>二分查找.py<|end_file_name|><|fim▁begin|>def search(L, e):
def bSearch(L, e, low, high):<|fim▁hole|>
if L[mid] == e:
return True
elif L[mid] > e:
if low == mid:
return False
else:
return bSearch(L, e, low, mid - 1)
else:
return bSearch(L, e, mid + 1, high)
if len(L) == 0:
return False
else:
return bSearch(L, e, 0, len(L) - 1)
L = [1, 2, 5, 3, 6, 7, 9, 10]
search(L, 8)<|fim▁end|> | if high == low:
return L[low] == e
mid = (low + high) // 2 |
<|file_name|>grid_test.ts<|end_file_name|><|fim▁begin|>/// <reference path="grid_astar.ts" />
// create abstract grid representation (no nodes here)
var grid =
[[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1],
[1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1],
[1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1],
[1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1],
[1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 1],
[1, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 1, 1, 1],
[1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1],
[1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1],
[1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1],
[1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]];
var tileSize = 20;
var start = [5,4];
var goal1 = [19,11];
var goal2 = [10,13];
window.onload = function() {
var canvas = document.getElementById("gridCanvas");
canvas.addEventListener("mousedown", function(event) {
var x = event.pageX - canvas.offsetLeft;
var y = event.pageY - canvas.offsetTop;
var cellX = Math.floor(x / tileSize);
var cellY = Math.floor(y / tileSize);
toggleGridCell(cellX, cellY);
testEuclidean();
}, false);
testEuclidean();
};
function toggleGridCell(x, y) {
if ((x === start[0] && y === start[1]) ||
(x === goal1[0] && y === goal1[1]) ||
(x === goal2[0] && y === goal2[1])) {
return;
}
if (grid[y][x] === 0) {
grid[y][x] = 1;
} else {
grid[y][x] = 0;
}
}
function drawGrid(path, visited) {
var canvas = <HTMLCanvasElement>document.getElementById("gridCanvas");
var context = canvas.getContext("2d");
var h = grid.length;
var w = grid[0].length;
for (var x = 0; x < w; x++) {
for (var y = 0; y < h; y++) {
if (grid[y][x] == 0) {
context.fillStyle = "#999";
} else {
context.fillStyle = "black";
}
context.fillRect(x*tileSize, y*tileSize, tileSize-1, tileSize-1);
}
}
for (var i = 0; i < visited.length; i++) {
var current = visited[i];
context.fillStyle = "lightgreen";
context.fillRect(current.x*tileSize, current.y*tileSize, tileSize-1, tileSize-1)
}
for (var i = 0; i < path.length; i++) {
var current = path[i];
context.fillStyle = "green";
context.fillRect(current.x*tileSize, current.y*tileSize, tileSize-1, tileSize-1)
}
context.fillStyle = "yellow";
context.fillRect(start[0]*tileSize, start[1]*tileSize, tileSize-1, tileSize-1);
context.fillStyle = "red";
context.fillRect(goal1[0]*tileSize, goal1[1]*tileSize, tileSize-1, tileSize-1);
context.fillRect(goal2[0]*tileSize, goal2[1]*tileSize, tileSize-1, tileSize-1);
}
function testHeuristic(heuristic) {
var graphGoal = new grid_astar.MultipleGoals([goal1,goal2]);
var graph = new astar.Graph(heuristic, graphGoal);
var graphStart = new grid_astar.Node(grid,start[0],start[1]);
var result = graph.searchPath(graphStart);
drawGrid(result.path, result.visited);
var resultString = document.getElementById("info");
if (result.found) {
resultString.innerHTML = "Length of path found: " + result.path.length;
} else {
resultString.innerHTML = "No path found.";
}
}
function testDijkstra(){
//test graph with no heuristics<|fim▁hole|>function testEuclidean(){
//test graph with Euclidean distance
testHeuristic(new grid_astar.EuclidianHeuristic());
}
function testManhattan(){
//test graph with Manhattan distance
testHeuristic(new grid_astar.ManhattanHeuristic());
}<|fim▁end|> | testHeuristic(new grid_astar.DijkstraHeuristic());
}
|
<|file_name|>pcValidation.js<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import angular from 'angular';
export class IgniteFormField {
static animName = 'ignite-form-field__error-blink';
static eventName = 'webkitAnimationEnd oAnimationEnd msAnimationEnd animationend';
static $inject = ['$element', '$scope'];
constructor($element, $scope) {
Object.assign(this, {$element});
this.$scope = $scope;
}
$postLink() {
this.onAnimEnd = () => this.$element.removeClass(IgniteFormField.animName);
this.$element.on(IgniteFormField.eventName, this.onAnimEnd);
}
$onDestroy() {
this.$element.off(IgniteFormField.eventName, this.onAnimEnd);
this.$element = this.onAnimEnd = null;
}
notifyAboutError() {
if (this.$element) this.$element.addClass(IgniteFormField.animName);
}
/**
* Exposes control in $scope
* @param {ng.INgModelController} control
*/
exposeControl(control, name = '$input') {
this.$scope[name] = control;
this.$scope.$on('$destroy', () => this.$scope[name] = null);
}
}
export default angular.module('ignite-console.page-configure.validation', [])
.directive('pcNotInCollection', function() {
class Controller {
/** @type {ng.INgModelController} */
ngModel;
/** @type {Array} */
items;
$onInit() {
this.ngModel.$validators.notInCollection = (item) => {
if (!this.items) return true;
return !this.items.includes(item);
};
}
$onChanges() {
this.ngModel.$validate();
}
}
return {
controller: Controller,
require: {
ngModel: 'ngModel'
},
bindToController: {
items: '<pcNotInCollection'
}
};
})
.directive('pcInCollection', function() {
class Controller {
/** @type {ng.INgModelController} */<|fim▁hole|> items;
/** @type {string} */
pluck;
$onInit() {
this.ngModel.$validators.inCollection = (item) => {
if (!this.items) return false;
const items = this.pluck ? this.items.map((i) => i[this.pluck]) : this.items;
return Array.isArray(item)
? item.every((i) => items.includes(i))
: items.includes(item);
};
}
$onChanges() {
this.ngModel.$validate();
}
}
return {
controller: Controller,
require: {
ngModel: 'ngModel'
},
bindToController: {
items: '<pcInCollection',
pluck: '@?pcInCollectionPluck'
}
};
})
.directive('pcPowerOfTwo', function() {
class Controller {
/** @type {ng.INgModelController} */
ngModel;
$onInit() {
this.ngModel.$validators.powerOfTwo = (value) => {
return !value || ((value & -value) === value);
};
}
}
return {
controller: Controller,
require: {
ngModel: 'ngModel'
},
bindToController: true
};
})
.directive('bsCollapseTarget', function() {
return {
require: {
bsCollapse: '^^bsCollapse'
},
bindToController: true,
controller: ['$element', '$scope', function($element, $scope) {
this.open = function() {
const index = this.bsCollapse.$targets.indexOf($element);
const isActive = this.bsCollapse.$targets.$active.includes(index);
if (!isActive) this.bsCollapse.$setActive(index);
};
this.$onDestroy = () => this.open = $element = null;
}]
};
})
.directive('ngModel', ['$timeout', function($timeout) {
return {
require: ['ngModel', '?^^bsCollapseTarget', '?^^igniteFormField', '?^^panelCollapsible'],
link(scope, el, attr, [ngModel, bsCollapseTarget, igniteFormField, panelCollapsible]) {
const off = scope.$on('$showValidationError', (e, target) => {
if (target !== ngModel) return;
ngModel.$setTouched();
bsCollapseTarget && bsCollapseTarget.open();
panelCollapsible && panelCollapsible.open();
$timeout(() => {
if (el[0].scrollIntoViewIfNeeded)
el[0].scrollIntoViewIfNeeded();
else
el[0].scrollIntoView();
if (!attr.bsSelect) $timeout(() => el[0].focus());
igniteFormField && igniteFormField.notifyAboutError();
});
});
}
};
}])
.directive('igniteFormField', function() {
return {
restrict: 'C',
controller: IgniteFormField,
scope: true
};
})
.directive('isValidJavaIdentifier', ['IgniteLegacyUtils', function(LegacyUtils) {
return {
link(scope, el, attr, ngModel) {
ngModel.$validators.isValidJavaIdentifier = (value) => LegacyUtils.VALID_JAVA_IDENTIFIER.test(value);
},
require: 'ngModel'
};
}])
.directive('notJavaReservedWord', ['IgniteLegacyUtils', function(LegacyUtils) {
return {
link(scope, el, attr, ngModel) {
ngModel.$validators.notJavaReservedWord = (value) => !LegacyUtils.JAVA_KEYWORDS.includes(value);
},
require: 'ngModel'
};
}]);<|fim▁end|> | ngModel;
/** @type {Array} */ |
<|file_name|>helpers.js<|end_file_name|><|fim▁begin|>export const generateGuid = () => {
function s4() {<|fim▁hole|> return Math.floor((1 + Math.random()) * 0x10000)
.toString(16)
.substring(1);
}
return `${s4() + s4()}-${s4()}${s4()}`;
};
export default {
generateGuid,
};<|fim▁end|> | |
<|file_name|>test_toggle.py<|end_file_name|><|fim▁begin|>from tests import tests<|fim▁hole|>
def test_toggle():
temporary = tests.toggled_seats
assert temporary == [[1, 1, 1], [1, 1, 1], [1, 1, 1]]<|fim▁end|> | |
<|file_name|>example_group.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>use std::sync::{Arc, Mutex};
use std::fmt;
use std::thread::JoinHandle;
use std::panic::{recover, RecoverSafe, UnwindSafe};
use world_state;
use util::{await_handles, SourceLocation};
use example::{Example, ExampleResult};
use example_group::example_group_result::{ExampleGroupResult};
use std::fmt::{Display, Formatter, Error};
#[derive(Debug)]
pub struct ExampleGroupMetadata {
pub description: String,
pub source_location: SourceLocation,
}
impl Display for ExampleGroupMetadata {
fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
write!(f, "`{}` at {}", self.description, self.source_location)
}
}
pub struct ExampleGroup {
examples: Vec<Example>,
pub metadata: ExampleGroupMetadata
}
impl fmt::Debug for ExampleGroup {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "<Example group with metadata {:?}>", self.metadata)
}
}
impl ExampleGroup {
pub fn new(description: &str, source_location: SourceLocation) -> ExampleGroup {
ExampleGroup {
examples: Vec::new(),
metadata: ExampleGroupMetadata {
description: description.to_string(),
source_location: source_location,
}
}
}
pub fn it<F>(&mut self, description: &str, source_location: SourceLocation, example_definition_block: F) where F: Fn() + Send + RecoverSafe + UnwindSafe + 'static {
let recovery_proc = Box::new(|| recover(example_definition_block));
let example = Example::new(description.into(), source_location, recovery_proc);
self.examples.push(example);
}
pub fn run(mut self, state: Arc<Mutex<world_state::WorldState>>, block: Box<Fn(&mut ExampleGroup) + Send + 'static>) -> ExampleGroupResult {
block(&mut self);
let running_examples = Self::build_running_examples(state, self.examples);
let results = await_handles(running_examples);
return ExampleGroupResult::new(self.metadata, results);
}
fn build_running_examples(state: Arc<Mutex<world_state::WorldState>>, examples: Vec<Example>) -> Vec<JoinHandle<ExampleResult>> {
examples.into_iter().map(|example| {
let state = state.clone();
example.spawn(state)
}).collect()
}
}<|fim▁end|> | |
<|file_name|>FlatDhcpAcquireDhcpServerIpReply.java<|end_file_name|><|fim▁begin|>package org.zstack.network.service.flat;
import org.zstack.header.message.MessageReply;
import org.zstack.header.network.l3.IpRangeInventory;
import org.zstack.header.network.l3.UsedIpInventory;
/**
* Created by frank on 10/11/2015.
*/
public class FlatDhcpAcquireDhcpServerIpReply extends MessageReply {
private String ip;
private String netmask;
private String usedIpUuid;
private UsedIpInventory usedIp;
private IpRangeInventory ipr;
public String getNetmask() {
return netmask;
}
public void setNetmask(String netmask) {
this.netmask = netmask;
}
public String getIp() {
return ip;
}
public void setIp(String ip) {
this.ip = ip;
}
public String getUsedIpUuid() {
return usedIpUuid;
}
public void setUsedIpUuid(String usedIpUuid) {
this.usedIpUuid = usedIpUuid;
}<|fim▁hole|> public UsedIpInventory getUsedIp() {
return usedIp;
}
public void setUsedIp(UsedIpInventory usedIp) {
this.usedIp = usedIp;
}
public IpRangeInventory getIpr() {
return ipr;
}
public void setIpr(IpRangeInventory ipr) {
this.ipr = ipr;
}
}<|fim▁end|> | |
<|file_name|>serve-static.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for serve-static 1.7.1
// Project: https://github.com/expressjs/serve-static
// Definitions by: Uros Smolnik <https://github.com/urossmolnik/>
// Definitions: https://github.com/borisyankov/DefinitelyTyped
/* =================== USAGE ===================<|fim▁hole|> import * as serveStatic from "serve-static";
app.use(serveStatic("public/ftp", {"index": ["default.html", "default.htm"]}))
=============================================== */
/// <reference path="../express/express.d.ts" />
/// <reference path="../mime/mime.d.ts" />
declare module "serve-static" {
import * as express from "express";
/**
* Create a new middleware function to serve files from within a given root directory.
* The file to serve will be determined by combining req.url with the provided root directory.
* When a file is not found, instead of sending a 404 response, this module will instead call next() to move on to the next middleware, allowing for stacking and fall-backs.
*/
function serveStatic(root: string, options?: {
/**
* Set how "dotfiles" are treated when encountered. A dotfile is a file or directory that begins with a dot (".").
* Note this check is done on the path itself without checking if the path actually exists on the disk.
* If root is specified, only the dotfiles above the root are checked (i.e. the root itself can be within a dotfile when when set to "deny").
* The default value is 'ignore'.
* 'allow' No special treatment for dotfiles
* 'deny' Send a 403 for any request for a dotfile
* 'ignore' Pretend like the dotfile does not exist and call next()
*/
dotfiles?: string;
/**
* Enable or disable etag generation, defaults to true.
*/
etag?: boolean;
/**
* Set file extension fallbacks. When set, if a file is not found, the given extensions will be added to the file name and search for.
* The first that exists will be served. Example: ['html', 'htm'].
* The default value is false.
*/
extensions?: string[];
/**
* By default this module will send "index.html" files in response to a request on a directory.
* To disable this set false or to supply a new index pass a string or an array in preferred order.
*/
index?: boolean|string|string[];
/**
* Enable or disable Last-Modified header, defaults to true. Uses the file system's last modified value.
*/
lastModified?: boolean;
/**
* Provide a max-age in milliseconds for http caching, defaults to 0. This can also be a string accepted by the ms module.
*/
maxAge?: number|string;
/**
* Redirect to trailing "/" when the pathname is a dir. Defaults to true.
*/
redirect?: boolean;
/**
* Function to set custom headers on response. Alterations to the headers need to occur synchronously.
* The function is called as fn(res, path, stat), where the arguments are:
* res the response object
* path the file path that is being sent
* stat the stat object of the file that is being sent
*/
setHeaders?: (res: express.Response, path: string, stat: any) => any;
}): express.Handler;
import * as m from "mime";
module serveStatic {
var mime: typeof m;
}
export = serveStatic;
}<|fim▁end|> | |
<|file_name|>shipsptoptimalbonusmf.py<|end_file_name|><|fim▁begin|><|fim▁hole|># shipSPTOptimalBonusMF
#
# Used by:
# Ship: Chremoas
type = "passive"
def handler(fit, ship, context):
fit.modules.filteredItemBoost(lambda mod: mod.item.requiresSkill("Small Projectile Turret"),
"maxRange", ship.getModifiedItemAttr("shipBonusMF"), skill="Minmatar Frigate")<|fim▁end|> | |
<|file_name|>ord.js<|end_file_name|><|fim▁begin|>function ord (string) {
// From: http://phpjs.org/functions
// + original by: Kevin van Zonneveld (http://kevin.vanzonneveld.net)
// + bugfixed by: Onno Marsman
// + improved by: Brett Zamir (http://brett-zamir.me)
// + input by: incidence
// * example 1: ord('K');
// * returns 1: 75
// * example 2: ord('\uD800\uDC00'); // surrogate pair to create a single Unicode character
// * returns 2: 65536
var str = string + '',
code = str.charCodeAt(0);
if (0xD800 <= code && code <= 0xDBFF) { // High surrogate (could change last hex to 0xDB7F to treat high private surrogates as single characters)
var hi = code;
if (str.length === 1) {
return code; // This is just a high surrogate with no following low surrogate, so we return its value;
// we could also throw an error as it is not a complete character, but someone may want to know
}
var low = str.charCodeAt(1);
return ((hi - 0xD800) * 0x400) + (low - 0xDC00) + 0x10000;
}
if (0xDC00 <= code && code <= 0xDFFF) { // Low surrogate
return code; // This is just a low surrogate with no preceding high surrogate, so we return its value;<|fim▁hole|> }
return code;
}<|fim▁end|> | // we could also throw an error as it is not a complete character, but someone may want to know |
<|file_name|>notes.js<|end_file_name|><|fim▁begin|>// The following are instance methods and variables
var Note = Class.create({
// Constructor. Caches the note's DOM elements and natural-size geometry,
// remembers the original body text so Cancel can restore it, sets the box
// translucency, and wires up all mouse event listeners.
//
// id       - numeric note id; used to locate the note-box/corner/body elements.
// is_new   - true when this note has not yet been saved server-side.
// raw_body - the unformatted note text as stored on the server.
initialize: function(id, is_new, raw_body) {
  if (Note.debug) {
    console.debug("Note#initialize (id=%d)", id)
  }
  this.id = id
  this.is_new = is_new
  this.document_observers = [];

  // Cache the elements
  this.elements = {
    box: $('note-box-' + this.id),
    corner: $('note-corner-' + this.id),
    body: $('note-body-' + this.id),
    image: $('image')
  }

  // Cache the dimensions (in the image's natural/full-size coordinate space;
  // adjustScale() maps these to display pixels)
  this.fullsize = {
    left: this.elements.box.offsetLeft,
    top: this.elements.box.offsetTop,
    width: this.elements.box.clientWidth,
    height: this.elements.box.clientHeight
  }

  // Store the original values (in case the user clicks Cancel)
  this.old = {
    raw_body: raw_body,
    formatted_body: this.elements.body.innerHTML
  }
  // NOTE(review): `p` is missing a `var` here and leaks into the global
  // scope — harmless in practice but worth fixing.
  for (p in this.fullsize) {
    this.old[p] = this.fullsize[p]
  }

  // Make the note translucent (new notes are fainter until saved)
  if (is_new) {
    this.elements.box.setOpacity(0.2)
  } else {
    this.elements.box.setOpacity(0.5)
  }

  // A brand-new, empty note gets a fixed body height and skips auto-fitting
  if (is_new && raw_body == '') {
    this.bodyfit = true
    this.elements.body.style.height = "100px"
  }

  // Attach the event listeners
  this.elements.box.observe("mousedown", this.dragStart.bindAsEventListener(this))
  this.elements.box.observe("mouseout", this.bodyHideTimer.bindAsEventListener(this))
  this.elements.box.observe("mouseover", this.bodyShow.bindAsEventListener(this))
  this.elements.corner.observe("mousedown", this.resizeStart.bindAsEventListener(this))
  this.elements.body.observe("mouseover", this.bodyShow.bindAsEventListener(this))
  this.elements.body.observe("mouseout", this.bodyHideTimer.bindAsEventListener(this))
  this.elements.body.observe("click", this.showEditBox.bindAsEventListener(this))

  this.adjustScale()
},
textValue: function() {
if (Note.debug) {
console.debug("Note#textValue (id=%d)", this.id)
}
return this.old.raw_body.strip()
},
// Removes the edit box
hideEditBox: function(e) {
if (Note.debug) {
console.debug("Note#hideEditBox (id=%d)", this.id)
}
var editBox = $('edit-box')
if (editBox != null) {
var boxid = editBox.noteid
$("edit-box").stopObserving()
$("note-save-" + boxid).stopObserving()
$("note-cancel-" + boxid).stopObserving()
$("note-remove-" + boxid).stopObserving()
$("note-history-" + boxid).stopObserving()
$("edit-box").remove()
}
},
// Shows the edit box
showEditBox: function(e) {
if (Note.debug) {
console.debug("Note#showEditBox (id=%d)", this.id)
}
this.hideEditBox(e)
var insertionPosition = Note.getInsertionPosition()
var top = insertionPosition[0]
var left = insertionPosition[1]
var html = ""
html += '<div id="edit-box" style="top: '+top+'px; left: '+left+'px; position: absolute; visibility: visible; z-index: 100; background: white; border: 1px solid black; padding: 12px;">'
html += '<form onsubmit="return false;" style="padding: 0; margin: 0;">'
html += '<textarea rows="7" id="edit-box-text" style="width: 350px; margin: 2px 2px 12px 2px;">' + this.textValue() + '</textarea>'
html += '<input type="submit" value="Save" name="save" id="note-save-' + this.id + '">'
html += '<input type="submit" value="Cancel" name="cancel" id="note-cancel-' + this.id + '">'
html += '<input type="submit" value="Remove" name="remove" id="note-remove-' + this.id + '">'
html += '<input type="submit" value="History" name="history" id="note-history-' + this.id + '">'
html += '</form>'
html += '</div>'
$("note-container").insert({bottom: html})
$('edit-box').noteid = this.id
$("edit-box").observe("mousedown", this.editDragStart.bindAsEventListener(this))
$("note-save-" + this.id).observe("click", this.save.bindAsEventListener(this))
$("note-cancel-" + this.id).observe("click", this.cancel.bindAsEventListener(this))
$("note-remove-" + this.id).observe("click", this.remove.bindAsEventListener(this))
$("note-history-" + this.id).observe("click", this.history.bindAsEventListener(this))
$("edit-box-text").focus()
},
// Shows the body text for the note
bodyShow: function(e) {
if (Note.debug) {
console.debug("Note#bodyShow (id=%d)", this.id)
}
if (this.dragging) {
return
}
if (this.hideTimer) {
clearTimeout(this.hideTimer)
this.hideTimer = null
}
if (Note.noteShowingBody == this) {
return
}
if (Note.noteShowingBody) {
Note.noteShowingBody.bodyHide()
}
Note.noteShowingBody = this
if (Note.zindex >= 9) {
/* don't use more than 10 layers (+1 for the body, which will always be above all notes) */
Note.zindex = 0
for (var i=0; i< Note.all.length; ++i) {
Note.all[i].elements.box.style.zIndex = 0
}
}
this.elements.box.style.zIndex = ++Note.zindex
this.elements.body.style.zIndex = 10
this.elements.body.style.top = 0 + "px"
this.elements.body.style.left = 0 + "px"
var dw = document.documentElement.scrollWidth
this.elements.body.style.visibility = "hidden"
this.elements.body.style.display = "block"
if (!this.bodyfit) {
this.elements.body.style.height = "auto"
this.elements.body.style.minWidth = "140px"
var w = null, h = null, lo = null, hi = null, x = null, last = null
w = this.elements.body.offsetWidth
h = this.elements.body.offsetHeight
if (w/h < 1.6180339887) {
/* for tall notes (lots of text), find more pleasant proportions */
lo = 140, hi = 400
do {
last = w
x = (lo+hi)/2
this.elements.body.style.minWidth = x + "px"
w = this.elements.body.offsetWidth
h = this.elements.body.offsetHeight
if (w/h < 1.6180339887) lo = x
else hi = x
} while ((lo < hi) && (w > last))
} else if (this.elements.body.scrollWidth <= this.elements.body.clientWidth) {
/* for short notes (often a single line), make the box no wider than necessary */
// scroll test necessary for Firefox
lo = 20, hi = w
do {
x = (lo+hi)/2
this.elements.body.style.minWidth = x + "px"
if (this.elements.body.offsetHeight > h) lo = x
else hi = x
} while ((hi - lo) > 4)
if (this.elements.body.offsetHeight > h)
this.elements.body.style.minWidth = hi + "px"
}
if (Prototype.Browser.IE) {
// IE7 adds scrollbars if the box is too small, obscuring the text
if (this.elements.body.offsetHeight < 35) {
this.elements.body.style.minHeight = "35px"
}
if (this.elements.body.offsetWidth < 47) {
this.elements.body.style.minWidth = "47px"
}
}
this.bodyfit = true
}
this.elements.body.style.top = (this.elements.box.offsetTop + this.elements.box.clientHeight + 5) + "px"
// keep the box within the document's width
var l = 0, e = this.elements.box
do { l += e.offsetLeft } while (e = e.offsetParent)
l += this.elements.body.offsetWidth + 10 - dw
if (l > 0)
this.elements.body.style.left = this.elements.box.offsetLeft - l + "px"
else
this.elements.body.style.left = this.elements.box.offsetLeft + "px"
this.elements.body.style.visibility = "visible"
},
// Creates a timer that will hide the body text for the note
bodyHideTimer: function(e) {
if (Note.debug) {
console.debug("Note#bodyHideTimer (id=%d)", this.id)
}
this.hideTimer = setTimeout(this.bodyHide.bindAsEventListener(this), 250)
},
// Hides the body text for the note
bodyHide: function(e) {
if (Note.debug) {
console.debug("Note#bodyHide (id=%d)", this.id)
}
this.elements.body.hide()
if (Note.noteShowingBody == this) {
Note.noteShowingBody = null
}
},
addDocumentObserver: function(name, func)
{
document.observe(name, func);
this.document_observers.push([name, func]);
},
clearDocumentObservers: function(name, handler)
{
for(var i = 0; i < this.document_observers.length; ++i)
{
var observer = this.document_observers[i];
document.stopObserving(observer[0], observer[1]);
}
this.document_observers = [];
},
// Start dragging the note
dragStart: function(e) {
if (Note.debug) {
console.debug("Note#dragStart (id=%d)", this.id)
}
this.addDocumentObserver("mousemove", this.drag.bindAsEventListener(this))
this.addDocumentObserver("mouseup", this.dragStop.bindAsEventListener(this))
this.addDocumentObserver("selectstart", function() {return false})
this.cursorStartX = e.pointerX()
this.cursorStartY = e.pointerY()
this.boxStartX = this.elements.box.offsetLeft
this.boxStartY = this.elements.box.offsetTop
this.boundsX = new ClipRange(5, this.elements.image.clientWidth - this.elements.box.clientWidth - 5)
this.boundsY = new ClipRange(5, this.elements.image.clientHeight - this.elements.box.clientHeight - 5)
this.dragging = true
this.bodyHide()
},
// Stop dragging the note
dragStop: function(e) {
if (Note.debug) {
console.debug("Note#dragStop (id=%d)", this.id)
}
this.clearDocumentObservers()
this.cursorStartX = null
this.cursorStartY = null
this.boxStartX = null
this.boxStartY = null
this.boundsX = null
this.boundsY = null
this.dragging = false
this.bodyShow()
},
ratio: function() {
return this.elements.image.width / this.elements.image.getAttribute("large_width")
// var ratio = this.elements.image.width / this.elements.image.getAttribute("large_width")
// if (this.elements.image.scale_factor != null)
// ratio *= this.elements.image.scale_factor;
// return ratio
},
// Scale the notes for when the image gets resized
adjustScale: function() {
if (Note.debug) {
console.debug("Note#adjustScale (id=%d)", this.id)
}
var ratio = this.ratio()
for (p in this.fullsize) {
this.elements.box.style[p] = this.fullsize[p] * ratio + 'px'
}
},
// Update the note's position as it gets dragged
drag: function(e) {
var left = this.boxStartX + e.pointerX() - this.cursorStartX
var top = this.boxStartY + e.pointerY() - this.cursorStartY
left = this.boundsX.clip(left)
top = this.boundsY.clip(top)
this.elements.box.style.left = left + 'px'
this.elements.box.style.top = top + 'px'
var ratio = this.ratio()
this.fullsize.left = left / ratio
this.fullsize.top = top / ratio
e.stop()
},
// Start dragging the edit box
editDragStart: function(e) {
if (Note.debug) {
console.debug("Note#editDragStart (id=%d)", this.id)
}
var node = e.element().nodeName
if (node != 'FORM' && node != 'DIV') {
return
}
this.addDocumentObserver("mousemove", this.editDrag.bindAsEventListener(this))
this.addDocumentObserver("mouseup", this.editDragStop.bindAsEventListener(this))
this.addDocumentObserver("selectstart", function() {return false})
this.elements.editBox = $('edit-box');
this.cursorStartX = e.pointerX()
this.cursorStartY = e.pointerY()
this.editStartX = this.elements.editBox.offsetLeft
this.editStartY = this.elements.editBox.offsetTop
this.dragging = true
},
// Stop dragging the edit box
editDragStop: function(e) {
if (Note.debug) {
console.debug("Note#editDragStop (id=%d)", this.id)
}
this.clearDocumentObservers()
this.cursorStartX = null
this.cursorStartY = null
this.editStartX = null
this.editStartY = null
this.dragging = false<|fim▁hole|> },
  // Update the edit box's position as it gets dragged
  editDrag: function(e) {
    var left = this.editStartX + e.pointerX() - this.cursorStartX
    var top = this.editStartY + e.pointerY() - this.cursorStartY
    // Unlike note dragging, the edit box is not clamped to the image bounds.
    this.elements.editBox.style.left = left + 'px'
    this.elements.editBox.style.top = top + 'px'
    e.stop()
  },
  // Start resizing the note
  resizeStart: function(e) {
    if (Note.debug) {
      console.debug("Note#resizeStart (id=%d)", this.id)
    }
    // Snapshot cursor position and box geometry for relative resizing.
    this.cursorStartX = e.pointerX()
    this.cursorStartY = e.pointerY()
    this.boxStartWidth = this.elements.box.clientWidth
    this.boxStartHeight = this.elements.box.clientHeight
    this.boxStartX = this.elements.box.offsetLeft
    this.boxStartY = this.elements.box.offsetTop
    // Minimum size 10px; keep the box inside the image with a 5px margin.
    this.boundsX = new ClipRange(10, this.elements.image.clientWidth - this.boxStartX - 5)
    this.boundsY = new ClipRange(10, this.elements.image.clientHeight - this.boxStartY - 5)
    this.dragging = true
    this.clearDocumentObservers()
    this.addDocumentObserver("mousemove", this.resize.bindAsEventListener(this))
    this.addDocumentObserver("mouseup", this.resizeStop.bindAsEventListener(this))
    e.stop()
    this.bodyHide()
  },
// Stop resizing teh note
resizeStop: function(e) {
if (Note.debug) {
console.debug("Note#resizeStop (id=%d)", this.id)
}
this.clearDocumentObservers()
this.boxCursorStartX = null
this.boxCursorStartY = null
this.boxStartWidth = null
this.boxStartHeight = null
this.boxStartX = null
this.boxStartY = null
this.boundsX = null
this.boundsY = null
this.dragging = false
e.stop()
},
  // Update the note's dimensions as it gets resized
  resize: function(e) {
    var width = this.boxStartWidth + e.pointerX() - this.cursorStartX
    var height = this.boxStartHeight + e.pointerY() - this.cursorStartY
    // Clamp to the size bounds computed in resizeStart.
    width = this.boundsX.clip(width)
    height = this.boundsY.clip(height)
    this.elements.box.style.width = width + "px"
    this.elements.box.style.height = height + "px"
    // Keep the full-image-coordinate size in sync.
    var ratio = this.ratio()
    this.fullsize.width = width / ratio
    this.fullsize.height = height / ratio
    e.stop()
  },
  // Save the note to the database
  save: function(e) {
    if (Note.debug) {
      console.debug("Note#save (id=%d)", this.id)
    }
    var note = this
    // Snapshot the current geometry and body so cancel() can restore this saved state.
    for (p in this.fullsize) {
      this.old[p] = this.fullsize[p]
    }
    this.old.raw_body = $('edit-box-text').value
    this.old.formatted_body = this.textValue()
    // FIXME: this is not quite how the note will look (filtered elems, <tn>...). the user won't input a <script> that only damages him, but it might be nice to "preview" the <tn> here
    this.elements.body.update(this.textValue())
    this.hideEditBox(e)
    this.bodyHide()
    this.bodyfit = false
    // Geometry is sent in full-image coordinates (the `old` snapshot above).
    var params = {
      "id": this.id,
      "note[x]": this.old.left,
      "note[y]": this.old.top,
      "note[width]": this.old.width,
      "note[height]": this.old.height,
      "note[body]": this.old.raw_body
    }
    if (this.is_new) {
      params["note[post_id]"] = Note.post_id
    }
    notice("Saving note...")
    new Ajax.Request('/note/update.json', {
      parameters: params,
      onComplete: function(resp) {
        var resp = resp.responseJSON
        if (resp.success) {
          notice("Note saved")
          // New notes are created client-side with negative ids; swap in the
          // server-assigned id (and element ids) once the save succeeds.
          var note = Note.find(resp.old_id)
          if (resp.old_id < 0) {
            note.is_new = false
            note.id = resp.new_id
            note.elements.box.id = 'note-box-' + note.id
            note.elements.body.id = 'note-body-' + note.id
            note.elements.corner.id = 'note-corner-' + note.id
          }
          note.elements.body.innerHTML = resp.formatted_body
          note.elements.box.setOpacity(0.5)
          note.elements.box.removeClassName('unsaved')
        } else {
          notice("Error: " + resp.reason)
          note.elements.box.addClassName('unsaved')
        }
      }
    })
    e.stop()
  },
  // Revert the note to the last saved state
  cancel: function(e) {
    if (Note.debug) {
      console.debug("Note#cancel (id=%d)", this.id)
    }
    this.hideEditBox(e)
    this.bodyHide()
    // Restore geometry from the `old` snapshot, rescaled to the current display size.
    var ratio = this.ratio()
    for (p in this.fullsize) {
      this.fullsize[p] = this.old[p]
      this.elements.box.style[p] = this.fullsize[p] * ratio + 'px'
    }
    this.elements.body.innerHTML = this.old.formatted_body
    e.stop()
  },
  // Remove all references to the note from the page
  removeCleanup: function() {
    if (Note.debug) {
      console.debug("Note#removeCleanup (id=%d)", this.id)
    }
    this.elements.box.remove()
    this.elements.body.remove()
    // Rebuild Note.all without this note rather than mutating it in place.
    var allTemp = []
    for (i=0; i<Note.all.length; ++i) {
      if (Note.all[i].id != this.id) {
        allTemp.push(Note.all[i])
      }
    }
    Note.all = allTemp
    Note.updateNoteCount()
  },
  // Removes a note from the database
  remove: function(e) {
    if (Note.debug) {
      console.debug("Note#remove (id=%d)", this.id)
    }
    this.hideEditBox(e)
    this.bodyHide()
    this_note = this
    if (this.is_new) {
      // Unsaved notes only exist client-side; nothing to tell the server.
      this.removeCleanup()
      notice("Note removed")
    } else {
      notice("Removing note...")
      // Soft delete: the server marks the note inactive (is_active=0) rather
      // than deleting the row.
      new Ajax.Request('/note/update.json', {
        parameters: {
          "id": this.id,
          "note[is_active]": "0"
        },
        onComplete: function(resp) {
          var resp = resp.responseJSON
          if (resp.success) {
            notice("Note removed")
            this_note.removeCleanup()
          } else {
            notice("Error: " + resp.reason)
          }
        }
      })
    }
    e.stop()
  },
  // Redirect to the note's history
  history: function(e) {
    if (Note.debug) {
      console.debug("Note#history (id=%d)", this.id)
    }
    this.hideEditBox(e)
    if (this.is_new) {
      // Unsaved notes have never been persisted, so there is no history page.
      notice("This note has no history")
    } else {
      location.href = '/history?search=notes:' + this.id
    }
    e.stop()
  }
})
// The following are class methods and variables
Object.extend(Note, {
  zindex: 0,
  // Ids for not-yet-saved notes; decremented so they never collide with server ids.
  counter: -1,
  all: [],
  display: true,
  debug: false,
  // Show all notes
  show: function() {
    if (Note.debug) {
      console.debug("Note.show")
    }
    $("note-container").show()
  },
  // Hide all notes
  hide: function() {
    if (Note.debug) {
      console.debug("Note.hide")
    }
    $("note-container").hide()
  },
  // Find a note instance based on the id number
  find: function(id) {
    if (Note.debug) {
      console.debug("Note.find")
    }
    for (var i=0; i<Note.all.size(); ++i) {
      if (Note.all[i].id == id) {
        return Note.all[i]
      }
    }
    // No note with the requested id.
    return null
  },
  // Toggle the display of all notes
  toggle: function() {
    if (Note.debug) {
      console.debug("Note.toggle")
    }
    if (Note.display) {
      Note.hide()
      Note.display = false
    } else {
      Note.show()
      Note.display = true
    }
  },
  // Update the text displaying the number of notes a post has
  updateNoteCount: function() {
    if (Note.debug) {
      console.debug("Note.updateNoteCount")
    }
    if (Note.all.length > 0) {
      var label = ""
      if (Note.all.length == 1)
        label = "note"
      else
        label = "notes"
      $('note-count').innerHTML = "This post has <a href=\"/note/history?post_id=" + Note.post_id + "\">" + Note.all.length + " " + label + "</a>"
    } else {
      $('note-count').innerHTML = ""
    }
  },
  // Create a new note
  create: function() {
    if (Note.debug) {
      console.debug("Note.create")
    }
    Note.show()
    var insertion_position = Note.getInsertionPosition()
    var top = insertion_position[0]
    var left = insertion_position[1]
    var html = ''
    html += '<div class="note-box unsaved" style="width: 150px; height: 150px; '
    html += 'top: ' + top + 'px; '
    html += 'left: ' + left + 'px;" '
    html += 'id="note-box-' + Note.counter + '">'
    html += '<div class="note-corner" id="note-corner-' + Note.counter + '"></div>'
    html += '</div>'
    html += '<div class="note-body" title="Click to edit" id="note-body-' + Note.counter + '"></div>'
    $("note-container").insert({bottom: html})
    var note = new Note(Note.counter, true, "")
    Note.all.push(note)
    // Next unsaved note gets a fresh negative id.
    Note.counter -= 1
  },
  // Find a suitable position to insert new notes
  getInsertionPosition: function() {
    if (Note.debug) {
      console.debug("Note.getInsertionPosition")
    }
    // We want to show the edit box somewhere on the screen, but not outside the image.
    var scroll_x = $("image").cumulativeScrollOffset()[0]
    var scroll_y = $("image").cumulativeScrollOffset()[1]
    var image_left = $("image").positionedOffset()[0]
    var image_top = $("image").positionedOffset()[1]
    var image_right = image_left + $("image").width
    var image_bottom = image_top + $("image").height
    var left = 0
    var top = 0
    if (scroll_x > image_left) {
      left = scroll_x
    } else {
      left = image_left
    }
    if (scroll_y > image_top) {
      top = scroll_y
    } else {
      top = image_top + 20
    }
    if (top > image_bottom) {
      top = image_top + 20
    }
    // Note the ordering: returned as [top, left].
    return [top, left]
  }
})
"""
===================
Resource Management
===================
This module provides a tool to manage dependencies on resources within a
:mod:`vivarium` simulation. These resources take the form of things that can
be created and utilized by components, for example columns in the
:mod:`state table <vivarium.framework.population>`
or :mod:`named value pipelines <vivarium.framework.values>`.
Because these resources need to be created before they can be used, they are
sensitive to ordering. The intent behind this tool is to provide an interface
that allows other managers to register resources with the resource manager
and in turn ask for ordered sequences of these resources according to their
dependencies or raise exceptions if this is not possible.
"""
from types import MethodType
from typing import Any, Callable, Iterable, List
import networkx as nx
from loguru import logger
from vivarium.exceptions import VivariumError
class ResourceError(VivariumError):
    """Raised when a resource dependency requirement is violated."""
# The set of resource kinds the manager knows how to track.
RESOURCE_TYPES = {
    "value",
    "value_source",
    "missing_value_source",
    "value_modifier",
    "column",
    "stream",
}
# Type assigned to producers that create nothing externally but still have
# dependencies (see ResourceManager._get_resource_group).
NULL_RESOURCE_TYPE = "null"
class ResourceGroup:
    """A node in the resource dependency graph.

    A resource group bundles the pool of resources created by a single
    callable together with everything that callable needs in order to run.
    Viewed as a graph, an instance is one vertex plus all of its in-edges.
    Groups carry only local information; the complete dependency graph is
    assembled once every resource has been registered.
    """

    def __init__(
        self,
        resource_type: str,
        resource_names: List[str],
        producer: Callable,
        dependencies: List[str],
    ):
        self._resource_type = resource_type
        self._resource_names = resource_names
        self._producer = producer
        self._dependencies = dependencies

    @property
    def type(self) -> str:
        """The kind of resource this group's producer creates.

        Must be one of `RESOURCE_TYPES`.
        """
        return self._resource_type

    @property
    def names(self) -> List[str]:
        """Fully qualified ('type.name') names of every resource in the group."""
        qualified = []
        for short_name in self._resource_names:
            qualified.append("{}.{}".format(self._resource_type, short_name))
        return qualified

    @property
    def producer(self) -> Any:
        """The method or object responsible for creating these resources."""
        return self._producer

    @property
    def dependencies(self) -> List[str]:
        """Fully qualified names of everything the producer requires."""
        return self._dependencies

    def __iter__(self) -> Iterable[str]:
        yield from self.names

    def __repr__(self) -> str:
        return "ResourceProducer({})".format(", ".join(self.names))

    def __str__(self) -> str:
        return "({})".format(", ".join(self.names))
class ResourceManager:
    """Manages all the resources needed for population initialization."""
    def __init__(self):
        # This will be a dict with string keys representing the resource
        # and the resource group they belong to. This is a one to many mapping
        # as some resource groups contain many resources.
        self._resource_group_map = {}
        # null producers are those that don't produce any resources externally
        # but still consume other resources (i.e., have dependencies) - these
        # are only pop initializers as of 9/26/2019. Tracker is here to assign
        # them unique ids.
        self._null_producer_count = 0
        # Attribute used for lazy (but cached) graph initialization.
        self._graph = None
        # Attribute used for lazy (but cached) graph topo sort.
        self._sorted_nodes = None
    @property
    def name(self) -> str:
        """The name of this manager."""
        return "resource_manager"
    @property
    def graph(self) -> nx.DiGraph:
        """The networkx graph representation of the resource pool.

        Built lazily on first access and cached; all resources must be
        registered before this is first read.
        """
        if self._graph is None:
            self._graph = self._to_graph()
        return self._graph
    @property
    def sorted_nodes(self):
        """Returns a topological sort of the resource graph.
        Notes
        -----
        Topological sorts are not stable. Be wary of depending on order
        where you shouldn't.
        """
        if self._sorted_nodes is None:
            try:
                self._sorted_nodes = list(nx.algorithms.topological_sort(self.graph))
            except nx.NetworkXUnfeasible:
                # A cycle means some resource (transitively) depends on itself;
                # surface the offending cycle in the error message.
                raise ResourceError(
                    f"The resource pool contains at least one cycle: "
                    f"{nx.find_cycle(self.graph)}."
                )
        return self._sorted_nodes
    def add_resources(
        self,
        resource_type: str,
        resource_names: List[str],
        producer: Any,
        dependencies: List[str],
    ):
        """Adds managed resources to the resource pool.
        Parameters
        ----------
        resource_type
            The type of the resources being added. Must be one of
            `RESOURCE_TYPES`.
        resource_names
            A list of names of the resources being added.
        producer
            A method or object that will produce the resources.
        dependencies
            A list of resource names formatted as
            ``resource_type.resource_name`` that the producer requires.
        Raises
        ------
        ResourceError
            If either the resource type is invalid, a component has multiple
            resource producers for the ``column`` resource type, or
            there are multiple producers of the same resource.
        """
        if resource_type not in RESOURCE_TYPES:
            raise ResourceError(
                f"Unknown resource type {resource_type}. "
                f"Permitted types are {RESOURCE_TYPES}."
            )
        resource_group = self._get_resource_group(
            resource_type, resource_names, producer, dependencies
        )
        # Each fully qualified resource name may have exactly one producer.
        for resource in resource_group:
            if resource in self._resource_group_map:
                other_producer = self._resource_group_map[resource].producer
                raise ResourceError(
                    f"Both {producer} and {other_producer} are registered as "
                    f"producers for {resource}."
                )
            self._resource_group_map[resource] = resource_group
    def _get_resource_group(
        self,
        resource_type: str,
        resource_names: List[str],
        producer: MethodType,
        dependencies: List[str],
    ) -> ResourceGroup:
        """Packages resource information into a resource group.
        See Also
        --------
        :class:`ResourceGroup`
        """
        if not resource_names:
            # We have a "producer" that doesn't produce anything, but
            # does have dependencies. This is necessary for components that
            # want to track private state information.
            resource_type = NULL_RESOURCE_TYPE
            resource_names = [str(self._null_producer_count)]
            self._null_producer_count += 1
        return ResourceGroup(resource_type, resource_names, producer, dependencies)
    def _to_graph(self) -> nx.DiGraph:
        """Constructs the full resource graph from information in the groups.
        Components specify local dependency information during setup time.
        When the resources are required at population creation time,
        the graph is generated as all resources must be registered at that
        point.
        Notes
        -----
        We are taking advantage of lazy initialization to sneak this in
        between post setup time when the :class:`values manager
        <vivarium.framework.values.ValuesManager>` finalizes pipeline
        dependencies and population creation time.
        """
        resource_graph = nx.DiGraph()
        # networkx ignores duplicates
        resource_graph.add_nodes_from(self._resource_group_map.values())
        for resource_group in resource_graph.nodes:
            for dependency in resource_group.dependencies:
                if dependency not in self._resource_group_map:
                    # Warn here because this sometimes happens naturally
                    # if observer components are missing from a simulation.
                    logger.warning(
                        f"Resource {dependency} is not provided by any component but is needed to "
                        f"compute {resource_group}."
                    )
                    continue
                dependency_group = self._resource_group_map[dependency]
                # Edge direction: dependency -> dependent, so a topological
                # sort yields producers before their consumers.
                resource_graph.add_edge(dependency_group, resource_group)
        return resource_graph
    def __iter__(self) -> Iterable[MethodType]:
        """Returns a dependency-sorted iterable of population initializers.
        We exclude all non-initializer dependencies. They were necessary in
        graph construction, but we only need the column producers at population
        creation time.
        """
        return iter(
            [
                r.producer
                for r in self.sorted_nodes
                if r.type in {"column", NULL_RESOURCE_TYPE}
            ]
        )
    def __repr__(self):
        out = {}
        for resource_group in set(self._resource_group_map.values()):
            produced = ", ".join(resource_group)
            out[produced] = ", ".join(resource_group.dependencies)
        return "\n".join([f"{produced} : {depends}" for produced, depends in out.items()])
class ResourceInterface:
    """The resource management system.

    A resource in :mod:`vivarium` is something like a state table column
    or a randomness stream. These resources are used to initialize or alter
    the state of the simulation. Many of these resources might depend on each
    other and therefore need to be created or updated in a particular order.
    These dependency chains can be quite long and complex.

    Placing the ordering responsibility on end users makes simulations very
    fragile and difficult to understand. Instead, the resource management
    system allows users to only specify local dependencies. The system then
    uses the local dependency information to construct a full dependency
    graph, validate that there are no cyclic dependencies, and return
    resources and their producers in an order that makes sense.

    """

    def __init__(self, manager: ResourceManager):
        self._manager = manager

    def add_resources(
        self,
        resource_type: str,
        resource_names: List[str],
        producer: Any,
        dependencies: List[str],
    ):
        """Adds managed resources to the resource pool.

        Parameters
        ----------
        resource_type
            The type of the resources being added. Must be one of
            `RESOURCE_TYPES`.
        resource_names
            A list of names of the resources being added.
        producer
            A method or object that will produce the resources.
        dependencies
            A list of resource names formatted as
            ``resource_type.resource_name`` that the producer requires.

        Raises
        ------
        ResourceError
            If either the resource type is invalid, a component has multiple
            resource producers for the ``column`` resource type, or
            there are multiple producers of the same resource.

        """
        self._manager.add_resources(resource_type, resource_names, producer, dependencies)

    def __iter__(self):
        """Returns a dependency-sorted iterable of population initializers.

        We exclude all non-initializer dependencies. They were necessary in
        graph construction, but we only need the column producers at population
        creation time.

        """
        return iter(self._manager)
producer
A method or object that will produce the resources. |
<|file_name|>mnist_evaluate.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
from mnist import evaluate
import sys
model = evaluate.init()
prediction1, confidence1 = evaluate.from_local_image(sys.argv[1], model)
print("prediction: {}; confidence: {:.2f}".format(prediction1, confidence1))
prediction2, confidence2 = evaluate.from_local_image(sys.argv[1], model)<|fim▁hole|><|fim▁end|> | print("prediction: {}; confidence: {:.2f}".format(prediction2, confidence2)) |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
from operator import mul
import paddle.fluid.core as core
import paddle.fluid as fluid
from op_test import OpTest
from testsuite import create_op
def group_norm_naive(x, scale, bias, epsilon, groups, data_layout):
    """NumPy reference implementation of group normalization.

    Normalizes `x` over each of `groups` channel groups, then applies the
    per-channel affine transform defined by `scale` and `bias`.

    Returns a tuple ``(output, mean, var)`` where `output` matches the layout
    of `x` and `mean`/`var` have shape ``(N, groups)``.
    """
    if data_layout == "NHWC":
        # Work internally in NCHW; convert back at the end.
        x = np.transpose(x, (0, 3, 1, 2))
    N, C, H, W = x.shape
    grouped = x.reshape((N * groups, -1))
    mu = np.mean(grouped, axis=1, keepdims=True)
    sigma_sq = np.var(grouped, axis=1, keepdims=True)
    normalized = (grouped - mu) / np.sqrt(sigma_sq + epsilon)
    out = normalized.reshape((N, C, H, W))
    out = out * scale.reshape((-1, 1, 1)) + bias.reshape((-1, 1, 1))
    if data_layout == "NHWC":
        out = np.transpose(out, (0, 2, 3, 1))
    return out, mu.reshape((N, groups)), sigma_sq.reshape((N, groups))
class TestGroupNormOp(OpTest):
    # Base test: builds random inputs, runs the group_norm op, and checks the
    # outputs/gradients against the NumPy reference `group_norm_naive`.
    def setUp(self):
        self.op_type = "group_norm"
        self.data_format = "NCHW"
        self.dtype = np.float32
        self.shape = (2, 4, 3, 3)
        self.attrs = {'epsilon': 1e-5, 'groups': 2, 'data_layout': "NCHW"}
        self.compare_between_place = False
        # Subclasses override init_test_case to vary shape/groups/epsilon/layout.
        self.init_test_case()
        input = np.random.random(self.shape).astype(self.dtype)
        if self.data_format == "NHWC":
            input = np.transpose(input, (0, 2, 3, 1))
        scale = np.random.random([self.shape[1]]).astype(self.dtype)
        bias = np.random.random([self.shape[1]]).astype(self.dtype)
        output, mean, var = group_norm_naive(
            input, scale, bias, self.attrs['epsilon'], self.attrs['groups'],
            self.data_format)
        self.inputs = {
            'X': OpTest.np_dtype_to_fluid_dtype(input),
            'Scale': OpTest.np_dtype_to_fluid_dtype(scale),
            'Bias': OpTest.np_dtype_to_fluid_dtype(bias)
        }
        self.outputs = {'Y': output, 'Mean': mean, 'Variance': var}
        self.attrs['data_layout'] = self.data_format
    def test_check_output(self):
        atol = 1e-4
        inplace_atol = 1e-4
        place = core.CPUPlace()
        # add inplace_atol because group_norm doesn't ensure computational consistency
        self.check_output_with_place(
            place, atol=atol, inplace_atol=inplace_atol)
        if core.is_compiled_with_cuda():
            place = core.CUDAPlace(0)
            self.check_output_with_place(
                place, atol=atol, inplace_atol=inplace_atol)
    def do_compare_between_place(self):
        # Compares CPU gradients against GPU gradients directly; used for
        # inputs too large for numeric gradient checking.
        if not core.is_compiled_with_cuda(): return
        place = core.CPUPlace()
        place2 = core.CUDAPlace(0)
        self.scope = core.Scope()
        op_inputs = self.inputs if hasattr(self, "inputs") else dict()
        op_outputs = self.outputs if hasattr(self, "outputs") else dict()
        op_attrs = self.attrs if hasattr(self, "attrs") else dict()
        self.op = create_op(self.scope, self.op_type, op_inputs, op_outputs,
                            op_attrs)
        inputs_to_check = set(['X', 'Scale', 'Bias'])
        output_names = 'Y'
        cpu_grads = self._get_gradient(inputs_to_check, place, output_names,
                                       None)
        gpu_grads = self._get_gradient(inputs_to_check, place2, output_names,
                                       None)
        self._assert_is_close(cpu_grads, gpu_grads, inputs_to_check, 0.005,
                              "Gradient Check On %s" % str(place))
    def test_check_grad(self):
        if self.compare_between_place:
            self.do_compare_between_place()
            return
        place = core.CPUPlace()
        self.check_grad_with_place(
            place, set(['X', 'Scale', 'Bias']), 'Y', max_relative_error=0.01)
        if core.is_compiled_with_cuda():
            place = core.CUDAPlace(0)
            self.check_grad_with_place(
                place,
                set(['X', 'Scale', 'Bias']),
                'Y',
                max_relative_error=0.01)
    def init_test_case(self):
        # Hook for subclasses; the base case keeps the defaults set in setUp.
        pass
class TestGroupNormOp1(TestGroupNormOp):
    # Single group spanning all channels (layer-norm-like configuration).
    def init_test_case(self):
        self.attrs['groups'] = 1
class TestGroupNormOp2(TestGroupNormOp):
    # One group per channel (instance-norm-like configuration for C=4).
    def init_test_case(self):
        self.attrs['groups'] = 4
class TestGroupNormOpBigEps1(TestGroupNormOp):
    # Large epsilon to exercise the numerically-damped path.
    def init_test_case(self):
        self.attrs['groups'] = 1
        self.attrs['epsilon'] = 0.5
class TestGroupNormOpBigEps2(TestGroupNormOp):
    # Large epsilon with per-channel groups.
    def init_test_case(self):
        self.attrs['groups'] = 4
        self.attrs['epsilon'] = 0.5
class TestGroupNormOpBigEps3(TestGroupNormOp):
    # Large epsilon with the default group count (2).
    def init_test_case(self):
        self.attrs['epsilon'] = 0.5
class TestGroupNormOpLargeData(TestGroupNormOp):
    def init_test_case(self):
        self.shape = (2, 32, 64, 64)
        self.attrs['groups'] = 8
        # Too large for numeric gradient checking; compare CPU vs GPU grads instead.
        self.compare_between_place = True
class TestGroupNormOp1_With_NHWC(TestGroupNormOp):
    # Single group, channels-last layout.
    def init_test_case(self):
        self.attrs['groups'] = 1
        self.data_format = "NHWC"


class TestGroupNormOp2_With_NHWC(TestGroupNormOp):
    # Per-channel groups, channels-last layout. (The class header was lost to
    # an extraction artifact and is reconstructed here.)
    def init_test_case(self):
        self.attrs['groups'] = 4
        self.data_format = "NHWC"
class TestGroupNormOpBigEps1_With_NHWC(TestGroupNormOp):
    # Large epsilon, single group, channels-last layout.
    def init_test_case(self):
        self.attrs['groups'] = 1
        self.attrs['epsilon'] = 0.5
        self.data_format = "NHWC"
class TestGroupNormOpBigEps2_With_NHWC(TestGroupNormOp):
    # Large epsilon, per-channel groups, channels-last layout.
    def init_test_case(self):
        self.attrs['groups'] = 4
        self.attrs['epsilon'] = 0.5
        self.data_format = "NHWC"
class TestGroupNormOpBigEps3_With_NHWC(TestGroupNormOp):
    # Large epsilon, default groups, channels-last layout.
    def init_test_case(self):
        self.attrs['epsilon'] = 0.5
        self.data_format = "NHWC"
class TestGroupNormOpLargeData_With_NHWC(TestGroupNormOp):
    def init_test_case(self):
        self.shape = (2, 64, 32, 32)  # NCHW
        self.attrs['groups'] = 8
        self.data_format = "NHWC"
        # Too large for numeric gradient checking; compare CPU vs GPU grads instead.
        self.compare_between_place = True
class TestGroupNormAPI_With_NHWC(OpTest):
    # Exercises the Python layer API: NHWC and NCHW inputs must each match
    # the NumPy reference implementation.
    def test_case1(self):
        data1 = fluid.data(name='data1', shape=[None, 3, 3, 4], dtype='float32')
        out1 = fluid.layers.group_norm(
            input=data1, groups=2, data_layout="NHWC")
        data2 = fluid.data(name='data2', shape=[None, 4, 3, 3], dtype='float32')
        out2 = fluid.layers.group_norm(
            input=data2, groups=2, data_layout="NCHW")
        data1_np = np.random.random((2, 3, 3, 4)).astype("float32")
        data2_np = np.random.random((2, 4, 3, 3)).astype("float32")
        # NOTE(review): scale=1/bias=0 appear to mirror group_norm's default
        # affine parameters — confirm against the layer's param initializers.
        scale = np.array([1]).astype("float32")
        bias = np.array([0]).astype("float32")
        place = core.CPUPlace()
        exe = fluid.Executor(place)
        results = exe.run(fluid.default_main_program(),
                          feed={"data1": data1_np,
                                "data2": data2_np},
                          fetch_list=[out1, out2],
                          return_numpy=True)
        expect_res1 = group_norm_naive(
            data1_np, scale, bias, epsilon=1e-5, groups=2, data_layout="NHWC")
        expect_res2 = group_norm_naive(
            data2_np, scale, bias, epsilon=1e-5, groups=2, data_layout="NCHW")
        self.assertTrue(np.allclose(results[0], expect_res1[0]))
        self.assertTrue(np.allclose(results[1], expect_res2[0]))
class TestGroupNormException(OpTest):
    # data_layout is not NHWC or NCHW
    def test_exception(self):
        data = fluid.data(name='data', shape=[None, 3, 3, 4], dtype="float32")
        def attr_data_format():
            out = fluid.layers.group_norm(
                input=data, groups=2, data_layout="NDHW")
        # An invalid layout string must raise ValueError.
        self.assertRaises(ValueError, attr_data_format)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | class TestGroupNormOp2_With_NHWC(TestGroupNormOp):
def init_test_case(self): |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Linear Estimators."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import re
import six
from tensorflow.contrib import layers
from tensorflow.contrib.framework import deprecated
from tensorflow.contrib.framework import deprecated_arg_values
from tensorflow.contrib.framework.python.ops import variables as contrib_variables
from tensorflow.contrib.learn.python.learn import evaluable
from tensorflow.contrib.learn.python.learn import trainable
from tensorflow.contrib.learn.python.learn.estimators import estimator
from tensorflow.contrib.learn.python.learn.estimators import head as head_lib
from tensorflow.contrib.learn.python.learn.estimators import prediction_key
from tensorflow.contrib.learn.python.learn.utils import export
from tensorflow.contrib.linear_optimizer.python import sdca_optimizer
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import gradients
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import session_run_hook
from tensorflow.python.training import training as train
# The default learning rate of 0.2 is a historical artifact of the initial
# implementation, but seems a reasonable choice.
_LEARNING_RATE = 0.2
def _get_optimizer(spec):
  """Resolve `spec` (name string, factory callable, or instance) to an optimizer."""
  if isinstance(spec, six.string_types):
    # A string names one of the canned optimizers; build it with the default rate.
    return layers.OPTIMIZER_CLS_NAMES[spec](learning_rate=_LEARNING_RATE)
  if callable(spec):
    # A factory: invoke it to obtain the optimizer instance.
    return spec()
  # Already an optimizer instance; pass it through unchanged.
  return spec
# TODO(ispir): Remove this function by fixing '_infer_model' with single outputs
# and as_iteable case.
def _as_iterable(preds, output):
for pred in preds:
yield pred[output]
def _add_bias_column(feature_columns, columns_to_tensors, bias_variable,
labels, columns_to_variables):
# TODO(b/31008490): Move definition to a common constants place.
bias_column_name = "tf_virtual_bias_column"
if any(col.name is bias_column_name for col in feature_columns):
raise ValueError("%s is a reserved column name." % bias_column_name)
bias_column = layers.real_valued_column(bias_column_name)
columns_to_tensors[bias_column] = array_ops.ones_like(labels,
dtype=dtypes.float32)
columns_to_variables[bias_column] = [bias_variable]
def _linear_model_fn(features, labels, mode, params):
  """A model_fn for linear models that use a gradient-based optimizer.
  Args:
    features: `Tensor` or dict of `Tensor` (depends on data passed to `fit`).
    labels: `Tensor` of shape [batch_size, 1] or [batch_size] labels of
      dtype `int32` or `int64` in the range `[0, n_classes)`.
    mode: Defines whether this is training, evaluation or prediction.
      See `ModeKeys`.
    params: A dict of hyperparameters.
      The following hyperparameters are expected:
      * head: A `Head` instance.
      * feature_columns: An iterable containing all the feature columns used by
          the model.
      * optimizer: string, `Optimizer` object, or callable that defines the
          optimizer to use for training.
      * gradient_clip_norm: A float > 0. If provided, gradients are
          clipped to their global norm with this clipping ratio.
      * num_ps_replicas: The number of parameter server replicas.
      * joint_weights: If True, the weights for all columns will be stored in a
          single (possibly partitioned) variable. It's more efficient, but it's
          incompatible with SDCAOptimizer, and requires all feature columns are
          sparse and use the 'sum' combiner.
  Returns:
    An `estimator.ModelFnOps` instance.
  Raises:
    ValueError: If mode is not any of the `ModeKeys`.
  """
  head = params["head"]
  feature_columns = params["feature_columns"]
  optimizer = params["optimizer"]
  gradient_clip_norm = params.get("gradient_clip_norm", None)
  num_ps_replicas = params.get("num_ps_replicas", 0)
  joint_weights = params.get("joint_weights", False)
  # Normalize a bare Tensor into a dict so the column ops below see one input shape.
  if not isinstance(features, dict):
    features = {"": features}
  parent_scope = "linear"
  # Partition large variables across up to num_ps_replicas parameter servers
  # (minimum 64MB per slice).
  partitioner = partitioned_variables.min_max_variable_partitioner(
      max_partitions=num_ps_replicas,
      min_slice_size=64 << 20)
  with variable_scope.variable_scope(
      parent_scope, values=features.values(), partitioner=partitioner) as scope:
    if joint_weights:
      logits, _, _ = (
          layers.joint_weighted_sum_from_feature_columns(
              columns_to_tensors=features,
              feature_columns=feature_columns,
              num_outputs=head.logits_dimension,
              weight_collections=[parent_scope],
              scope=scope))
    else:
      logits, _, _ = (
          layers.weighted_sum_from_feature_columns(
              columns_to_tensors=features,
              feature_columns=feature_columns,
              num_outputs=head.logits_dimension,
              weight_collections=[parent_scope],
              scope=scope))
  def _train_op_fn(loss):
    # Compute and apply gradients only over the linear model's variables
    # (those collected under the "linear" scope above).
    global_step = contrib_variables.get_global_step()
    my_vars = ops.get_collection("linear")
    grads = gradients.gradients(loss, my_vars)
    if gradient_clip_norm:
      grads, _ = clip_ops.clip_by_global_norm(grads, gradient_clip_norm)
    return (optimizer.apply_gradients(
        zip(grads, my_vars), global_step=global_step))
  return head.head_ops(features, labels, mode, _train_op_fn, logits)
def sdca_model_fn(features, labels, mode, params):
  """A model_fn for linear models that use the SDCA optimizer.

  Args:
    features: A dict of `Tensor` keyed by column name.
    labels: `Tensor` of shape [batch_size, 1] or [batch_size] labels of
      dtype `int32` or `int64` in the range `[0, n_classes)`.
    mode: Defines whether this is training, evaluation or prediction.
      See `ModeKeys`.
    params: A dict of hyperparameters.
      The following hyperparameters are expected:
      * head: A `Head` instance. Type must be one of `_BinarySvmHead`,
          `_RegressionHead` or `_MultiClassHead`.
      * feature_columns: An iterable containing all the feature columns used by
          the model.
      * optimizer: An `SDCAOptimizer` instance.
      * weight_column_name: A string defining the weight feature column, or
          None if there are no weights.
      * update_weights_hook: A `SessionRunHook` object or None. Used to update
          model weights.

  Returns:
    An `estimator.ModelFnOps` instance.

  Raises:
    ValueError: If `optimizer` is not an `SDCAOptimizer` instance.
    ValueError: If the type of head is neither `_BinarySvmHead`, nor
      `_RegressionHead` nor `_MultiClassHead`.
    ValueError: If mode is not any of the `ModeKeys`.
  """
  head = params["head"]
  feature_columns = params["feature_columns"]
  optimizer = params["optimizer"]
  weight_column_name = params["weight_column_name"]
  update_weights_hook = params.get("update_weights_hook", None)

  if not isinstance(optimizer, sdca_optimizer.SDCAOptimizer):
    raise ValueError("Optimizer must be of type SDCAOptimizer")

  # Map the head type to the loss that SDCA should minimize.
  if isinstance(head, head_lib._BinarySvmHead):  # pylint: disable=protected-access
    loss_type = "hinge_loss"
  elif isinstance(head, head_lib._MultiClassHead):  # pylint: disable=protected-access
    loss_type = "logistic_loss"
  elif isinstance(head, head_lib._RegressionHead):  # pylint: disable=protected-access
    loss_type = "squared_loss"
  else:
    # BUG FIX: the original code did `return ValueError(...)`, handing the
    # exception object back to the caller instead of raising it, which
    # contradicts the "Raises" contract documented above.
    raise ValueError("Unsupported head type: {}".format(head))

  parent_scope = "linear"
  with variable_scope.variable_op_scope(
      features.values(), parent_scope) as scope:
    logits, columns_to_variables, bias = (
        layers.weighted_sum_from_feature_columns(
            columns_to_tensors=features,
            feature_columns=feature_columns,
            num_outputs=1,
            scope=scope))
    _add_bias_column(feature_columns, features, bias, labels,
                     columns_to_variables)

  def _train_op_fn(unused_loss):
    # SDCA computes its own loss internally, so the loss produced by the
    # head is deliberately ignored here.
    global_step = contrib_variables.get_global_step()
    sdca_model, train_op = optimizer.get_train_step(columns_to_variables,
                                                    weight_column_name,
                                                    loss_type, features,
                                                    labels, global_step)
    if update_weights_hook is not None:
      # Hand the model and train op to the chief-only hook so it can run the
      # weight update / shrink step after each training step.
      update_weights_hook.set_parameters(sdca_model, train_op)
    return train_op

  return head.head_ops(features, labels, mode, _train_op_fn, logits)
# Ensures consistency with LinearComposableModel.
def _get_default_optimizer(feature_columns):
  """Build the default FTRL optimizer for a linear model.

  The learning rate shrinks as 1/sqrt(#feature columns), capped at
  `_LEARNING_RATE`, to keep wide models stable.
  """
  column_count = len(feature_columns)
  rate = 1.0 / math.sqrt(column_count)
  if rate > _LEARNING_RATE:
    rate = _LEARNING_RATE
  return train.FtrlOptimizer(learning_rate=rate)
class _SdcaUpdateWeightsHook(session_run_hook.SessionRunHook):
  """SessionRunHook to update and shrink SDCA model weights."""

  def __init__(self):
    pass

  def set_parameters(self, sdca_model, train_op):
    # Called by sdca_model_fn after the model graph is built; the hook cannot
    # receive these in __init__ because it is constructed before the model_fn
    # runs.
    self._sdca_model = sdca_model
    self._train_op = train_op

  def begin(self):
    """Construct the update_weights op.

    The op is implicitly added to the default graph.
    """
    # Requires set_parameters() to have been called first.
    self._update_op = self._sdca_model.update_weights(self._train_op)

  def before_run(self, run_context):
    """Return the update_weights op so that it is executed during this run."""
    return session_run_hook.SessionRunArgs(self._update_op)
class LinearClassifier(evaluable.Evaluable, trainable.Trainable):
  """Linear classifier model.

  Train a linear model to classify instances into one of multiple possible
  classes. When number of possible classes is 2, this is binary classification.

  Example:

  ```python
  sparse_column_a = sparse_column_with_hash_bucket(...)
  sparse_column_b = sparse_column_with_hash_bucket(...)

  sparse_feature_a_x_sparse_feature_b = crossed_column(...)

  # Estimator using the default optimizer.
  estimator = LinearClassifier(
      feature_columns=[sparse_column_a, sparse_feature_a_x_sparse_feature_b])

  # Or estimator using the FTRL optimizer with regularization.
  estimator = LinearClassifier(
      feature_columns=[sparse_column_a, sparse_feature_a_x_sparse_feature_b],
      optimizer=tf.train.FtrlOptimizer(
        learning_rate=0.1,
        l1_regularization_strength=0.001
      ))

  # Or estimator using the SDCAOptimizer.
  estimator = LinearClassifier(
     feature_columns=[sparse_column_a, sparse_feature_a_x_sparse_feature_b],
     optimizer=tf.contrib.linear_optimizer.SDCAOptimizer(
       example_id_column='example_id',
       num_loss_partitions=...,
       symmetric_l2_regularization=2.0
     ))

  # Input builders
  def input_fn_train: # returns x, y
    ...
  def input_fn_eval: # returns x, y
    ...
  estimator.fit(input_fn=input_fn_train)
  estimator.evaluate(input_fn=input_fn_eval)
  estimator.predict(x=x)
  ```

  Input of `fit` and `evaluate` should have following features,
    otherwise there will be a `KeyError`:

  * if `weight_column_name` is not `None`, a feature with
    `key=weight_column_name` whose value is a `Tensor`.
  * for each `column` in `feature_columns`:
    - if `column` is a `SparseColumn`, a feature with `key=column.name`
      whose `value` is a `SparseTensor`.
    - if `column` is a `WeightedSparseColumn`, two features: the first with
      `key` the id column name, the second with `key` the weight column name.
      Both features' `value` must be a `SparseTensor`.
    - if `column` is a `RealValuedColumn`, a feature with `key=column.name`
      whose `value` is a `Tensor`.
  """

  def __init__(self,  # _joint_weight pylint: disable=invalid-name
               feature_columns,
               model_dir=None,
               n_classes=2,
               weight_column_name=None,
               optimizer=None,
               gradient_clip_norm=None,
               enable_centered_bias=False,
               _joint_weight=False,
               config=None,
               feature_engineering_fn=None):
    """Construct a `LinearClassifier` estimator object.

    Args:
      feature_columns: An iterable containing all the feature columns used by
        the model. All items in the set should be instances of classes derived
        from `FeatureColumn`.
      model_dir: Directory to save model parameters, graph and etc. This can
        also be used to load checkpoints from the directory into a estimator
        to continue training a previously saved model.
      n_classes: number of label classes. Default is binary classification.
      weight_column_name: A string defining feature column name representing
        weights. It is used to down weight or boost examples during training. It
        will be multiplied by the loss of the example.
      optimizer: The optimizer used to train the model. If specified, it should
        be either an instance of `tf.Optimizer` or the SDCAOptimizer. If `None`,
        the Ftrl optimizer will be used.
      gradient_clip_norm: A `float` > 0. If provided, gradients are clipped
        to their global norm with this clipping ratio. See
        `tf.clip_by_global_norm` for more details.
      enable_centered_bias: A bool. If True, estimator will learn a centered
        bias variable for each class. Rest of the model structure learns the
        residual after centered bias.
      _joint_weight: If True, the weights for all columns will be stored in a
        single (possibly partitioned) variable. It's more efficient, but it's
        incompatible with SDCAOptimizer, and requires all feature columns are
        sparse and use the 'sum' combiner.
      config: `RunConfig` object to configure the runtime settings.
      feature_engineering_fn: Feature engineering function. Takes features and
                        labels which are the output of `input_fn` and
                        returns features and labels which will be fed
                        into the model.

    Returns:
      A `LinearClassifier` estimator.

    Raises:
      ValueError: if n_classes < 2.
    """
    # TODO(zoy): Give an unsupported error if enable_centered_bias is
    # requested for SDCA once its default changes to False.
    self._feature_columns = feature_columns
    assert self._feature_columns
    # Default to FTRL with a learning rate scaled by the number of columns;
    # an explicitly supplied optimizer overrides it.
    self._optimizer = _get_default_optimizer(feature_columns)
    if optimizer:
      self._optimizer = _get_optimizer(optimizer)
    chief_hook = None
    if (isinstance(optimizer, sdca_optimizer.SDCAOptimizer) and
        enable_centered_bias):
      # SDCA does not support a centered bias; disable it with a warning
      # rather than failing (see TODO above).
      enable_centered_bias = False
      logging.warning("centered_bias is not supported with SDCA, "
                      "please disable it explicitly.")
    head = head_lib._multi_class_head(  # pylint: disable=protected-access
        n_classes,
        weight_column_name=weight_column_name,
        enable_centered_bias=enable_centered_bias)
    params = {
        "head": head,
        "feature_columns": feature_columns,
        "optimizer": self._optimizer,
    }

    # Choose model_fn and extra params based on whether SDCA is in use.
    if isinstance(optimizer, sdca_optimizer.SDCAOptimizer):
      assert not _joint_weight, ("_joint_weight is incompatible with the"
                                 " SDCAOptimizer")
      assert n_classes == 2, "SDCA only applies to binary classification."

      model_fn = sdca_model_fn
      # The model_fn passes the model parameters to the chief_hook. We then use
      # the hook to update weights and shrink step only on the chief.
      chief_hook = _SdcaUpdateWeightsHook()
      params.update({
          "weight_column_name": weight_column_name,
          "update_weights_hook": chief_hook,
      })
    else:
      model_fn = _linear_model_fn
      params.update({
          "gradient_clip_norm": gradient_clip_norm,
          "num_ps_replicas": config.num_ps_replicas if config else 0,
          "joint_weights": _joint_weight,
      })

    self._estimator = estimator.Estimator(
        model_fn=model_fn,
        model_dir=model_dir,
        config=config,
        params=params,
        feature_engineering_fn=feature_engineering_fn)

    # The SDCA weight-update hook must run only on the chief worker.
    self._additional_run_hook = (chief_hook if self._estimator.config.is_chief
                                 else None)

  def get_estimator(self):
    # Expose the wrapped Estimator for callers needing lower-level access.
    return self._estimator

  def fit(self, x=None, y=None, input_fn=None, steps=None, batch_size=None,
          monitors=None, max_steps=None):
    """See trainable.Trainable."""
    # TODO(roumposg): Remove when deprecated monitors are removed.
    if monitors is None:
      monitors = []
    # Legacy (pre-SessionRunHook) monitors need an estimator reference and
    # must be locked for the duration of this fit call.
    deprecated_monitors = [
        m for m in monitors
        if not isinstance(m, session_run_hook.SessionRunHook)
    ]
    for monitor in deprecated_monitors:
      monitor.set_estimator(self)
      monitor._lock_estimator()  # pylint: disable=protected-access

    # NOTE(review): this appends to the caller-supplied `monitors` list in
    # place — confirm callers do not reuse the same list across fit() calls,
    # or the hook accumulates.
    if self._additional_run_hook:
      monitors.append(self._additional_run_hook)
    result = self._estimator.fit(x=x, y=y, input_fn=input_fn, steps=steps,
                                 batch_size=batch_size, monitors=monitors,
                                 max_steps=max_steps)

    for monitor in deprecated_monitors:
      monitor._unlock_estimator()  # pylint: disable=protected-access
    return result

  def evaluate(self, x=None, y=None, input_fn=None, feed_fn=None,
               batch_size=None, steps=None, metrics=None, name=None):
    """See evaluable.Evaluable."""
    return self._estimator.evaluate(x=x, y=y, input_fn=input_fn,
                                    feed_fn=feed_fn, batch_size=batch_size,
                                    steps=steps, metrics=metrics, name=name)

  @deprecated_arg_values(
      estimator.AS_ITERABLE_DATE, estimator.AS_ITERABLE_INSTRUCTIONS,
      as_iterable=False)
  def predict(self, x=None, input_fn=None, batch_size=None, as_iterable=True):
    """Runs inference to determine the predicted class."""
    key = prediction_key.PredictionKey.CLASSES
    preds = self._estimator.predict(
        x=x,
        input_fn=input_fn,
        batch_size=batch_size,
        outputs=[key],
        as_iterable=as_iterable)
    if as_iterable:
      return _as_iterable(preds, output=key)
    return preds[key]

  @deprecated_arg_values(
      estimator.AS_ITERABLE_DATE, estimator.AS_ITERABLE_INSTRUCTIONS,
      as_iterable=False)
  def predict_proba(self, x=None, input_fn=None, batch_size=None, outputs=None,
                    as_iterable=True):
    """Runs inference to determine the class probability predictions."""
    key = prediction_key.PredictionKey.PROBABILITIES
    preds = self._estimator.predict(
        x=x,
        input_fn=input_fn,
        batch_size=batch_size,
        outputs=[key],
        as_iterable=as_iterable)
    if as_iterable:
      return _as_iterable(preds, output=key)
    return preds[key]

  def get_variable_names(self):
    return self._estimator.get_variable_names()

  def get_variable_value(self, name):
    return self._estimator.get_variable_value(name)

  def export(self,
             export_dir,
             input_fn=None,
             input_feature_key=None,
             use_deprecated_input_fn=True,
             signature_fn=None,
             default_batch_size=1,
             exports_to_keep=None):
    """See BaseEstimator.export."""
    def default_input_fn(unused_estimator, examples):
      # Parse serialized tf.Examples using this model's feature columns.
      return layers.parse_feature_columns_from_examples(
          examples, self._feature_columns)
    return self._estimator.export(
        export_dir=export_dir,
        input_fn=input_fn or default_input_fn,
        input_feature_key=input_feature_key,
        use_deprecated_input_fn=use_deprecated_input_fn,
        signature_fn=(signature_fn or
                      export.classification_signature_fn_with_prob),
        prediction_key=prediction_key.PredictionKey.PROBABILITIES,
        default_batch_size=default_batch_size,
        exports_to_keep=exports_to_keep)

  @property
  @deprecated("2016-10-30",
              "This method will be removed after the deprecation date. "
              "To inspect variables, use get_variable_names() and "
              "get_variable_value().")
  def weights_(self):
    values = {}
    # Exclude the bias and the optimizer's slot variables (which share the
    # "linear/" prefix and carry the optimizer name, optionally "_<digit>").
    optimizer_regex = r".*/"+self._optimizer.get_name() + r"(_\d)?$"
    for name in self.get_variable_names():
      if (name.startswith("linear/") and
          name != "linear/bias_weight" and
          not re.match(optimizer_regex, name)):
        values[name] = self.get_variable_value(name)
    if len(values) == 1:
      # Single-column model: return the weight tensor directly.
      return values[list(values.keys())[0]]
    return values

  @property
  @deprecated("2016-10-30",
              "This method will be removed after the deprecation date. "
              "To inspect variables, use get_variable_names() and "
              "get_variable_value().")
  def bias_(self):
    return self.get_variable_value("linear/bias_weight")

  @property
  def config(self):
    return self._estimator.config

  @property
  def model_dir(self):
    return self._estimator.model_dir
class LinearRegressor(evaluable.Evaluable, trainable.Trainable):
"""Linear regressor model.
Train a linear regression model to predict label value given observation of
feature values.
Example:
```python
sparse_column_a = sparse_column_with_hash_bucket(...)
sparse_column_b = sparse_column_with_hash_bucket(...)
sparse_feature_a_x_sparse_feature_b = crossed_column(...)
estimator = LinearRegressor(
feature_columns=[sparse_column_a, sparse_feature_a_x_sparse_feature_b])
# Input builders
def input_fn_train: # returns x, y
...
def input_fn_eval: # returns x, y
...
estimator.fit(input_fn=input_fn_train)
estimator.evaluate(input_fn=input_fn_eval)
estimator.predict(x=x)
```
Input of `fit` and `evaluate` should have following features,
otherwise there will be a KeyError:
* if `weight_column_name` is not `None`:
key=weight_column_name, value=a `Tensor`
* for column in `feature_columns`:
- if isinstance(column, `SparseColumn`):
key=column.name, value=a `SparseTensor`
- if isinstance(column, `WeightedSparseColumn`):
{key=id column name, value=a `SparseTensor`,
key=weight column name, value=a `SparseTensor`}
- if isinstance(column, `RealValuedColumn`):
key=column.name, value=a `Tensor`
"""
def __init__(self, # _joint_weights: pylint: disable=invalid-name
feature_columns,
model_dir=None,
weight_column_name=None,
optimizer=None,
gradient_clip_norm=None,
enable_centered_bias=False,
label_dimension=1,
_joint_weights=False,
config=None,
feature_engineering_fn=None):
"""Construct a `LinearRegressor` estimator object.
Args:
feature_columns: An iterable containing all the feature columns used by
the model. All items in the set should be instances of classes derived
from `FeatureColumn`.
model_dir: Directory to save model parameters, graph, etc. This can
also be used to load checkpoints from the directory into a estimator
to continue training a previously saved model.
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
optimizer: An instance of `tf.Optimizer` used to train the model. If
`None`, will use an Ftrl optimizer.
gradient_clip_norm: A `float` > 0. If provided, gradients are clipped
to their global norm with this clipping ratio. See
`tf.clip_by_global_norm` for more details.
enable_centered_bias: A bool. If True, estimator will learn a centered
bias variable for each class. Rest of the model structure learns the
residual after centered bias.
label_dimension: dimension of the label for multilabels.
_joint_weights: If True use a single (possibly partitioned) variable to
store the weights. It's faster, but requires all feature columns are
sparse and have the 'sum' combiner. Incompatible with SDCAOptimizer.
config: `RunConfig` object to configure the runtime settings.
feature_engineering_fn: Feature engineering function. Takes features and
labels which are the output of `input_fn` and
returns features and labels which will be fed
into the model.
Returns:
A `LinearRegressor` estimator.
"""
self._feature_columns = feature_columns
assert self._feature_columns
self._optimizer = _get_default_optimizer(feature_columns)
if optimizer:
self._optimizer = _get_optimizer(optimizer)
chief_hook = None
if (isinstance(optimizer, sdca_optimizer.SDCAOptimizer) and
enable_centered_bias):
enable_centered_bias = False
logging.warning("centered_bias is not supported with SDCA, "
"please disable it explicitly.")
head = head_lib._regression_head( # pylint: disable=protected-access
weight_column_name=weight_column_name,
label_dimension=label_dimension,
enable_centered_bias=enable_centered_bias)
params = {
"head": head,
"feature_columns": feature_columns,
"optimizer": self._optimizer,
}
if isinstance(optimizer, sdca_optimizer.SDCAOptimizer):
assert label_dimension == 1, "SDCA only applies for label_dimension=1."
assert not _joint_weights, ("_joint_weights is incompatible with"
" SDCAOptimizer.")
model_fn = sdca_model_fn
# The model_fn passes the model parameters to the chief_hook. We then use
# the hook to update weights and shrink step only on the chief.
chief_hook = _SdcaUpdateWeightsHook()
params.update({
"weight_column_name": weight_column_name,
"update_weights_hook": chief_hook,
})<|fim▁hole|> params.update({
"gradient_clip_norm": gradient_clip_norm,
"num_ps_replicas": config.num_ps_replicas if config else 0,
"joint_weights": _joint_weights,
})
self._estimator = estimator.Estimator(
model_fn=model_fn,
model_dir=model_dir,
config=config,
params=params,
feature_engineering_fn=feature_engineering_fn)
self._additional_run_hook = (chief_hook if self._estimator.config.is_chief
else None)
def fit(self, x=None, y=None, input_fn=None, steps=None, batch_size=None,
monitors=None, max_steps=None):
"""See trainable.Trainable."""
# TODO(roumposg): Remove when deprecated monitors are removed.
if monitors is None:
monitors = []
deprecated_monitors = [
m for m in monitors
if not isinstance(m, session_run_hook.SessionRunHook)
]
for monitor in deprecated_monitors:
monitor.set_estimator(self)
monitor._lock_estimator() # pylint: disable=protected-access
if self._additional_run_hook:
monitors.append(self._additional_run_hook)
result = self._estimator.fit(x=x, y=y, input_fn=input_fn, steps=steps,
batch_size=batch_size, monitors=monitors,
max_steps=max_steps)
for monitor in deprecated_monitors:
monitor._unlock_estimator() # pylint: disable=protected-access
return result
def evaluate(self, x=None, y=None, input_fn=None, feed_fn=None,
batch_size=None, steps=None, metrics=None, name=None):
"""See evaluable.Evaluable."""
return self._estimator.evaluate(x=x, y=y, input_fn=input_fn,
feed_fn=feed_fn, batch_size=batch_size,
steps=steps, metrics=metrics, name=name)
@deprecated_arg_values(
estimator.AS_ITERABLE_DATE, estimator.AS_ITERABLE_INSTRUCTIONS,
as_iterable=False)
def predict(self, x=None, input_fn=None, batch_size=None, as_iterable=True):
"""Runs inference to determine the predicted class."""
key = prediction_key.PredictionKey.SCORES
preds = self._estimator.predict(
x=x,
input_fn=input_fn,
batch_size=batch_size,
outputs=[key],
as_iterable=as_iterable)
if as_iterable:
return _as_iterable(preds, output=key)
return preds[key]
def get_variable_names(self):
return self._estimator.get_variable_names()
def get_variable_value(self, name):
return self._estimator.get_variable_value(name)
def export(self,
export_dir,
input_fn=None,
input_feature_key=None,
use_deprecated_input_fn=True,
signature_fn=None,
default_batch_size=1,
exports_to_keep=None):
"""See BaseEstimator.export."""
def default_input_fn(unused_estimator, examples):
return layers.parse_feature_columns_from_examples(
examples, self._feature_columns)
return self._estimator.export(
export_dir=export_dir,
input_fn=input_fn or default_input_fn,
input_feature_key=input_feature_key,
use_deprecated_input_fn=use_deprecated_input_fn,
signature_fn=(signature_fn or export.regression_signature_fn),
prediction_key=prediction_key.PredictionKey.SCORES,
default_batch_size=default_batch_size,
exports_to_keep=exports_to_keep)
@property
@deprecated("2016-10-30",
"This method will be removed after the deprecation date. "
"To inspect variables, use get_variable_names() and "
"get_variable_value().")
def weights_(self):
values = {}
optimizer_regex = r".*/"+self._optimizer.get_name() + r"(_\d)?$"
for name in self.get_variable_names():
if (name.startswith("linear/") and
name != "linear/bias_weight" and
not re.match(optimizer_regex, name)):
values[name] = self.get_variable_value(name)
if len(values) == 1:
return values[list(values.keys())[0]]
return values
@property
@deprecated("2016-10-30",
"This method will be removed after the deprecation date. "
"To inspect variables, use get_variable_names() and "
"get_variable_value().")
def bias_(self):
return self.get_variable_value("linear/bias_weight")
@property
def config(self):
return self._estimator.config
@property
def model_dir(self):
return self._estimator.model_dir<|fim▁end|> | else:
model_fn = _linear_model_fn |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from .mdl_event import *
from .mdl_receipt import *
from .mdl_budget import *
from .mdl_division import *
from .mdl_eventsignin import *
from .mdl_joinrequest import *<|fim▁end|> | from .mdl_user import *
from .mdl_club import * |
<|file_name|>0014_formulacolumnmapping.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2017-06-16 19:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('silo', '0013_deletedsilos'),
]
operations = [
migrations.CreateModel(<|fim▁hole|> name='FormulaColumnMapping',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mapping', models.TextField()),
('operation', models.TextField()),
('column_name', models.TextField()),
('silo', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='silo.Silo')),
],
),
]<|fim▁end|> | |
<|file_name|>vmware_vmkernel_ip_config.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Joseph Callen <jcallen () csc.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: vmware_vmkernel_ip_config
short_description: Configure the VMkernel IP Address
description:
- Configure the VMkernel IP Address
version_added: 2.0
author: "Joseph Callen (@jcpowermac), Russell Teague (@mtnbikenc)"
notes:
- Tested on vSphere 5.5
requirements:
- "python >= 2.6"
- PyVmomi
options:
vmk_name:
description:
- VMkernel interface name
required: True
ip_address:
description:
- IP address to assign to VMkernel interface
required: True
subnet_mask:
description:
- Subnet Mask to assign to VMkernel interface
required: True
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
# Example command from Ansible Playbook
- name: Configure IP address on ESX host
local_action:
module: vmware_vmkernel_ip_config
hostname: esxi_hostname
username: esxi_username
password: esxi_password
vmk_name: vmk0
ip_address: 10.0.0.10
subnet_mask: 255.255.255.0
'''
try:
from pyVmomi import vim, vmodl
HAS_PYVMOMI = True
except ImportError:
HAS_PYVMOMI = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import HAS_PYVMOMI, connect_to_api, get_all_objs, vmware_argument_spec
def configure_vmkernel_ip_address(host_system, vmk_name, ip_address, subnet_mask):
    """Assign a static IP address/netmask to the named VMkernel interface.

    Args:
        host_system: vim.HostSystem whose network configuration is updated.
        vmk_name: VMkernel device name to match, e.g. ``vmk0``.
        ip_address: IPv4 address to assign.
        subnet_mask: Subnet mask to assign.

    Returns:
        True if the vNIC was reconfigured; False if the address already
        matched or no vNIC named ``vmk_name`` exists (idempotent no-op).
    """
    host_config_manager = host_system.configManager
    host_network_system = host_config_manager.networkSystem

    for vnic in host_network_system.networkConfig.vnic:
        if vnic.device == vmk_name:
            spec = vnic.spec
            if spec.ip.ipAddress != ip_address:
                # Force a static configuration before applying the new
                # address/netmask, then push the updated spec to the host.
                spec.ip.dhcp = False
                spec.ip.ipAddress = ip_address
                spec.ip.subnetMask = subnet_mask
                host_network_system.UpdateVirtualNic(vmk_name, spec)
                return True
    return False
def main():
argument_spec = vmware_argument_spec()<|fim▁hole|>
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False)
if not HAS_PYVMOMI:
module.fail_json(msg='pyvmomi is required for this module')
vmk_name = module.params['vmk_name']
ip_address = module.params['ip_address']
subnet_mask = module.params['subnet_mask']
try:
content = connect_to_api(module, False)
host = get_all_objs(content, [vim.HostSystem])
if not host:
module.fail_json(msg="Unable to locate Physical Host.")
host_system = host.keys()[0]
changed = configure_vmkernel_ip_address(host_system, vmk_name, ip_address, subnet_mask)
module.exit_json(changed=changed)
except vmodl.RuntimeFault as runtime_fault:
module.fail_json(msg=runtime_fault.msg)
except vmodl.MethodFault as method_fault:
module.fail_json(msg=method_fault.msg)
except Exception as e:
module.fail_json(msg=str(e))
if __name__ == '__main__':
main()<|fim▁end|> | argument_spec.update(dict(vmk_name=dict(required=True, type='str'),
ip_address=dict(required=True, type='str'),
subnet_mask=dict(required=True, type='str'))) |
<|file_name|>profile.py<|end_file_name|><|fim▁begin|>import copy
from collections import OrderedDict
from collections import defaultdict
from conans.model.env_info import EnvValues
from conans.model.options import OptionsValues
from conans.model.values import Values
class Profile(object):
"""A profile contains a set of setting (with values), environment variables
"""
def __init__(self):
# Sections
self.settings = OrderedDict()
self.package_settings = defaultdict(OrderedDict)
self.env_values = EnvValues()
self.options = OptionsValues()
self.build_requires = OrderedDict() # conan_ref Pattern: list of conan_ref
@property
def settings_values(self):
return Values.from_list(list(self.settings.items()))
@property
def package_settings_values(self):
result = {}
for pkg, settings in self.package_settings.items():
result[pkg] = list(settings.items())
return result
def dumps(self):
result = ["[settings]"]
for name, value in self.settings.items():
result.append("%s=%s" % (name, value))
for package, values in self.package_settings.items():
for name, value in values.items():
result.append("%s:%s=%s" % (package, name, value))
result.append("[options]")
result.append(self.options.dumps())
result.append("[build_requires]")
for pattern, req_list in self.build_requires.items():
result.append("%s: %s" % (pattern, ", ".join(str(r) for r in req_list)))
result.append("[env]")
result.append(self.env_values.dumps())
return "\n".join(result).replace("\n\n", "\n")
def update(self, other):
self.update_settings(other.settings)
self.update_package_settings(other.package_settings)
# this is the opposite
other.env_values.update(self.env_values)
self.env_values = other.env_values
self.options.update(other.options)
for pattern, req_list in other.build_requires.items():
self.build_requires.setdefault(pattern, []).extend(req_list)
def update_settings(self, new_settings):
"""Mix the specified settings with the current profile.
Specified settings are prioritized to profile"""<|fim▁hole|> assert(isinstance(new_settings, OrderedDict))
# apply the current profile
res = copy.copy(self.settings)
if new_settings:
# Invalidate the current subsettings if the parent setting changes
# Example: new_settings declare a different "compiler", so invalidate the current "compiler.XXX"
for name, value in new_settings.items():
if "." not in name:
if name in self.settings and self.settings[name] != value:
for cur_name, _ in self.settings.items():
if cur_name.startswith("%s." % name):
del res[cur_name]
# Now merge the new values
res.update(new_settings)
self.settings = res
def update_package_settings(self, package_settings):
"""Mix the specified package settings with the specified profile.
Specified package settings are prioritized to profile"""
for package_name, settings in package_settings.items():
self.package_settings[package_name].update(settings)<|fim▁end|> | |
<|file_name|>webpack.dev.config.js<|end_file_name|><|fim▁begin|>var webpack = require('webpack');
var path = require('path');
var APP_DIR = path.resolve(__dirname, 'react');
var BUILD_DIR = path.resolve(__dirname, 'project/static/js');
var config = {
entry: {
public: APP_DIR + '/public.main.jsx',
private: APP_DIR + '/private.main.jsx',
},
output: {
path: BUILD_DIR,
filename: "[name].bundle.js",
},
plugins: [
new webpack.optimize.CommonsChunkPlugin('init.js'),
new webpack.EnvironmentPlugin(['NODE_ENV']),
],
resolve: {
extensions: ['', '.js', '.jsx'],
moduleDirectories: ['node_modules', 'react'],
},
module: {
loaders: [
{
test: /\.jsx?$/,
loader: 'babel-loader',
exclude: /node_modules/,
query: {
cacheDirectory: false,
plugins: [
require.resolve('babel-plugin-transform-decorators-legacy'),
],<|fim▁hole|> ],
},
},
],
},
};
module.exports = config;<|fim▁end|> | presets: [
require.resolve('babel-preset-react'),
require.resolve('babel-preset-es2015'),
require.resolve('babel-preset-stage-0'), |
<|file_name|>gitstats.py<|end_file_name|><|fim▁begin|>import urllib2
import base64
import json<|fim▁hole|>from GitFetcher import GitHubFetcher;
username = "debuggerman"
password = "megadeth"
orgUrl = "https://api.github.com/orgs"
orgName = "coeus-solutions"
gitFetcher = GitHubFetcher(username = username, password = password, orgUrl = orgUrl, orgName = orgName)
gitFetcher.getOrgInfo()<|fim▁end|> | from link import *; |
<|file_name|>jquery.flot.js<|end_file_name|><|fim▁begin|>/* Javascript plotting library for jQuery, v. 0.5.
*
* Released under the MIT license by IOLA, December 2007.
*
*/
(function($) {
function Plot(target, data_, options_, plugins) {
// data is on the form:
// [ series1, series2 ... ]
// where series is either just the data as [ [x1, y1], [x2, y2], ... ]
// or { data: [ [x1, y1], [x2, y2], ... ], label: "some label", ... }
var series = [],
options = {
// the color theme used for graphs
colors: ["#edc240", "#afd8f8", "#cb4b4b", "#4da74d", "#9440ed"],
legend: {
show: true,
noColumns: 1, // number of colums in legend table
labelFormatter: null, // fn: string -> string
labelBoxBorderColor: "#ccc", // border color for the little label boxes
container: null, // container (as jQuery object) to put legend in, null means default on top of graph
position: "ne", // position of default legend container within plot
margin: 5, // distance from grid edge to default legend container within plot
backgroundColor: null, // null means auto-detect
backgroundOpacity: 0.85 // set to 0 to avoid background
},
xaxis: {
mode: null, // null or "time"
min: null, // min. value to show, null means set automatically
max: null, // max. value to show, null means set automatically
autoscaleMargin: null, // margin in % to add if auto-setting min/max
ticks: null, // either [1, 3] or [[1, "a"], 3] or (fn: axis info -> ticks) or app. number of ticks for auto-ticks
tickFormatter: null, // fn: number -> string
labelWidth: null, // size of tick labels in pixels
labelHeight: null,
// mode specific options
tickDecimals: null, // no. of decimals, null means auto
tickSize: null, // number or [number, "unit"]
minTickSize: null, // number or [number, "unit"]
monthNames: null, // list of names of months
timeformat: null // format string to use
},
yaxis: {
autoscaleMargin: 0.02
},
x2axis: {
autoscaleMargin: null
},
y2axis: {
autoscaleMargin: 0.02
},
series: {
points: {
show: false,
radius: 3,
lineWidth: 2, // in pixels
fill: true,
fillColor: "#ffffff"
},
lines: {
// we don't put in show: false so we can see
// whether lines were actively disabled
lineWidth: 2, // in pixels
fill: false,
fillColor: null,
steps: false
},
bars: {
show: false,
lineWidth: 2, // in pixels
barWidth: 1, // in units of the x axis
fill: true,
fillColor: null,
align: "left", // or "center"
horizontal: false // when horizontal, left is now top
},
shadowSize: 3
},
grid: {
color: "#545454", // primary color used for outline and labels
backgroundColor: null, // null for transparent, else color
tickColor: "#dddddd", // color used for the ticks
labelMargin: 5, // in pixels
borderWidth: 2, // in pixels
borderColor: null, // set if different from the grid color
markings: null, // array of ranges or fn: axes -> array of ranges
markingsColor: "#f4f4f4",
markingsLineWidth: 2,
// interactive stuff
clickable: false,
hoverable: false,
autoHighlight: true, // highlight in case mouse is near
mouseActiveRadius: 10 // how far the mouse can be away to activate an item
},
selection: {
mode: null, // one of null, "x", "y" or "xy"
color: "#e8cfac"
}
},
canvas = null, // the canvas for the plot itself
overlay = null, // canvas for interactive stuff on top of plot
eventHolder = null, // jQuery object that events should be bound to
ctx = null, octx = null,
axes = { xaxis: {}, yaxis: {}, x2axis: {}, y2axis: {} },
plotOffset = { left: 0, right: 0, top: 0, bottom: 0},
canvasWidth = 0, canvasHeight = 0,
plotWidth = 0, plotHeight = 0,
hooks = {
processOptions: [],
processRawData: [],
processDatapoints: [],
bindEvents: [],
drawOverlay: []
},
plot = this,
// dedicated to storing data for buggy standard compliance cases
workarounds = {};
// Publish the public plot API: these expose the closure-scoped state
// and functions that callers and plugins use.
// public functions
plot.setData = setData;
plot.setupGrid = setupGrid;
plot.draw = draw;
plot.clearSelection = clearSelection;
plot.setSelection = setSelection;
plot.getSelection = getSelection;
plot.getCanvas = function() { return canvas; };
plot.getPlotOffset = function() { return plotOffset; };
plot.width = function () { return plotWidth; }
plot.height = function () { return plotHeight; }
// offset of the plot area relative to the document (event holder
// offset plus the grid's internal padding)
plot.offset = function () {
    var o = eventHolder.offset();
    o.left += plotOffset.left;
    o.top += plotOffset.top;
    return o;
};
plot.getData = function() { return series; };
plot.getAxes = function() { return axes; };
plot.getOptions = function() { return options; };
plot.highlight = highlight;
plot.unhighlight = unhighlight;
plot.triggerRedrawOverlay = triggerRedrawOverlay;
// public attributes
plot.hooks = hooks;
// initialize: plugins and options first (later steps read the merged
// options), then canvas construction, data load, grid setup, first
// draw and finally event binding
initPlugins(plot);
parseOptions(options_);
constructCanvas();
setData(data_);
setupGrid();
draw();
bindEvents();
function executeHooks(hook, args) {
    // Invoke every callback registered for this hook, with the plot
    // object prepended to the argument list.
    var callArgs = [plot].concat(args);
    var idx = 0;
    while (idx < hook.length) {
        hook[idx].apply(this, callArgs);
        ++idx;
    }
}
function initPlugins() {
    // Let each registered plugin hook into this plot instance and
    // merge the plugin's default options into ours.
    var count = plugins.length, k, plugin;
    for (k = 0; k < count; k++) {
        plugin = plugins[k];
        plugin.init(plot);
        if (plugin.options)
            $.extend(true, options, plugin.options);
    }
}
function parseOptions(userOptions) {
    // Merge caller-supplied options into the defaults and resolve
    // deprecated aliases.
    $.extend(true, options, userOptions);
    if (options.grid.borderColor == null)
        options.grid.borderColor = options.grid.color;
    // backwards compatibility, to be removed in future:
    // "noTicks" was the old name for a numeric "ticks"
    var compatAxes = ["xaxis", "yaxis"], ai, axisName;
    for (ai = 0; ai < compatAxes.length; ++ai) {
        axisName = compatAxes[ai];
        if (options[axisName].noTicks && options[axisName].ticks == null)
            options[axisName].ticks = options[axisName].noTicks;
    }
    // "coloredAreas" was renamed to "markings"
    if (options.grid.coloredAreas)
        options.grid.markings = options.grid.coloredAreas;
    if (options.grid.coloredAreasColor)
        options.grid.markingsColor = options.grid.coloredAreasColor;
    // top-level lines/points/bars used to be global; fold them into
    // the series defaults
    var seriesKeys = ["lines", "points", "bars"], si, key;
    for (si = 0; si < seriesKeys.length; ++si) {
        key = seriesKeys[si];
        if (options[key])
            $.extend(true, options.series[key], options[key]);
    }
    if (options.shadowSize)
        options.series.shadowSize = options.shadowSize;
    executeHooks(hooks.processOptions, [options]);
}
function setData(d) {
    // Replace the current data set and rebuild everything derived
    // from it: normalized series objects, per-series options, and the
    // flattened datapoints with axis extents.
    series = parseData(d);
    fillInSeriesOptions();
    processData();
}
function parseData(d) {
    // Normalize raw input into an array of series objects, each one
    // seeded with the series defaults from the options.
    var normalized = [], entry, s, j;
    for (j = 0; j < d.length; ++j) {
        entry = d[j];
        s = $.extend(true, {}, options.series);
        if (entry.data) {
            // Detach the data before the deep extend so we move it
            // instead of deep-copying every point, then put it back
            // on the caller's object afterwards.
            s.data = entry.data;
            delete entry.data;
            $.extend(true, s, entry);
            entry.data = s.data;
        }
        else
            s.data = entry;
        normalized.push(s);
    }
    return normalized;
}
function fillInSeriesOptions() {
    // Assign a color to every series that lacks one and resolve each
    // series' axis references. Colors are generated by cycling through
    // options.colors, scaling each full cycle lighter/darker
    // ("variation"), so the emitted sequence is order-dependent.
    var i;
    // collect what we already got of colors
    var neededColors = series.length,
        usedColors = [],
        assignedColors = [];
    for (i = 0; i < series.length; ++i) {
        var sc = series[i].color;
        if (sc != null) {
            --neededColors;
            if (typeof sc == "number")
                assignedColors.push(sc); // numeric color = index into generated palette
            else
                usedColors.push(parseColor(series[i].color));
        }
    }
    // NOTE(review): usedColors is collected but never consulted below
    // we might need to generate more colors if higher indices
    // are assigned
    for (i = 0; i < assignedColors.length; ++i) {
        neededColors = Math.max(neededColors, assignedColors[i] + 1);
    }
    // produce colors as needed
    var colors = [], variation = 0;
    i = 0;
    while (colors.length < neededColors) {
        var c;
        if (options.colors.length == i) // check degenerate case
            c = new Color(100, 100, 100);
        else
            c = parseColor(options.colors[i]);
        // vary color if needed; alternate between lightening and
        // darkening with increasing strength per cycle
        var sign = variation % 2 == 1 ? -1 : 1;
        var factor = 1 + sign * Math.ceil(variation / 2) * 0.2;
        c.scale(factor, factor, factor);
        // FIXME: if we're getting to close to something else,
        // we should probably skip this one
        colors.push(c);
        ++i;
        if (i >= options.colors.length) {
            i = 0;
            ++variation;
        }
    }
    // fill in the options
    var colori = 0, s;
    for (i = 0; i < series.length; ++i) {
        s = series[i];
        // assign colors: unset gets the next generated color, a
        // number is resolved as an index into the generated palette
        if (s.color == null) {
            s.color = colors[colori].toString();
            ++colori;
        }
        else if (typeof s.color == "number")
            s.color = colors[s.color].toString();
        // turn on lines automatically in case nothing is set
        if (s.lines.show == null && !s.bars.show && !s.points.show)
            s.lines.show = true;
        // setup axes; numeric shorthands: 1 = primary, 2 = secondary
        if (!s.xaxis)
            s.xaxis = axes.xaxis;
        if (s.xaxis == 1)
            s.xaxis = axes.xaxis;
        else if (s.xaxis == 2)
            s.xaxis = axes.x2axis;
        if (!s.yaxis)
            s.yaxis = axes.yaxis;
        if (s.yaxis == 1)
            s.yaxis = axes.yaxis;
        else if (s.yaxis == 2)
            s.yaxis = axes.y2axis;
    }
}
function processData() {
    // Convert each series' raw data into the flat `datapoints` format
    // (points packed as [x, y, extra...] with `pointsize` slots each)
    // and record per-axis data extents for autoscaling.
    var topSentry = Number.POSITIVE_INFINITY,
        bottomSentry = Number.NEGATIVE_INFINITY,
        axis, insertSteps, // bug fix: declared here; previously leaked as implicit globals
        i, j, k, m, length,
        s, points, ps, x, y;
    for (axis in axes) {
        axes[axis].datamin = topSentry;
        axes[axis].datamax = bottomSentry;
        axes[axis].min = options[axis].min;
        axes[axis].max = options[axis].max;
        axes[axis].used = false;
    }
    // widen the axis' recorded data range to include [min, max]
    function updateAxis(axis, min, max) {
        if (min < axis.datamin)
            axis.datamin = min;
        if (max > axis.datamax)
            axis.datamax = max;
    }
    for (i = 0; i < series.length; ++i) {
        s = series[i];
        s.datapoints = { points: [] };
        executeHooks(hooks.processRawData, [ s, s.data, s.datapoints ]);
    }
    // first pass: clean and copy data
    for (i = 0; i < series.length; ++i) {
        s = series[i];
        if (s.datapoints.pointsize != null)
            continue; // already filled in (e.g. by a plugin hook)
        var data = s.data, format = [], p;
        // determine the point size; bars carry a third slot that
        // defaults to 0 (the bar base)
        if (s.bars.show) {
            s.datapoints.pointsize = 3;
            format.push({ d: 0 });
        }
        else
            s.datapoints.pointsize = 2;
        ps = s.datapoints.pointsize;
        points = s.datapoints.points;
        insertSteps = s.lines.show && s.lines.steps;
        s.xaxis.used = s.yaxis.used = true;
        for (j = k = 0; j < data.length; ++j, k += ps) {
            p = data[j];
            if (p != null) {
                x = p[0];
                y = p[1];
            }
            else
                y = x = null;
            if (x != null) {
                x = +x; // convert to number
                if (isNaN(x))
                    x = null;
            }
            if (y != null) {
                y = +y; // convert to number
                if (isNaN(y))
                    y = null;
            }
            // check validity of point, making sure both are cleared
            if (x == null && y != null) {
                // extract min/max info before we whack
                updateAxis(s.yaxis, y, y);
                y = null;
            }
            if (y == null && x != null) {
                updateAxis(s.xaxis, x, x);
                x = null;
            }
            // for step lines, insert an intermediate point carrying
            // the previous y at the new x
            if (insertSteps && x != null && k > 0
                && points[k - ps] != null
                && points[k - ps] != x && points[k - ps + 1] != y) {
                points[k + 1] = points[k - ps + 1];
                points[k] = x;
                // copy the remaining fields from the real point
                for (m = 2; m < ps; ++m)
                    points[k + m] = p[m] == null ? format[m-2].d : p[m];
                k += ps;
            }
            for (m = 2; m < ps; ++m)
                points[k + m] = p == null || p[m] == null ? format[m-2].d : p[m];
            points[k] = x;
            points[k + 1] = y;
        }
    }
    for (i = 0; i < series.length; ++i) {
        s = series[i];
        executeHooks(hooks.processDatapoints, [ s, s.datapoints]);
    }
    // second pass: find datamax/datamin for auto-scaling
    for (i = 0; i < series.length; ++i) {
        s = series[i];
        points = s.datapoints.points;
        ps = s.datapoints.pointsize;
        var xmin = topSentry, ymin = topSentry,
            xmax = bottomSentry, ymax = bottomSentry;
        for (j = 0; j < points.length; j += ps) {
            x = points[j];
            if (x == null)
                continue;
            if (x < xmin)
                xmin = x;
            if (x > xmax)
                xmax = x;
            y = points[j + 1];
            if (y < ymin)
                ymin = y;
            if (y > ymax)
                ymax = y;
        }
        if (s.bars.show) {
            // make sure we got room for the bar on the dancing floor
            var delta = s.bars.align == "left" ? 0 : -s.bars.barWidth/2;
            if (s.bars.horizontal) {
                ymin += delta;
                ymax += delta + s.bars.barWidth;
            }
            else {
                xmin += delta;
                xmax += delta + s.bars.barWidth;
            }
        }
        updateAxis(s.xaxis, xmin, xmax);
        updateAxis(s.yaxis, ymin, ymax);
    }
}
function constructCanvas() {
    // Build the main plot canvas plus a transparent overlay canvas
    // (used for interactive drawing on top of the plot) inside target.
    function makeCanvas(width, height) {
        var c = document.createElement('canvas');
        c.width = width;
        c.height = height;
        if ($.browser.msie) // excanvas hack
            c = window.G_vmlCanvasManager.initElement(c);
        return c;
    }
    canvasWidth = target.width();
    canvasHeight = target.height();
    target.html(""); // clear target
    if (target.css("position") == 'static')
        target.css("position", "relative"); // for positioning labels and overlay
    if (canvasWidth <= 0 || canvasHeight <= 0)
        throw "Invalid dimensions for plot, width = " + canvasWidth + ", height = " + canvasHeight;
    if ($.browser.msie) // excanvas hack
        window.G_vmlCanvasManager.init_(document); // make sure everything is setup
    // the canvas
    canvas = $(makeCanvas(canvasWidth, canvasHeight)).appendTo(target).get(0);
    ctx = canvas.getContext("2d");
    // overlay canvas for interactive features
    overlay = $(makeCanvas(canvasWidth, canvasHeight)).css({ position: 'absolute', left: 0, top: 0 }).appendTo(target).get(0);
    octx = overlay.getContext("2d");
    // (a stray octx.stroke() was removed here: stroking a freshly
    // created context's empty path is a no-op)
}
function bindEvents() {
    // The canvas is part of the event holder too because IE 7
    // sometimes gets the stacking order wrong otherwise.
    eventHolder = $([overlay, canvas]);
    // hook up interactive behavior according to the options
    var selecting = options.selection.mode != null;
    if (selecting || options.grid.hoverable)
        eventHolder.mousemove(onMouseMove);
    if (selecting)
        eventHolder.mousedown(onMouseDown);
    if (options.grid.clickable)
        eventHolder.click(onClick);
    executeHooks(hooks.bindEvents, [eventHolder]);
}
function setupGrid() {
    // Configure every axis (range, ticks, coordinate transforms),
    // then compute spacing and render labels and the legend.
    function setupAxis(axis, axisOptions) {
        setRange(axis, axisOptions);
        prepareTickGeneration(axis, axisOptions);
        setTicks(axis, axisOptions);
        // Attach transformation helpers: p2c maps a data point to a
        // canvas offset, c2p maps a canvas offset back to data space.
        if (axis == axes.xaxis || axis == axes.x2axis) {
            axis.p2c = function (p) { return (p - axis.min) * axis.scale; };
            axis.c2p = function (c) { return axis.min + c / axis.scale; };
        }
        else {
            // y axes grow upwards while canvas coordinates grow down
            axis.p2c = function (p) { return (axis.max - p) * axis.scale; };
            axis.c2p = function (c) { return axis.max - c / axis.scale; };
        }
    }
    var name;
    for (name in axes)
        setupAxis(axes[name], options[name]);
    setSpacing();
    insertLabels();
    insertLegend();
}
function setRange(axis, axisOptions) {
    // Settle axis.min/axis.max: explicit options win, otherwise the
    // data extent is used, widened by the autoscale margin.
    var min = axisOptions.min != null ? +axisOptions.min : axis.datamin;
    var max = axisOptions.max != null ? +axisOptions.max : axis.datamax;
    // degenerate case: no data at all
    if (min == Number.POSITIVE_INFINITY)
        min = 0;
    if (max == Number.NEGATIVE_INFINITY)
        max = 1;
    if (max - min == 0.0) {
        // degenerate span; widen it so min != max
        var widen = max == 0 ? 1 : 0.01;
        if (axisOptions.min == null)
            min -= widen;
        // always widen max if we couldn't widen min, otherwise we
        // would still end up with min == max which doesn't work
        if (axisOptions.max == null || axisOptions.min != null)
            max += widen;
    }
    else {
        // consider autoscaling
        var margin = axisOptions.autoscaleMargin;
        if (margin != null) {
            if (axisOptions.min == null) {
                min -= (max - min) * margin;
                // don't cross zero if all values are positive
                if (min < 0 && axis.datamin >= 0)
                    min = 0;
            }
            if (axisOptions.max == null) {
                max += (max - min) * margin;
                // symmetric rule: don't cross zero from below
                if (max > 0 && axis.datamax <= 0)
                    max = 0;
            }
        }
    }
    axis.min = min;
    axis.max = max;
}
function prepareTickGeneration(axis, axisOptions) {
    // Choose a tick size, tick generator and label formatter for the
    // axis, based on its range and options ("time" mode vs numbers).
    // estimate number of ticks
    var noTicks;
    if (typeof axisOptions.ticks == "number" && axisOptions.ticks > 0)
        noTicks = axisOptions.ticks;
    else if (axis == axes.xaxis || axis == axes.x2axis)
        noTicks = canvasWidth / 100;
    else
        noTicks = canvasHeight / 60;
    var delta = (axis.max - axis.min) / noTicks;
    var size, generator, unit, formatter, i, magn, norm;
    if (axisOptions.mode == "time") {
        // pretty handling of time
        // map of app. size of time units in milliseconds
        var timeUnitSize = {
            "second": 1000,
            "minute": 60 * 1000,
            "hour": 60 * 60 * 1000,
            "day": 24 * 60 * 60 * 1000,
            "month": 30 * 24 * 60 * 60 * 1000,
            "year": 365.2425 * 24 * 60 * 60 * 1000
        };
        // the allowed tick sizes, after 1 year we use
        // an integer algorithm
        var spec = [
            [1, "second"], [2, "second"], [5, "second"], [10, "second"],
            [30, "second"],
            [1, "minute"], [2, "minute"], [5, "minute"], [10, "minute"],
            [30, "minute"],
            [1, "hour"], [2, "hour"], [4, "hour"],
            [8, "hour"], [12, "hour"],
            [1, "day"], [2, "day"], [3, "day"],
            [0.25, "month"], [0.5, "month"], [1, "month"],
            [2, "month"], [3, "month"], [6, "month"],
            [1, "year"]
        ];
        var minSize = 0;
        if (axisOptions.minTickSize != null) {
            // NOTE(review): this branch tests tickSize rather than
            // minTickSize; kept as-is to preserve behavior - confirm
            // against upstream before changing
            if (typeof axisOptions.tickSize == "number")
                minSize = axisOptions.tickSize;
            else
                minSize = axisOptions.minTickSize[0] * timeUnitSize[axisOptions.minTickSize[1]];
        }
        // pick the first spec entry whose size is closest to delta
        // while respecting the minimum size
        for (i = 0; i < spec.length - 1; ++i)
            if (delta < (spec[i][0] * timeUnitSize[spec[i][1]]
                         + spec[i + 1][0] * timeUnitSize[spec[i + 1][1]]) / 2
                && spec[i][0] * timeUnitSize[spec[i][1]] >= minSize)
                break;
        size = spec[i][0];
        unit = spec[i][1];
        // special-case the possibility of several years
        if (unit == "year") {
            magn = Math.pow(10, Math.floor(Math.log(delta / timeUnitSize.year) / Math.LN10));
            norm = (delta / timeUnitSize.year) / magn;
            if (norm < 1.5)
                size = 1;
            else if (norm < 3)
                size = 2;
            else if (norm < 7.5)
                size = 5;
            else
                size = 10;
            size *= magn;
        }
        if (axisOptions.tickSize) {
            size = axisOptions.tickSize[0];
            unit = axisOptions.tickSize[1];
        }
        generator = function(axis) {
            var ticks = [],
                tickSize = axis.tickSize[0], unit = axis.tickSize[1],
                d = new Date(axis.min);
            var step = tickSize * timeUnitSize[unit];
            // round the start down to a whole multiple of the tick size
            if (unit == "second")
                d.setUTCSeconds(floorInBase(d.getUTCSeconds(), tickSize));
            if (unit == "minute")
                d.setUTCMinutes(floorInBase(d.getUTCMinutes(), tickSize));
            if (unit == "hour")
                d.setUTCHours(floorInBase(d.getUTCHours(), tickSize));
            if (unit == "month")
                d.setUTCMonth(floorInBase(d.getUTCMonth(), tickSize));
            if (unit == "year")
                d.setUTCFullYear(floorInBase(d.getUTCFullYear(), tickSize));
            // reset smaller components
            d.setUTCMilliseconds(0);
            if (step >= timeUnitSize.minute)
                d.setUTCSeconds(0);
            if (step >= timeUnitSize.hour)
                d.setUTCMinutes(0);
            if (step >= timeUnitSize.day)
                d.setUTCHours(0);
            if (step >= timeUnitSize.day * 4)
                d.setUTCDate(1);
            if (step >= timeUnitSize.year)
                d.setUTCMonth(0);
            var carry = 0, v = Number.NaN, prev;
            do {
                prev = v;
                v = d.getTime();
                ticks.push({ v: v, label: axis.tickFormatter(v, axis) });
                if (unit == "month") {
                    if (tickSize < 1) {
                        // a bit complicated - we'll divide the month
                        // up but we need to take care of fractions
                        // so we don't end up in the middle of a day
                        d.setUTCDate(1);
                        var start = d.getTime();
                        d.setUTCMonth(d.getUTCMonth() + 1);
                        var end = d.getTime();
                        d.setTime(v + carry * timeUnitSize.hour + (end - start) * tickSize);
                        carry = d.getUTCHours();
                        d.setUTCHours(0);
                    }
                    else
                        d.setUTCMonth(d.getUTCMonth() + tickSize);
                }
                else if (unit == "year") {
                    d.setUTCFullYear(d.getUTCFullYear() + tickSize);
                }
                else
                    d.setTime(v + step);
            } while (v < axis.max && v != prev);
            return ticks;
        };
        formatter = function (v, axis) {
            var d = new Date(v);
            // first check global format
            if (axisOptions.timeformat != null)
                return $.plot.formatDate(d, axisOptions.timeformat, axisOptions.monthNames);
            // bug fix: fmt is now declared locally instead of leaking
            // into the global scope
            var fmt;
            var t = axis.tickSize[0] * timeUnitSize[axis.tickSize[1]];
            var span = axis.max - axis.min;
            if (t < timeUnitSize.minute)
                fmt = "%h:%M:%S";
            else if (t < timeUnitSize.day) {
                if (span < 2 * timeUnitSize.day)
                    fmt = "%h:%M";
                else
                    fmt = "%b %d %h:%M";
            }
            else if (t < timeUnitSize.month)
                fmt = "%b %d";
            else if (t < timeUnitSize.year) {
                if (span < timeUnitSize.year)
                    fmt = "%b";
                else
                    fmt = "%b %y";
            }
            else
                fmt = "%y";
            return $.plot.formatDate(d, fmt, axisOptions.monthNames);
        };
    }
    else {
        // pretty rounding of base-10 numbers
        var maxDec = axisOptions.tickDecimals;
        var dec = -Math.floor(Math.log(delta) / Math.LN10);
        if (maxDec != null && dec > maxDec)
            dec = maxDec;
        magn = Math.pow(10, -dec);
        norm = delta / magn; // norm is between 1.0 and 10.0
        if (norm < 1.5)
            size = 1;
        else if (norm < 3) {
            size = 2;
            // special case for 2.5, requires an extra decimal
            if (norm > 2.25 && (maxDec == null || dec + 1 <= maxDec)) {
                size = 2.5;
                ++dec;
            }
        }
        else if (norm < 7.5)
            size = 5;
        else
            size = 10;
        size *= magn;
        if (axisOptions.minTickSize != null && size < axisOptions.minTickSize)
            size = axisOptions.minTickSize;
        if (axisOptions.tickSize != null)
            size = axisOptions.tickSize;
        axis.tickDecimals = Math.max(0, (maxDec != null) ? maxDec : dec);
        generator = function (axis) {
            var ticks = [];
            // spew out all possible ticks
            var start = floorInBase(axis.min, axis.tickSize),
                i = 0, v = Number.NaN, prev;
            do {
                prev = v;
                v = start + i * axis.tickSize;
                ticks.push({ v: v, label: axis.tickFormatter(v, axis) });
                ++i;
            } while (v < axis.max && v != prev);
            return ticks;
        };
        formatter = function (v, axis) {
            return v.toFixed(axis.tickDecimals);
        };
    }
    axis.tickSize = unit ? [size, unit] : size;
    axis.tickGenerator = generator;
    if ($.isFunction(axisOptions.tickFormatter))
        axis.tickFormatter = function (v, axis) { return "" + axisOptions.tickFormatter(v, axis); };
    else
        axis.tickFormatter = formatter;
    if (axisOptions.labelWidth != null)
        axis.labelWidth = axisOptions.labelWidth;
    if (axisOptions.labelHeight != null)
        axis.labelHeight = axisOptions.labelHeight;
}
function setTicks(axis, axisOptions) {
    // Populate axis.ticks from the options: auto-generated, a tick
    // count, an explicit list, or a generator function.
    axis.ticks = [];
    if (!axis.used)
        return; // axis not referenced by any series
    if (axisOptions.ticks == null)
        axis.ticks = axis.tickGenerator(axis);
    else if (typeof axisOptions.ticks == "number") {
        if (axisOptions.ticks > 0)
            axis.ticks = axis.tickGenerator(axis);
    }
    else if (axisOptions.ticks) {
        var ticks = axisOptions.ticks;
        if ($.isFunction(ticks))
            // generate the ticks
            ticks = ticks({ min: axis.min, max: axis.max });
        // clean up the user-supplied ticks, copy them over; entries
        // may be plain values or [value, label] pairs
        var i, v;
        for (i = 0; i < ticks.length; ++i) {
            var label = null;
            var t = ticks[i];
            if (typeof t == "object") {
                v = t[0];
                if (t.length > 1)
                    label = t[1];
            }
            else
                v = t;
            if (label == null)
                label = axis.tickFormatter(v, axis);
            axis.ticks[i] = { v: v, label: label };
        }
    }
    if (axisOptions.autoscaleMargin != null && axis.ticks.length > 0) {
        // snap to ticks
        if (axisOptions.min == null)
            axis.min = Math.min(axis.min, axis.ticks[0].v);
        // bug fix: snapping max outward to the last tick requires
        // Math.max; Math.min would shrink the range instead
        if (axisOptions.max == null && axis.ticks.length > 1)
            axis.max = Math.max(axis.max, axis.ticks[axis.ticks.length - 1].v);
    }
}
function setSpacing() {
    // Measure tick label dimensions, derive the plot offsets (padding
    // between canvas edge and grid) and the per-axis scale factors.
    function measureXLabels(axis) {
        // to avoid measuring the widths of the labels, we
        // construct fixed-size boxes and put the labels inside
        // them, we don't need the exact figures and the
        // fixed-size box content is easy to center
        if (axis.labelWidth == null)
            axis.labelWidth = canvasWidth / 6;
        // measure x label heights
        if (axis.labelHeight == null) {
            // bug fix: i, labels and l are now declared locally;
            // previously they leaked as implicit globals
            var i, labels = [], l;
            for (i = 0; i < axis.ticks.length; ++i) {
                l = axis.ticks[i].label;
                if (l)
                    labels.push('<div class="tickLabel" style="float:left;width:' + axis.labelWidth + 'px">' + l + '</div>');
            }
            axis.labelHeight = 0;
            if (labels.length > 0) {
                var dummyDiv = $('<div style="position:absolute;top:-10000px;width:10000px;font-size:smaller">'
                                 + labels.join("") + '<div style="clear:left"></div></div>').appendTo(target);
                axis.labelHeight = dummyDiv.height();
                dummyDiv.remove();
            }
        }
    }
    function measureYLabels(axis) {
        if (axis.labelWidth == null || axis.labelHeight == null) {
            var i, labels = [], l;
            // calculate y label dimensions
            for (i = 0; i < axis.ticks.length; ++i) {
                l = axis.ticks[i].label;
                if (l)
                    labels.push('<div class="tickLabel">' + l + '</div>');
            }
            if (labels.length > 0) {
                var dummyDiv = $('<div style="position:absolute;top:-10000px;font-size:smaller">'
                                 + labels.join("") + '</div>').appendTo(target);
                if (axis.labelWidth == null)
                    axis.labelWidth = dummyDiv.width();
                if (axis.labelHeight == null)
                    axis.labelHeight = dummyDiv.find("div").height();
                dummyDiv.remove();
            }
            if (axis.labelWidth == null)
                axis.labelWidth = 0;
            if (axis.labelHeight == null)
                axis.labelHeight = 0;
        }
    }
    measureXLabels(axes.xaxis);
    measureYLabels(axes.yaxis);
    measureXLabels(axes.x2axis);
    measureYLabels(axes.y2axis);
    // get the most space needed around the grid for things
    // that may stick out
    var i, maxOutset = options.grid.borderWidth; // bug fix: i declared (was implicit global)
    for (i = 0; i < series.length; ++i)
        maxOutset = Math.max(maxOutset, 2 * (series[i].points.radius + series[i].points.lineWidth/2));
    plotOffset.left = plotOffset.right = plotOffset.top = plotOffset.bottom = maxOutset;
    var margin = options.grid.labelMargin + options.grid.borderWidth;
    if (axes.xaxis.labelHeight > 0)
        plotOffset.bottom = Math.max(maxOutset, axes.xaxis.labelHeight + margin);
    if (axes.yaxis.labelWidth > 0)
        plotOffset.left = Math.max(maxOutset, axes.yaxis.labelWidth + margin);
    if (axes.x2axis.labelHeight > 0)
        plotOffset.top = Math.max(maxOutset, axes.x2axis.labelHeight + margin);
    if (axes.y2axis.labelWidth > 0)
        plotOffset.right = Math.max(maxOutset, axes.y2axis.labelWidth + margin);
    plotWidth = canvasWidth - plotOffset.left - plotOffset.right;
    plotHeight = canvasHeight - plotOffset.bottom - plotOffset.top;
    // precompute how much the axis is scaling a point in canvas space
    axes.xaxis.scale = plotWidth / (axes.xaxis.max - axes.xaxis.min);
    axes.yaxis.scale = plotHeight / (axes.yaxis.max - axes.yaxis.min);
    axes.x2axis.scale = plotWidth / (axes.x2axis.max - axes.x2axis.min);
    axes.y2axis.scale = plotHeight / (axes.y2axis.max - axes.y2axis.min);
}
function draw() {
    // Repaint everything: the grid first, then every series on top
    // in declaration order.
    drawGrid();
    var total = series.length;
    for (var idx = 0; idx < total; ++idx)
        drawSeries(series[idx]);
}
function extractRange(ranges, coord) {
    // Pull the from/to range for the given coordinate ("x" or "y")
    // out of a ranges spec, resolving which axis object it refers to.
    var primary = coord + "axis",
        secondary = coord + "2axis",
        axis, from, to;
    if (ranges[primary]) {
        axis = axes[primary];
        from = ranges[primary].from;
        to = ranges[primary].to;
    }
    else if (ranges[secondary]) {
        axis = axes[secondary];
        from = ranges[secondary].from;
        to = ranges[secondary].to;
    }
    else {
        // old-style "x1"/"x2" keys - backwards compatibility, to be
        // removed in future
        axis = axes[primary];
        from = ranges[coord + "1"];
        to = ranges[coord + "2"];
    }
    // auto-reverse as an added bonus: swap endpoints given backwards
    if (from != null && to != null && from > to)
        return { from: to, to: from, axis: axis };
    return { from: from, to: to, axis: axis };
}
function drawGrid() {
    // Paint the background, markings, tick grid lines and the outer
    // border onto the main canvas.
    var i;
    ctx.save();
    ctx.clearRect(0, 0, canvasWidth, canvasHeight);
    ctx.translate(plotOffset.left, plotOffset.top);
    // draw background, if any
    if (options.grid.backgroundColor) {
        ctx.fillStyle = getColorOrGradient(options.grid.backgroundColor, plotHeight, 0, "rgba(255, 255, 255, 0)");
        ctx.fillRect(0, 0, plotWidth, plotHeight);
    }
    // draw markings
    var markings = options.grid.markings;
    if (markings) {
        if ($.isFunction(markings))
            // xmin etc. are backwards-compatible, to be removed in future
            markings = markings({ xmin: axes.xaxis.min, xmax: axes.xaxis.max, ymin: axes.yaxis.min, ymax: axes.yaxis.max, xaxis: axes.xaxis, yaxis: axes.yaxis, x2axis: axes.x2axis, y2axis: axes.y2axis });
        for (i = 0; i < markings.length; ++i) {
            var m = markings[i],
                xrange = extractRange(m, "x"),
                yrange = extractRange(m, "y");
            // fill in missing endpoints with the full axis range
            if (xrange.from == null)
                xrange.from = xrange.axis.min;
            if (xrange.to == null)
                xrange.to = xrange.axis.max;
            if (yrange.from == null)
                yrange.from = yrange.axis.min;
            if (yrange.to == null)
                yrange.to = yrange.axis.max;
            // clip: skip markings entirely outside the plot
            if (xrange.to < xrange.axis.min || xrange.from > xrange.axis.max ||
                yrange.to < yrange.axis.min || yrange.from > yrange.axis.max)
                continue;
            xrange.from = Math.max(xrange.from, xrange.axis.min);
            xrange.to = Math.min(xrange.to, xrange.axis.max);
            yrange.from = Math.max(yrange.from, yrange.axis.min);
            yrange.to = Math.min(yrange.to, yrange.axis.max);
            if (xrange.from == xrange.to && yrange.from == yrange.to)
                continue;
            // then draw, in canvas coordinates
            xrange.from = xrange.axis.p2c(xrange.from);
            xrange.to = xrange.axis.p2c(xrange.to);
            yrange.from = yrange.axis.p2c(yrange.from);
            yrange.to = yrange.axis.p2c(yrange.to);
            if (xrange.from == xrange.to || yrange.from == yrange.to) {
                // degenerate in one dimension: draw a line
                ctx.strokeStyle = m.color || options.grid.markingsColor;
                ctx.beginPath();
                ctx.lineWidth = m.lineWidth || options.grid.markingsLineWidth;
                ctx.moveTo(xrange.from, yrange.from);
                ctx.lineTo(xrange.to, yrange.to);
                ctx.stroke();
            }
            else {
                // fill area
                ctx.fillStyle = m.color || options.grid.markingsColor;
                ctx.fillRect(xrange.from, yrange.to,
                             xrange.to - xrange.from,
                             yrange.from - yrange.to);
            }
        }
    }
    // draw the inner grid
    ctx.lineWidth = 1;
    ctx.strokeStyle = options.grid.tickColor;
    ctx.beginPath();
    var v, axis = axes.xaxis;
    for (i = 0; i < axis.ticks.length; ++i) {
        v = axis.ticks[i].v;
        // consistency fix: was axes.xaxis.max (same value here, but
        // the parallel loops below all use axis.max)
        if (v <= axis.min || v >= axis.max)
            continue; // skip those lying on the axes
        ctx.moveTo(Math.floor(axis.p2c(v)) + ctx.lineWidth/2, 0);
        ctx.lineTo(Math.floor(axis.p2c(v)) + ctx.lineWidth/2, plotHeight);
    }
    axis = axes.yaxis;
    for (i = 0; i < axis.ticks.length; ++i) {
        v = axis.ticks[i].v;
        if (v <= axis.min || v >= axis.max)
            continue;
        ctx.moveTo(0, Math.floor(axis.p2c(v)) + ctx.lineWidth/2);
        ctx.lineTo(plotWidth, Math.floor(axis.p2c(v)) + ctx.lineWidth/2);
    }
    // secondary axes get short tick marks instead of full grid lines
    axis = axes.x2axis;
    for (i = 0; i < axis.ticks.length; ++i) {
        v = axis.ticks[i].v;
        if (v <= axis.min || v >= axis.max)
            continue;
        ctx.moveTo(Math.floor(axis.p2c(v)) + ctx.lineWidth/2, -5);
        ctx.lineTo(Math.floor(axis.p2c(v)) + ctx.lineWidth/2, 5);
    }
    axis = axes.y2axis;
    for (i = 0; i < axis.ticks.length; ++i) {
        v = axis.ticks[i].v;
        if (v <= axis.min || v >= axis.max)
            continue;
        ctx.moveTo(plotWidth-5, Math.floor(axis.p2c(v)) + ctx.lineWidth/2);
        ctx.lineTo(plotWidth+5, Math.floor(axis.p2c(v)) + ctx.lineWidth/2);
    }
    ctx.stroke();
    if (options.grid.borderWidth) {
        // draw border
        var bw = options.grid.borderWidth;
        ctx.lineWidth = bw;
        ctx.strokeStyle = options.grid.borderColor;
        ctx.strokeRect(-bw/2, -bw/2, plotWidth + bw, plotHeight + bw);
    }
    ctx.restore();
}
function insertLabels() {
    // Re-render all tick labels as absolutely positioned divs layered
    // over the plot; any previous label container is removed first.
    target.find(".tickLabels").remove();
    var html = ['<div class="tickLabels" style="font-size:smaller;color:' + options.grid.color + '">'];
    // push one generated div per labeled, in-range tick of the axis
    function addLabels(axis, labelGenerator) {
        for (var i = 0; i < axis.ticks.length; ++i) {
            var tick = axis.ticks[i];
            if (!tick.label || tick.v < axis.min || tick.v > axis.max)
                continue;
            html.push(labelGenerator(tick, axis));
        }
    }
    var margin = options.grid.labelMargin + options.grid.borderWidth;
    // bottom x axis: centered under the tick position
    addLabels(axes.xaxis, function (tick, axis) {
        return '<div style="position:absolute;top:' + (plotOffset.top + plotHeight + margin) + 'px;left:' + Math.round(plotOffset.left + axis.p2c(tick.v) - axis.labelWidth/2) + 'px;width:' + axis.labelWidth + 'px;text-align:center" class="tickLabel">' + tick.label + "</div>";
    });
    // left y axis: right-aligned beside the plot
    addLabels(axes.yaxis, function (tick, axis) {
        return '<div style="position:absolute;top:' + Math.round(plotOffset.top + axis.p2c(tick.v) - axis.labelHeight/2) + 'px;right:' + (plotOffset.right + plotWidth + margin) + 'px;width:' + axis.labelWidth + 'px;text-align:right" class="tickLabel">' + tick.label + "</div>";
    });
    // top x2 axis
    addLabels(axes.x2axis, function (tick, axis) {
        return '<div style="position:absolute;bottom:' + (plotOffset.bottom + plotHeight + margin) + 'px;left:' + Math.round(plotOffset.left + axis.p2c(tick.v) - axis.labelWidth/2) + 'px;width:' + axis.labelWidth + 'px;text-align:center" class="tickLabel">' + tick.label + "</div>";
    });
    // right y2 axis
    addLabels(axes.y2axis, function (tick, axis) {
        return '<div style="position:absolute;top:' + Math.round(plotOffset.top + axis.p2c(tick.v) - axis.labelHeight/2) + 'px;left:' + (plotOffset.left + plotWidth + margin) +'px;width:' + axis.labelWidth + 'px;text-align:left" class="tickLabel">' + tick.label + "</div>";
    });
    html.push('</div>');
    target.append(html.join(""));
}
function drawSeries(series) {
    // A series may combine several rendering styles; the draw order
    // (lines, then bars, then points) determines what ends up on top.
    if (series.lines.show)
        drawSeriesLines(series);
    if (series.bars.show)
        drawSeriesBars(series);
    if (series.points.show)
        drawSeriesPoints(series);
}
function drawSeriesLines(series) {
    // Render a series as a polyline (with optional shadow and fill),
    // clipping every segment against the axis ranges.
    function plotLine(datapoints, xoffset, yoffset, axisx, axisy) {
        var points = datapoints.points,
            ps = datapoints.pointsize,
            prevx = null, prevy = null;
        ctx.beginPath();
        for (var i = ps; i < points.length; i += ps) {
            var x1 = points[i - ps], y1 = points[i - ps + 1],
                x2 = points[i], y2 = points[i + 1];
            if (x1 == null || x2 == null)
                continue;
            // clip with ymin
            if (y1 <= y2 && y1 < axisy.min) {
                if (y2 < axisy.min)
                    continue; // line segment is outside
                // compute new intersection point
                x1 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1;
                y1 = axisy.min;
            }
            else if (y2 <= y1 && y2 < axisy.min) {
                if (y1 < axisy.min)
                    continue;
                x2 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1;
                y2 = axisy.min;
            }
            // clip with ymax
            if (y1 >= y2 && y1 > axisy.max) {
                if (y2 > axisy.max)
                    continue;
                x1 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1;
                y1 = axisy.max;
            }
            else if (y2 >= y1 && y2 > axisy.max) {
                if (y1 > axisy.max)
                    continue;
                x2 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1;
                y2 = axisy.max;
            }
            // clip with xmin
            if (x1 <= x2 && x1 < axisx.min) {
                // bug fix: this guard was missing, which made the
                // continue unconditional - every segment starting left
                // of the axis was dropped instead of clipped, and the
                // intersection math below was dead code (compare the
                // parallel clip branches above and below)
                if (x2 < axisx.min)
                    continue;
                y1 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1;
                x1 = axisx.min;
            }
            else if (x2 <= x1 && x2 < axisx.min) {
                if (x1 < axisx.min)
                    continue;
                y2 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1;
                x2 = axisx.min;
            }
            // clip with xmax
            if (x1 >= x2 && x1 > axisx.max) {
                if (x2 > axisx.max)
                    continue;
                y1 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1;
                x1 = axisx.max;
            }
            else if (x2 >= x1 && x2 > axisx.max) {
                if (x1 > axisx.max)
                    continue;
                y2 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1;
                x2 = axisx.max;
            }
            // start a new subpath when the segment doesn't connect to
            // the previous one
            if (x1 != prevx || y1 != prevy)
                ctx.moveTo(axisx.p2c(x1) + xoffset, axisy.p2c(y1) + yoffset);
            prevx = x2;
            prevy = y2;
            ctx.lineTo(axisx.p2c(x2) + xoffset, axisy.p2c(y2) + yoffset);
        }
        ctx.stroke();
    }
    // fill the area between the line and the x axis (y = 0, clamped
    // into the visible y range)
    function plotLineArea(datapoints, axisx, axisy) {
        var points = datapoints.points,
            ps = datapoints.pointsize,
            bottom = Math.min(Math.max(0, axisy.min), axisy.max),
            top, lastX = 0, areaOpen = false;
        for (var i = ps; i < points.length; i += ps) {
            var x1 = points[i - ps], y1 = points[i - ps + 1],
                x2 = points[i], y2 = points[i + 1];
            if (areaOpen && x1 != null && x2 == null) {
                // close area
                ctx.lineTo(axisx.p2c(lastX), axisy.p2c(bottom));
                ctx.fill();
                areaOpen = false;
                continue;
            }
            if (x1 == null || x2 == null)
                continue;
            // clip x values
            // clip with xmin
            if (x1 <= x2 && x1 < axisx.min) {
                if (x2 < axisx.min)
                    continue;
                y1 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1;
                x1 = axisx.min;
            }
            else if (x2 <= x1 && x2 < axisx.min) {
                if (x1 < axisx.min)
                    continue;
                y2 = (axisx.min - x1) / (x2 - x1) * (y2 - y1) + y1;
                x2 = axisx.min;
            }
            // clip with xmax
            if (x1 >= x2 && x1 > axisx.max) {
                if (x2 > axisx.max)
                    continue;
                y1 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1;
                x1 = axisx.max;
            }
            else if (x2 >= x1 && x2 > axisx.max) {
                if (x1 > axisx.max)
                    continue;
                y2 = (axisx.max - x1) / (x2 - x1) * (y2 - y1) + y1;
                x2 = axisx.max;
            }
            if (!areaOpen) {
                // open area
                ctx.beginPath();
                ctx.moveTo(axisx.p2c(x1), axisy.p2c(bottom));
                areaOpen = true;
            }
            // now first check the case where both is outside
            if (y1 >= axisy.max && y2 >= axisy.max) {
                ctx.lineTo(axisx.p2c(x1), axisy.p2c(axisy.max));
                ctx.lineTo(axisx.p2c(x2), axisy.p2c(axisy.max));
                lastX = x2;
                continue;
            }
            else if (y1 <= axisy.min && y2 <= axisy.min) {
                ctx.lineTo(axisx.p2c(x1), axisy.p2c(axisy.min));
                ctx.lineTo(axisx.p2c(x2), axisy.p2c(axisy.min));
                lastX = x2;
                continue;
            }
            // else it's a bit more complicated, there might
            // be two rectangles and two triangles we need to fill
            // in; to find these keep track of the current x values
            var x1old = x1, x2old = x2;
            // and clip the y values, without shortcutting
            // clip with ymin
            if (y1 <= y2 && y1 < axisy.min && y2 >= axisy.min) {
                x1 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1;
                y1 = axisy.min;
            }
            else if (y2 <= y1 && y2 < axisy.min && y1 >= axisy.min) {
                x2 = (axisy.min - y1) / (y2 - y1) * (x2 - x1) + x1;
                y2 = axisy.min;
            }
            // clip with ymax
            if (y1 >= y2 && y1 > axisy.max && y2 <= axisy.max) {
                x1 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1;
                y1 = axisy.max;
            }
            else if (y2 >= y1 && y2 > axisy.max && y1 <= axisy.max) {
                x2 = (axisy.max - y1) / (y2 - y1) * (x2 - x1) + x1;
                y2 = axisy.max;
            }
            // if the x value was changed we got a rectangle
            // to fill
            if (x1 != x1old) {
                if (y1 <= axisy.min)
                    top = axisy.min;
                else
                    top = axisy.max;
                ctx.lineTo(axisx.p2c(x1old), axisy.p2c(top));
                ctx.lineTo(axisx.p2c(x1), axisy.p2c(top));
            }
            // fill the triangles
            ctx.lineTo(axisx.p2c(x1), axisy.p2c(y1));
            ctx.lineTo(axisx.p2c(x2), axisy.p2c(y2));
            // fill the other rectangle if it's there
            if (x2 != x2old) {
                if (y2 <= axisy.min)
                    top = axisy.min;
                else
                    top = axisy.max;
                ctx.lineTo(axisx.p2c(x2), axisy.p2c(top));
                ctx.lineTo(axisx.p2c(x2old), axisy.p2c(top));
            }
            lastX = Math.max(x2, x2old);
        }
        if (areaOpen) {
            ctx.lineTo(axisx.p2c(lastX), axisy.p2c(bottom));
            ctx.fill();
        }
    }
    ctx.save();
    ctx.translate(plotOffset.left, plotOffset.top);
    ctx.lineJoin = "round";
    var lw = series.lines.lineWidth,
        sw = series.shadowSize;
    // FIXME: consider another form of shadow when filling is turned on
    if (lw > 0 && sw > 0) {
        // draw shadow as a thick and thin line with transparency
        ctx.lineWidth = sw;
        ctx.strokeStyle = "rgba(0,0,0,0.1)";
        var xoffset = 1;
        plotLine(series.datapoints, xoffset, Math.sqrt((lw/2 + sw/2)*(lw/2 + sw/2) - xoffset*xoffset), series.xaxis, series.yaxis);
        ctx.lineWidth = sw/2;
        plotLine(series.datapoints, xoffset, Math.sqrt((lw/2 + sw/4)*(lw/2 + sw/4) - xoffset*xoffset), series.xaxis, series.yaxis);
    }
    ctx.lineWidth = lw;
    ctx.strokeStyle = series.color;
    var fillStyle = getFillStyle(series.lines, series.color, 0, plotHeight);
    if (fillStyle) {
        ctx.fillStyle = fillStyle;
        plotLineArea(series.datapoints, series.xaxis, series.yaxis);
    }
    if (lw > 0)
        plotLine(series.datapoints, 0, 0, series.xaxis, series.yaxis);
    ctx.restore();
}
// Draw a series as discrete circular points, with an optional two-pass
// drop shadow when both a line width and a shadow size are configured.
function drawSeriesPoints(series) {
// Stroke (and optionally fill) one circle per data point, skipping null
// points and points outside the visible axis ranges.
function plotPoints(datapoints, radius, fillStyle, offset, circumference, axisx, axisy) {
var points = datapoints.points, ps = datapoints.pointsize;
for (var i = 0; i < points.length; i += ps) {
var x = points[i], y = points[i + 1];
if (x == null || x < axisx.min || x > axisx.max || y < axisy.min || y > axisy.max)
continue;
ctx.beginPath();
// "offset" shifts the circle downward; used by the shadow passes below.
ctx.arc(axisx.p2c(x), axisy.p2c(y) + offset, radius, 0, circumference, true);
if (fillStyle) {
ctx.fillStyle = fillStyle;
ctx.fill();
}
ctx.stroke();
}
}
ctx.save();
ctx.translate(plotOffset.left, plotOffset.top);
var lw = series.lines.lineWidth,
sw = series.shadowSize,
radius = series.points.radius;
if (lw > 0 && sw > 0) {
// draw shadow in two steps: a faint pass offset further below the
// point, then a darker pass closer to it, for a soft falloff
var w = sw / 2;
ctx.lineWidth = w;
ctx.strokeStyle = "rgba(0,0,0,0.1)";
plotPoints(series.datapoints, radius, null, w + w/2, 2 * Math.PI,
series.xaxis, series.yaxis);
ctx.strokeStyle = "rgba(0,0,0,0.2)";
plotPoints(series.datapoints, radius, null, w/2, 2 * Math.PI,
series.xaxis, series.yaxis);
}
// the actual points, filled per the series' point options
ctx.lineWidth = lw;
ctx.strokeStyle = series.color;
plotPoints(series.datapoints, radius,
getFillStyle(series.points, series.color), 0, 2 * Math.PI,
series.xaxis, series.yaxis);
ctx.restore();
}
// Draw a single bar: clip it to the axis ranges, fill it via
// fillStyleCallback (if given), then stroke only the edges that were not
// clipped away. "b" is the bar's base value; "horizontal" swaps the roles
// of the x/y coordinates. "c" is the canvas context to draw on.
function drawBar(x, y, b, barLeft, barRight, offset, fillStyleCallback, axisx, axisy, c, horizontal) {
var left, right, bottom, top,
drawLeft, drawRight, drawTop, drawBottom,
tmp;
if (horizontal) {
// the edge at the base value is not drawn (shared with the axis)
drawBottom = drawRight = drawTop = true;
drawLeft = false;
left = b;
right = x;
top = y + barLeft;
bottom = y + barRight;
// account for negative bars
if (right < left) {
tmp = right;
right = left;
left = tmp;
drawLeft = true;
drawRight = false;
}
}
else {
drawLeft = drawRight = drawTop = true;
drawBottom = false;
left = x + barLeft;
right = x + barRight;
bottom = b;
top = y;
// account for negative bars
if (top < bottom) {
tmp = top;
top = bottom;
bottom = tmp;
drawBottom = true;
drawTop = false;
}
}
// clip: bail out entirely if the bar is fully outside the plot area
if (right < axisx.min || left > axisx.max ||
top < axisy.min || bottom > axisy.max)
return;
// otherwise clamp each edge and suppress stroking of clipped edges
if (left < axisx.min) {
left = axisx.min;
drawLeft = false;
}
if (right > axisx.max) {
right = axisx.max;
drawRight = false;
}
if (bottom < axisy.min) {
bottom = axisy.min;
drawBottom = false;
}
if (top > axisy.max) {
top = axisy.max;
drawTop = false;
}
// convert from data coordinates to canvas pixels
left = axisx.p2c(left);
bottom = axisy.p2c(bottom);
right = axisx.p2c(right);
top = axisy.p2c(top);
// fill the bar
if (fillStyleCallback) {
c.beginPath();
c.moveTo(left, bottom);
c.lineTo(left, top);
c.lineTo(right, top);
c.lineTo(right, bottom);
c.fillStyle = fillStyleCallback(bottom, top);
c.fill();
}
// draw outline; moveTo stands in for lineTo on suppressed edges
if (drawLeft || drawRight || drawTop || drawBottom) {
c.beginPath();
// FIXME: inline moveTo is buggy with excanvas
c.moveTo(left, bottom + offset);
if (drawLeft)
c.lineTo(left, top + offset);
else
c.moveTo(left, top + offset);
if (drawTop)
c.lineTo(right, top + offset);
else
c.moveTo(right, top + offset);
if (drawRight)
c.lineTo(right, bottom + offset);
else
c.moveTo(right, bottom + offset);
if (drawBottom)
c.lineTo(left, bottom + offset);
else
c.moveTo(left, bottom + offset);
c.stroke();
}
}
// Draw a series as bars, one drawBar() call per non-null data point.
function drawSeriesBars(series) {
// points[i+2] carries the bar's base value (third slot of the triple).
function plotBars(datapoints, barLeft, barRight, offset, fillStyleCallback, axisx, axisy) {
var points = datapoints.points, ps = datapoints.pointsize;
for (var i = 0; i < points.length; i += ps) {
if (points[i] == null)
continue;
drawBar(points[i], points[i + 1], points[i + 2], barLeft, barRight, offset, fillStyleCallback, axisx, axisy, ctx, series.bars.horizontal);
}
}
ctx.save();
ctx.translate(plotOffset.left, plotOffset.top);
// FIXME: figure out a way to add shadows (for instance along the right edge)
ctx.lineWidth = series.bars.lineWidth;
ctx.strokeStyle = series.color;
// "left" alignment puts the bar's left edge on the data point; otherwise
// the bar is centered on it
var barLeft = series.bars.align == "left" ? 0 : -series.bars.barWidth/2;
var fillStyleCallback = series.bars.fill ? function (bottom, top) { return getFillStyle(series.bars, series.color, bottom, top); } : null;
plotBars(series.datapoints, barLeft, barLeft + series.bars.barWidth, 0, fillStyleCallback, series.xaxis, series.yaxis);
ctx.restore();
}
// Resolve the fill style for a series: null (no fill), an explicit
// color/gradient from fillColor, or a translucent variant of the
// series color.
function getFillStyle(filloptions, seriesColor, bottom, top) {
    if (!filloptions.fill)
        return null;

    if (filloptions.fillColor)
        return getColorOrGradient(filloptions.fillColor, bottom, top, seriesColor);

    // Derive a translucent version of the series color; a numeric "fill"
    // option is used directly as the alpha, otherwise default to 0.4.
    var translucent = parseColor(seriesColor);
    if (typeof filloptions.fill == "number")
        translucent.a = filloptions.fill;
    else
        translucent.a = 0.4;
    translucent.normalize();
    return translucent.toString();
}
// Build and insert the legend for all labeled series, either into
// options.legend.container or as an absolutely-positioned div inside the
// plot, placed per options.legend.position ("ne", "sw", ...).
function insertLegend() {
    target.find(".legend").remove();

    if (!options.legend.show)
        return;

    var fragments = [], rowStarted = false,
        lf = options.legend.labelFormatter, s, label;
    // BUGFIX: "i" was previously assigned without var, leaking it into the
    // enclosing scope.
    for (var i = 0; i < series.length; ++i) {
        s = series[i];
        label = s.label;
        if (!label)
            continue;

        // start a new table row every noColumns entries
        if (i % options.legend.noColumns == 0) {
            if (rowStarted)
                fragments.push('</tr>');
            fragments.push('<tr>');
            rowStarted = true;
        }

        if (lf)
            label = lf(label, s);

        fragments.push(
            '<td class="legendColorBox"><div style="border:1px solid ' + options.legend.labelBoxBorderColor + ';padding:1px"><div style="width:4px;height:0;border:5px solid ' + s.color + ';overflow:hidden"></div></div></td>' +
            '<td class="legendLabel">' + label + '</td>');
    }
    if (rowStarted)
        fragments.push('</tr>');

    if (fragments.length == 0)
        return;

    var table = '<table style="font-size:smaller;color:' + options.grid.color + '">' + fragments.join("") + '</table>';
    if (options.legend.container != null)
        $(options.legend.container).html(table);
    else {
        // translate the compass-point position into CSS offsets
        var pos = "",
            p = options.legend.position,
            m = options.legend.margin;
        if (m[0] == null)
            m = [m, m];
        if (p.charAt(0) == "n")
            pos += 'top:' + (m[1] + plotOffset.top) + 'px;';
        else if (p.charAt(0) == "s")
            pos += 'bottom:' + (m[1] + plotOffset.bottom) + 'px;';
        if (p.charAt(1) == "e")
            pos += 'right:' + (m[0] + plotOffset.right) + 'px;';
        else if (p.charAt(1) == "w")
            pos += 'left:' + (m[0] + plotOffset.left) + 'px;';
        var legend = $('<div class="legend">' + table.replace('style="', 'style="position:absolute;' + pos +';') + '</div>').appendTo(target);
        if (options.legend.backgroundOpacity != 0.0) {
            // put in the transparent background
            // separately to avoid blended labels and
            // label boxes
            var c = options.legend.backgroundColor;
            if (c == null) {
                var tmp;
                if (options.grid.backgroundColor && typeof options.grid.backgroundColor == "string")
                    tmp = options.grid.backgroundColor;
                else
                    tmp = extractColor(legend);
                c = parseColor(tmp).adjust(null, null, null, 1).toString();
            }
            var div = legend.children();
            $('<div style="position:absolute;width:' + div.width() + 'px;height:' + div.height() + 'px;' + pos +'background-color:' + c + ';"> </div>').prependTo(legend).css('opacity', options.legend.backgroundOpacity);
        }
    }
}
// interactive features
// State shared by the mouse, hover and selection handlers below.
var lastMousePos = { pageX: null, pageY: null },
// current drag-selection rectangle, in plot-canvas pixel coordinates;
// (-1, -1) corners mean "unset"
selection = {
first: { x: -1, y: -1}, second: { x: -1, y: -1},
show: false,
active: false
},
highlights = [],
// set when a mouseup finished a selection, so the paired click is swallowed
clickIsMouseUp = false,
redrawTimeout = null,
hoverTimeout = null;
// Returns the data item the mouse is over (within
// options.grid.mouseActiveRadius pixels), or null if none is found.
// seriesFilter lets callers restrict the search, e.g. to hoverable or
// clickable series.
function findNearbyItem(mouseX, mouseY, seriesFilter) {
    var maxDistance = options.grid.mouseActiveRadius,
        lowestDistance = maxDistance * maxDistance + 1,
        item = null, i, j, ps;

    for (i = 0; i < series.length; ++i) {
        if (!seriesFilter(series[i]))
            continue;

        var s = series[i],
            axisx = s.xaxis,
            axisy = s.yaxis,
            points = s.datapoints.points,
            mx = axisx.c2p(mouseX), // precompute some stuff to make the loop faster
            my = axisy.c2p(mouseY),
            maxx = maxDistance / axisx.scale,
            maxy = maxDistance / axisy.scale;
        ps = s.datapoints.pointsize;

        if (s.lines.show || s.points.show) {
            for (j = 0; j < points.length; j += ps) {
                var x = points[j], y = points[j + 1];
                if (x == null)
                    continue;

                // For points and lines, the cursor must be within a
                // certain distance to the data point
                if (x - mx > maxx || x - mx < -maxx ||
                    y - my > maxy || y - my < -maxy)
                    continue;

                // We have to calculate distances in pixels, not in
                // data units, because the scales of the axes may be different
                var dx = Math.abs(axisx.p2c(x) - mouseX),
                    dy = Math.abs(axisy.p2c(y) - mouseY),
                    dist = dx * dx + dy * dy; // squared distance is enough for comparison
                if (dist < lowestDistance) {
                    lowestDistance = dist;
                    item = [i, j / ps];
                }
            }
        }

        if (s.bars.show && !item) { // no other point can be nearby
            var barLeft = s.bars.align == "left" ? 0 : -s.bars.barWidth/2,
                barRight = barLeft + s.bars.barWidth;
            for (j = 0; j < points.length; j += ps) {
                var x = points[j], y = points[j + 1], b = points[j + 2];
                if (x == null)
                    continue;

                // for a bar graph, the cursor must be inside the bar
                if (series[i].bars.horizontal ?
                    (mx <= Math.max(b, x) && mx >= Math.min(b, x) &&
                     my >= y + barLeft && my <= y + barRight) :
                    (mx >= x + barLeft && mx <= x + barRight &&
                     my >= Math.min(b, y) && my <= Math.max(b, y)))
                    item = [i, j / ps];
            }
        }
    }

    if (item) {
        i = item[0];
        j = item[1];
        // BUGFIX: ps previously held the pointsize of the *last* series
        // iterated, not the matched item's series; re-read it so the
        // datapoint slice has the correct width.
        ps = series[i].datapoints.pointsize;

        return { datapoint: series[i].datapoints.points.slice(j * ps, (j + 1) * ps),
                 dataIndex: j,
                 series: series[i],
                 seriesIndex: i };
    }

    return null;
}
// Track the mouse for hover events and live selection updates.
function onMouseMove(e) {
    // Remember the most recent position for deferred redraws.
    lastMousePos.pageX = e.pageX;
    lastMousePos.pageY = e.pageY;

    // Fire "plothover" on hoverable grids, skipping series that opted out.
    if (options.grid.hoverable) {
        triggerClickHoverEvent("plothover", lastMousePos, function (s) {
            return s["hoverable"] != false;
        });
    }

    // While a selection drag is in progress, keep listeners and the
    // rubber-band rectangle up to date.
    if (selection.active) {
        target.trigger("plotselecting", [ getSelection() ]);
        updateSelection(lastMousePos);
    }
}
// Begin a drag selection on left-button press; disables native text
// selection/drag while the drag is active and arms a one-shot mouseup.
function onMouseDown(e) {
if (e.which != 1) // only accept left-click
return;
// cancel out any text selections
document.body.focus();
// prevent text selection and drag in old-school browsers; original
// handlers are stashed in "workarounds" and restored on mouseup
if (document.onselectstart !== undefined && workarounds.onselectstart == null) {
workarounds.onselectstart = document.onselectstart;
document.onselectstart = function () { return false; };
}
if (document.ondrag !== undefined && workarounds.ondrag == null) {
workarounds.ondrag = document.ondrag;
document.ondrag = function () { return false; };
}
setSelectionPos(selection.first, e);
// nulling pageX makes updateSelection() ignore stale positions
lastMousePos.pageX = null;
selection.active = true;
$(document).one("mouseup", onSelectionMouseUp);
}
// Fire "plotclick" for clicks that did not just finish a drag selection.
function onClick(e) {
    // A mouseup that ended a selection also produces a click; swallow it.
    if (clickIsMouseUp) {
        clickIsMouseUp = false;
        return;
    }

    triggerClickHoverEvent("plotclick", e, function (s) {
        return s["clickable"] != false;
    });
}
/*
function userPositionInCanvasSpace(pos) {
return { x: parseInt(pos.x != null ? axes.xaxis.p2c(pos.x) : axes.x2axis.p2c(pos.x2)),
y: parseInt(pos.y != null ? axes.yaxis.p2c(pos.y) : axes.y2axis.p2c(pos.y2)) };
}
function positionInDivSpace(pos) {
var cpos = userPositionInCanvasSpace(pos);
return { x: cpos.x + plotOffset.left,
y: cpos.y + plotOffset.top };
}*/
// trigger click or hover event (they send the same parameters
// so we share their code)
// "pos" carries page coordinates plus the data-space coordinates on each
// axis in use; "item" is the nearby data item (or null).
function triggerClickHoverEvent(eventname, event, seriesFilter) {
var offset = eventHolder.offset(),
pos = { pageX: event.pageX, pageY: event.pageY },
canvasX = event.pageX - offset.left - plotOffset.left,
canvasY = event.pageY - offset.top - plotOffset.top;
// convert the canvas position to data coordinates per axis
if (axes.xaxis.used)
pos.x = axes.xaxis.c2p(canvasX);
if (axes.yaxis.used)
pos.y = axes.yaxis.c2p(canvasY);
if (axes.x2axis.used)
pos.x2 = axes.x2axis.c2p(canvasX);
if (axes.y2axis.used)
pos.y2 = axes.y2axis.c2p(canvasY);
var item = findNearbyItem(canvasX, canvasY, seriesFilter);
if (item) {
// fill in mouse pos for any listeners out there
item.pageX = parseInt(item.series.xaxis.p2c(item.datapoint[0]) + offset.left + plotOffset.left);
item.pageY = parseInt(item.series.yaxis.p2c(item.datapoint[1]) + offset.top + plotOffset.top);
}
if (options.grid.autoHighlight) {
// clear auto-highlights that belong to this event type and no longer
// match the item under the cursor
for (var i = 0; i < highlights.length; ++i) {
var h = highlights[i];
if (h.auto == eventname &&
!(item && h.series == item.series && h.point == item.datapoint))
unhighlight(h.series, h.point);
}
if (item)
highlight(item.series, item.datapoint, eventname);
}
target.trigger(eventname, [ pos, item ]);
}
// Coalesce overlay redraw requests: schedule at most one pending timer.
function triggerRedrawOverlay() {
    if (redrawTimeout)
        return;
    redrawTimeout = setTimeout(drawOverlay, 30);
}
// Redraw the interactive overlay canvas: highlights first, then the
// drag-selection rectangle. Runs from the timer set by
// triggerRedrawOverlay().
function drawOverlay() {
    redrawTimeout = null;

    // draw highlights
    octx.save();
    octx.clearRect(0, 0, canvasWidth, canvasHeight);
    octx.translate(plotOffset.left, plotOffset.top);

    var i, hi;
    for (i = 0; i < highlights.length; ++i) {
        hi = highlights[i];

        if (hi.series.bars.show)
            drawBarHighlight(hi.series, hi.point);
        else
            drawPointHighlight(hi.series, hi.point);
    }

    // draw selection
    if (selection.show && selectionIsSane()) {
        octx.strokeStyle = parseColor(options.selection.color).scale(null, null, null, 0.8).toString();
        octx.lineWidth = 1;
        // BUGFIX: this previously set lineJoin on "ctx" (the main plot
        // canvas) instead of the overlay context being drawn here.
        octx.lineJoin = "round";
        octx.fillStyle = parseColor(options.selection.color).scale(null, null, null, 0.4).toString();

        var x = Math.min(selection.first.x, selection.second.x),
            y = Math.min(selection.first.y, selection.second.y),
            w = Math.abs(selection.second.x - selection.first.x),
            h = Math.abs(selection.second.y - selection.first.y);

        octx.fillRect(x, y, w, h);
        octx.strokeRect(x, y, w, h);
    }
    octx.restore();

    executeHooks(hooks.drawOverlay, [octx]);
}
// Highlight a data point. "s" and "point" may be objects or indexes;
// "auto" tags highlights created automatically by hover/click events.
function highlight(s, point, auto) {
    if (typeof s == "number")
        s = series[s];
    if (typeof point == "number")
        point = s.data[point];

    var existing = indexOfHighlight(s, point);
    if (existing != -1) {
        // Already highlighted; a manual call upgrades an auto-highlight
        // to a sticky one.
        if (!auto)
            highlights[existing].auto = false;
        return;
    }

    highlights.push({ series: s, point: point, auto: auto });
    triggerRedrawOverlay();
}
// Remove a highlight; calling with no arguments clears all highlights.
function unhighlight(s, point) {
    if (s == null && point == null) {
        highlights = [];
        triggerRedrawOverlay();
        // BUGFIX: return here — previously execution fell through to a
        // pointless indexOfHighlight(null, null) lookup.
        return;
    }

    // Accept indexes as well as objects, mirroring highlight().
    if (typeof s == "number")
        s = series[s];
    if (typeof point == "number")
        point = s.data[point];

    var i = indexOfHighlight(s, point);
    if (i != -1) {
        highlights.splice(i, 1);
        triggerRedrawOverlay();
    }
}
// Find the index of the highlight entry matching the given series and
// point coordinates, or -1 if there is none.
function indexOfHighlight(s, p) {
    for (var idx = 0; idx < highlights.length; ++idx) {
        var entry = highlights[idx];
        var sameSeries = entry.series == s;
        var samePoint = entry.point[0] == p[0] && entry.point[1] == p[1];
        if (sameSeries && samePoint)
            return idx;
    }
    return -1;
}
// Highlight a point-style datum: draw a translucent ring around it on the
// overlay canvas, slightly larger than the point itself.
function drawPointHighlight(series, point) {
var x = point[0], y = point[1],
axisx = series.xaxis, axisy = series.yaxis;
if (x < axisx.min || x > axisx.max || y < axisy.min || y > axisy.max)
return;
var pointRadius = series.points.radius + series.points.lineWidth / 2;
octx.lineWidth = pointRadius;
// half-opacity version of the series color
octx.strokeStyle = parseColor(series.color).scale(1, 1, 1, 0.5).toString();
var radius = 1.5 * pointRadius;
octx.beginPath();
octx.arc(axisx.p2c(x), axisy.p2c(y), radius, 0, 2 * Math.PI, true);
octx.stroke();
}
// Highlight a bar datum: redraw the bar on the overlay canvas in a
// half-opacity version of the series color.
function drawBarHighlight(series, point) {
octx.lineWidth = series.bars.lineWidth;
octx.strokeStyle = parseColor(series.color).scale(1, 1, 1, 0.5).toString();
var fillStyle = parseColor(series.color).scale(1, 1, 1, 0.5).toString();
var barLeft = series.bars.align == "left" ? 0 : -series.bars.barWidth/2;
// point[2] is the bar base value; default to 0 if absent
drawBar(point[0], point[1], point[2] || 0, barLeft, barLeft + series.bars.barWidth,
0, function () { return fillStyle; }, series.xaxis, series.yaxis, octx, series.bars.horizontal);
}
// Convert the current pixel-space selection into data-space ranges, one
// {from, to} pair per axis in use. Returns null for degenerate selections.
function getSelection() {
if (!selectionIsSane())
return null;
// NOTE(review): y1 takes the max and y2 the min — presumably because
// canvas y coordinates grow downward, so this yields from <= to after
// c2p conversion; confirm against the axis transforms.
var x1 = Math.min(selection.first.x, selection.second.x),
x2 = Math.max(selection.first.x, selection.second.x),
y1 = Math.max(selection.first.y, selection.second.y),
y2 = Math.min(selection.first.y, selection.second.y);
var r = {};
if (axes.xaxis.used)
r.xaxis = { from: axes.xaxis.c2p(x1), to: axes.xaxis.c2p(x2) };
if (axes.x2axis.used)
r.x2axis = { from: axes.x2axis.c2p(x1), to: axes.x2axis.c2p(x2) };
if (axes.yaxis.used)
r.yaxis = { from: axes.yaxis.c2p(y1), to: axes.yaxis.c2p(y2) };
if (axes.y2axis.used)
r.y2axis = { from: axes.y2axis.c2p(y1), to: axes.y2axis.c2p(y2) };
return r;
}
// Notify listeners that a selection was completed.
function triggerSelectedEvent() {
    var ranges = getSelection();

    target.trigger("plotselected", [ ranges ]);

    // backwards-compat stuff, to be removed in future
    if (axes.xaxis.used && axes.yaxis.used) {
        var legacy = { x1: ranges.xaxis.from, y1: ranges.yaxis.from,
                       x2: ranges.xaxis.to, y2: ranges.yaxis.to };
        target.trigger("selected", [ legacy ]);
    }
}
// Finish a drag selection: restore the browser's selection/drag handlers,
// commit or clear the selection, and suppress the click that follows.
function onSelectionMouseUp(e) {
// revert drag stuff for old-school browsers
if (document.onselectstart !== undefined)
document.onselectstart = workarounds.onselectstart;
if (document.ondrag !== undefined)
document.ondrag = workarounds.ondrag;
// no more draggy-dee-drag
selection.active = false;
updateSelection(e);
if (selectionIsSane()) {
triggerSelectedEvent();
// swallow the click event that follows this mouseup
clickIsMouseUp = true;
}
else {
// this counts as a clear
target.trigger("plotunselected", [ ]);
target.trigger("plotselecting", [ null ]);
}
return false;
}
// Write the event's position into one selection corner ("pos" is either
// selection.first or selection.second), clamped to the plot area. In
// single-axis selection modes the other coordinate is pinned so the
// selection spans the full width/height.
function setSelectionPos(pos, e) {
var offset = eventHolder.offset();
pos.x = clamp(0, e.pageX - offset.left - plotOffset.left, plotWidth);
pos.y = clamp(0, e.pageY - offset.top - plotOffset.top, plotHeight);
// "y"-only mode: stretch x across the whole plot
if (options.selection.mode == "y") {
if (pos == selection.first)
pos.x = 0;
else
pos.x = plotWidth;
}
// "x"-only mode: stretch y across the whole plot
if (options.selection.mode == "x") {
if (pos == selection.first)
pos.y = 0;
else
pos.y = plotHeight;
}
}
// Move the free corner of the selection to the given mouse position and
// show or hide the rubber band accordingly.
function updateSelection(pos) {
    // pageX is nulled on mousedown; ignore such stale positions.
    if (pos.pageX == null)
        return;

    setSelectionPos(selection.second, pos);

    if (!selectionIsSane()) {
        // Too small to count as a selection; hide without firing events.
        clearSelection(true);
        return;
    }

    selection.show = true;
    triggerRedrawOverlay();
}
// Hide the selection rectangle; fires "plotunselected" unless suppressed.
function clearSelection(preventEvent) {
    if (!selection.show)
        return;

    selection.show = false;
    triggerRedrawOverlay();
    if (!preventEvent)
        target.trigger("plotunselected", [ ]);
}
// Programmatically set the selection from data-space ranges; converts
// them to pixel coordinates and (unless suppressed) fires the selected
// event just like an interactive drag would.
function setSelection(ranges, preventEvent) {
var range;
// in "y"-only mode the x extent always spans the whole plot
if (options.selection.mode == "y") {
selection.first.x = 0;
selection.second.x = plotWidth;
}
else {
range = extractRange(ranges, "x");
selection.first.x = range.axis.p2c(range.from);
selection.second.x = range.axis.p2c(range.to);
}
// in "x"-only mode the y extent always spans the whole plot
if (options.selection.mode == "x") {
selection.first.y = 0;
selection.second.y = plotHeight;
}
else {
range = extractRange(ranges, "y");
selection.first.y = range.axis.p2c(range.from);
selection.second.y = range.axis.p2c(range.to);
}
selection.show = true;
triggerRedrawOverlay();
if (!preventEvent)
triggerSelectedEvent();
}
// A selection only counts if it is at least 5px in both directions, so
// accidental clicks don't register as selections.
function selectionIsSane() {
    var minPixels = 5;
    var dx = Math.abs(selection.second.x - selection.first.x);
    var dy = Math.abs(selection.second.y - selection.first.y);
    return dx >= minPixels && dy >= minPixels;
}
// Resolve a fill spec: a plain color string is returned as-is; otherwise
// the spec is treated as {colors: [...]} and turned into a vertical
// canvas gradient between "top" and "bottom".
function getColorOrGradient(spec, bottom, top, defaultColor) {
if (typeof spec == "string")
return spec;
else {
// assume this is a gradient spec; IE currently only
// supports a simple vertical gradient properly, so that's
// what we support too
var gradient = ctx.createLinearGradient(0, top, 0, bottom);
for (var i = 0, l = spec.colors.length; i < l; ++i) {
var c = spec.colors[i];
// non-string entries scale the default color's brightness/opacity
gradient.addColorStop(i / (l - 1), typeof c == "string" ? c : parseColor(defaultColor).scale(c.brightness, c.brightness, c.brightness, c.opacity));
}
return gradient;
}
}
}
// Public entry point: create a Plot for the given target element, data
// and options, wiring in any registered plugins.
$.plot = function(target, data, options) {
var plot = new Plot($(target), data, options, $.plot.plugins);
/*var t0 = new Date();
var t1 = new Date();
var tstr = "time used (msecs): " + (t1.getTime() - t0.getTime())
if (window.console)
console.log(tstr);
else
alert(tstr);*/
return plot;
};
// registry of plugins passed to every new Plot
$.plot.plugins = [];
// returns a string with the date d formatted according to fmt
// Supported specifiers (all using the date's UTC fields):
//   %h/%H hours (plain/zero-padded), %M minutes, %S seconds,
//   %d day of month, %m month number, %y full year, %b month name.
$.plot.formatDate = function(d, fmt, monthNames) {
var leftPad = function(n) {
n = "" + n;
return n.length == 1 ? "0" + n : n;
};
var r = [];
var escape = false;
if (monthNames == null)
monthNames = ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"];
for (var i = 0; i < fmt.length; ++i) {
var c = fmt.charAt(i);
if (escape) {
// character following a "%": expand the specifier (unknown
// specifiers pass through unchanged)
switch (c) {
case 'h': c = "" + d.getUTCHours(); break;
case 'H': c = leftPad(d.getUTCHours()); break;
case 'M': c = leftPad(d.getUTCMinutes()); break;
case 'S': c = leftPad(d.getUTCSeconds()); break;
case 'd': c = "" + d.getUTCDate(); break;
case 'm': c = "" + (d.getUTCMonth() + 1); break;
case 'y': c = "" + d.getUTCFullYear(); break;
case 'b': c = "" + monthNames[d.getUTCMonth()]; break;
}
r.push(c);
escape = false;
}
else {
if (c == "%")
escape = true;
else
r.push(c);
}
}
return r.join("");
};
// round to nearby lower multiple of base
function floorInBase(n, base) {
return base * Math.floor(n / base);
}
function clamp(min, value, max) {
if (value < min)
return min;
else if (value > max)
return max;
else
return value;
}
// color helpers, inspiration from the jquery color animation
// plugin by John Resig
// Mutable RGBA color with chainable scale/adjust operations; channels are
// clamped to [0,255] and alpha to [0,1] by normalize().
function Color (r, g, b, a) {
    var rgba = ['r','g','b','a'];
    var x = 4; //rgba.length
    // NOTE(review): "arguments[x] || default" maps an explicit 0 alpha to
    // 1.0 (e.g. Color(255,255,255,0) becomes opaque) — confirm callers
    // rely on this before changing it.
    while (-1<--x) {
        this[rgba[x]] = arguments[x] || ((x==3) ? 1.0 : 0);
    }

    // "rgb(...)" when fully opaque, "rgba(...)" otherwise
    this.toString = function() {
        if (this.a >= 1.0) {
            return "rgb("+[this.r,this.g,this.b].join(",")+")";
        } else {
            return "rgba("+[this.r,this.g,this.b,this.a].join(",")+")";
        }
    };

    // multiply each non-null factor into the corresponding channel
    this.scale = function(rf, gf, bf, af) {
        x = 4; //rgba.length
        while (-1<--x) {
            if (arguments[x] != null)
                this[rgba[x]] *= arguments[x];
        }
        return this.normalize();
    };

    // add each non-null delta to the corresponding channel
    this.adjust = function(rd, gd, bd, ad) {
        x = 4; //rgba.length
        while (-1<--x) {
            if (arguments[x] != null)
                this[rgba[x]] += arguments[x];
        }
        return this.normalize();
    };

    this.clone = function() {
        // BUGFIX: arguments were previously passed as (r, b, g, a),
        // swapping the green and blue channels of the copy.
        return new Color(this.r, this.g, this.b, this.a);
    };

    // clamp channels to [0,255] (integers) and alpha to [0,1]
    this.normalize = function() {
        this.r = clamp(0, parseInt(this.r), 255);
        this.g = clamp(0, parseInt(this.g), 255);
        this.b = clamp(0, parseInt(this.b), 255);
        this.a = clamp(0, this.a, 1);
        return this;
    };

    this.normalize();
}
// Named CSS color -> [r, g, b] lookup used by parseColor() as the final
// fallback when the string is not an rgb()/rgba()/hex form.
var lookupColors = {
aqua:[0,255,255],
azure:[240,255,255],
beige:[245,245,220],
black:[0,0,0],
blue:[0,0,255],
brown:[165,42,42],
cyan:[0,255,255],
darkblue:[0,0,139],
darkcyan:[0,139,139],
darkgrey:[169,169,169],
darkgreen:[0,100,0],
darkkhaki:[189,183,107],
darkmagenta:[139,0,139],
darkolivegreen:[85,107,47],
darkorange:[255,140,0],
darkorchid:[153,50,204],
darkred:[139,0,0],
darksalmon:[233,150,122],
darkviolet:[148,0,211],
fuchsia:[255,0,255],
gold:[255,215,0],
green:[0,128,0],
indigo:[75,0,130],
khaki:[240,230,140],
lightblue:[173,216,230],
lightcyan:[224,255,255],
lightgreen:[144,238,144],
lightgrey:[211,211,211],
lightpink:[255,182,193],
lightyellow:[255,255,224],
lime:[0,255,0],
magenta:[255,0,255],
maroon:[128,0,0],
navy:[0,0,128],
olive:[128,128,0],
orange:[255,165,0],
pink:[255,192,203],
purple:[128,0,128],
// NOTE(review): CSS defines violet as [238,130,238]; this entry
// duplicates purple — confirm whether intentional before changing.
violet:[128,0,128],
red:[255,0,0],
silver:[192,192,192],
white:[255,255,255],
yellow:[255,255,0]
};
// Walk up from "element" to find the first ancestor with a concrete
// background color; returns "transparent" if none is found before <body>.
function extractColor(element) {
var color, elem = element;
do {
color = elem.css("background-color").toLowerCase();
// keep going until we find an element that has color, or
// we hit the body
if (color != '' && color != 'transparent')
break;
elem = elem.parent();
} while (!$.nodeName(elem.get(0), "body"));
// catch Safari's way of signalling transparent
if (color == "rgba(0, 0, 0, 0)")
return "transparent";
return color;
}
// parse string, returns Color
// Accepts rgb()/rgba() with numbers or percentages, 6- and 3-digit hex,
// "transparent", and CSS color names from lookupColors.
function parseColor(str) {
var result;
// Look for rgb(num,num,num)
if (result = /rgb\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*\)/.exec(str))
return new Color(parseInt(result[1], 10), parseInt(result[2], 10), parseInt(result[3], 10));
// Look for rgba(num,num,num,num)
if (result = /rgba\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]+(?:\.[0-9]+)?)\s*\)/.exec(str))
return new Color(parseInt(result[1], 10), parseInt(result[2], 10), parseInt(result[3], 10), parseFloat(result[4]));
// Look for rgb(num%,num%,num%)
if (result = /rgb\(\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*\)/.exec(str))
return new Color(parseFloat(result[1])*2.55, parseFloat(result[2])*2.55, parseFloat(result[3])*2.55);
// Look for rgba(num%,num%,num%,num)
if (result = /rgba\(\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\%\s*,\s*([0-9]+(?:\.[0-9]+)?)\s*\)/.exec(str))
return new Color(parseFloat(result[1])*2.55, parseFloat(result[2])*2.55, parseFloat(result[3])*2.55, parseFloat(result[4]));
// Look for #a0b1c2
if (result = /#([a-fA-F0-9]{2})([a-fA-F0-9]{2})([a-fA-F0-9]{2})/.exec(str))
return new Color(parseInt(result[1], 16), parseInt(result[2], 16), parseInt(result[3], 16));
// Look for #fff
if (result = /#([a-fA-F0-9])([a-fA-F0-9])([a-fA-F0-9])/.exec(str))
return new Color(parseInt(result[1]+result[1], 16), parseInt(result[2]+result[2], 16), parseInt(result[3]+result[3], 16));
// Otherwise, we're most likely dealing with a named color
var name = $.trim(str).toLowerCase();
if (name == "transparent")
return new Color(255, 255, 255, 0);
else {
// NOTE(review): an unknown name leaves "result" undefined and the
// indexing below will throw; callers appear to pass only CSS colors
// and known names — confirm before hardening.
result = lookupColors[name];
return new Color(result[0], result[1], result[2]);
}
}
})(jQuery);<|fim▁end|> | if (x2 < axisx.min) |
<|file_name|>JitArm64_Integer.cpp<|end_file_name|><|fim▁begin|>// Copyright 2014 Dolphin Emulator Project
// Licensed under GPLv2
// Refer to the license.txt file included.
#include "Common/Arm64Emitter.h"
#include "Common/Common.h"
#include "Core/Core.h"
#include "Core/CoreTiming.h"
#include "Core/PowerPC/PowerPC.h"
#include "Core/PowerPC/PPCTables.h"
#include "Core/PowerPC/JitArm64/Jit.h"
#include "Core/PowerPC/JitArm64/JitArm64_RegCache.h"
#include "Core/PowerPC/JitArm64/JitAsm.h"
using namespace Arm64Gen;
// Record the sign-extended 32 bit result in |reg| into cr_val[crf],
// from which the PPC condition-register bits are derived.
void JitArm64::ComputeRC(ARM64Reg reg, int crf)
{
	ARM64Reg scratch32 = gpr.GetReg();
	ARM64Reg scratch64 = EncodeRegTo64(scratch32);

	// Sign-extend the 32 bit value into the 64 bit cr_val slot.
	SXTW(scratch64, reg);
	STR(INDEX_UNSIGNED, scratch64, X29, PPCSTATE_OFF(cr_val[crf]));

	gpr.Unlock(scratch32);
}
// Record a compile-time-known 32 bit result into cr_val[crf],
// sign-extending negative values to match the register variant above.
void JitArm64::ComputeRC(u32 imm, int crf)
{
	ARM64Reg scratch32 = gpr.GetReg();
	ARM64Reg scratch64 = EncodeRegTo64(scratch32);

	MOVI2R(scratch64, imm);
	// Negative 32 bit immediates must appear sign-extended in cr_val.
	if (imm & 0x80000000)
		SXTW(scratch64, scratch32);
	STR(INDEX_UNSIGNED, scratch64, X29, PPCSTATE_OFF(cr_val[crf]));

	gpr.Unlock(scratch32);
}
void JitArm64::ComputeCarry(bool Carry)
{
if (Carry)
{
ARM64Reg WA = gpr.GetReg();
MOVI2R(WA, 1);
STRB(INDEX_UNSIGNED, WA, X29, PPCSTATE_OFF(xer_ca));
gpr.Unlock(WA);
return;
}
STRB(INDEX_UNSIGNED, WSP, X29, PPCSTATE_OFF(xer_ca));
}
// Capture the host carry flag into xer_ca.
void JitArm64::ComputeCarry()
{
ARM64Reg WA = gpr.GetReg();
// CSINC selects Rn when the condition holds, Rm + 1 otherwise, so with
// both sources zero this yields 0 when carry is clear (CC_CC) and 1 when
// set. NOTE(review): WSP appears to stand in for the zero register here.
CSINC(WA, WSP, WSP, CC_CC);
STRB(INDEX_UNSIGNED, WA, X29, PPCSTATE_OFF(xer_ca));
gpr.Unlock(WA);
}
// Following static functions are used in conjunction with reg_imm:
// when both operands are known at compile time, reg_imm applies one of
// these on the host (constant folding) instead of emitting code.
static u32 Add(u32 a, u32 b)
{
return a + b;
}
static u32 Or(u32 a, u32 b)
{
return a | b;
}
static u32 And(u32 a, u32 b)
{
return a & b;
}
static u32 Xor(u32 a, u32 b)
{
return a ^ b;
}
// Emit (or constant-fold) a register-with-immediate ALU operation.
//   d      destination PPC register
//   a      source PPC register
//   binary true for logical ops, where a == 0 still names r0;
//          false for addi-style ops, where a == 0 means a literal zero
//   value  the immediate operand
//   do_op  host-side version of the operation, used for constant folding
//   op     Arm64 emitter member function used for the register path
//   Rc     whether to update CR0 with the result
void JitArm64::reg_imm(u32 d, u32 a, bool binary, u32 value, Operation do_op, void (ARM64XEmitter::*op)(ARM64Reg, ARM64Reg, ARM64Reg, ArithOption), bool Rc)
{
if (a || binary)
{
if (gpr.IsImm(a))
{
// Both operands known at compile time: fold on the host.
gpr.SetImmediate(d, do_op(gpr.GetImm(a), value));
if (Rc)
ComputeRC(gpr.GetImm(d));
}
else
{
// Register path: materialize the immediate and emit the op.
gpr.BindToRegister(d, d == a);
ARM64Reg WA = gpr.GetReg();
MOVI2R(WA, value);
(this->*op)(gpr.R(d), gpr.R(a), WA, ArithOption(WA, ST_LSL, 0));
gpr.Unlock(WA);
if (Rc)
ComputeRC(gpr.R(d), 0);
}
}
else if (do_op == Add)
{
// a == 0, implies zero register: addi/addis become a load-immediate.
gpr.SetImmediate(d, value);
if (Rc)
ComputeRC(value, 0);
}
else
{
_assert_msg_(DYNA_REC, false, "Hit impossible condition in reg_imm!");
}
}
// Dispatch for the D-form arithmetic/logical-immediate instructions;
// each funnels into reg_imm() with the matching host-side fold callback
// and Arm64 emitter function.
void JitArm64::arith_imm(UGeckoInstruction inst)
{
INSTRUCTION_START
JITDISABLE(bJITIntegerOff);
u32 d = inst.RD, a = inst.RA, s = inst.RS;

switch (inst.OPCD)
{
case 14: // addi
reg_imm(d, a, false, (u32)(s32)inst.SIMM_16, Add, &ARM64XEmitter::ADD);
break;
case 15: // addis
reg_imm(d, a, false, (u32)inst.SIMM_16 << 16, Add, &ARM64XEmitter::ADD);
break;
case 24: // ori
if (a == 0 && s == 0 && inst.UIMM == 0 && !inst.Rc) //check for nop
{
// NOP
return;
}
reg_imm(a, s, true, inst.UIMM, Or, &ARM64XEmitter::ORR);
break;
case 25: // oris
reg_imm(a, s, true, inst.UIMM << 16, Or, &ARM64XEmitter::ORR);
break;
// andi./andis. always update CR0 per the PowerPC ISA, hence Rc = true.
case 28: // andi
reg_imm(a, s, true, inst.UIMM, And, &ARM64XEmitter::AND, true);
break;
case 29: // andis
reg_imm(a, s, true, inst.UIMM << 16, And, &ARM64XEmitter::AND, true);
break;
case 26: // xori
reg_imm(a, s, true, inst.UIMM, Xor, &ARM64XEmitter::EOR);
break;
case 27: // xoris
reg_imm(a, s, true, inst.UIMM << 16, Xor, &ARM64XEmitter::EOR);
break;
}
}
// Register-register logical ops (and/or/xor/nand/nor/eqv/andc/orc),
// selected by SUBOP10. Three tiers: fold when both sources are
// immediates, simplify algebraically when both sources are the same
// register, otherwise emit the Arm64 equivalent.
void JitArm64::boolX(UGeckoInstruction inst)
{
INSTRUCTION_START
JITDISABLE(bJITIntegerOff);
int a = inst.RA, s = inst.RS, b = inst.RB;

// Tier 1: both operands known — fold on the host.
if (gpr.IsImm(s) && gpr.IsImm(b))
{
if (inst.SUBOP10 == 28) // andx
gpr.SetImmediate(a, (u32)gpr.GetImm(s) & (u32)gpr.GetImm(b));
else if (inst.SUBOP10 == 476) // nandx
gpr.SetImmediate(a, ~((u32)gpr.GetImm(s) & (u32)gpr.GetImm(b)));
else if (inst.SUBOP10 == 60) // andcx
gpr.SetImmediate(a, (u32)gpr.GetImm(s) & (~(u32)gpr.GetImm(b)));
else if (inst.SUBOP10 == 444) // orx
gpr.SetImmediate(a, (u32)gpr.GetImm(s) | (u32)gpr.GetImm(b));
else if (inst.SUBOP10 == 124) // norx
gpr.SetImmediate(a, ~((u32)gpr.GetImm(s) | (u32)gpr.GetImm(b)));
else if (inst.SUBOP10 == 412) // orcx
gpr.SetImmediate(a, (u32)gpr.GetImm(s) | (~(u32)gpr.GetImm(b)));
else if (inst.SUBOP10 == 316) // xorx
gpr.SetImmediate(a, (u32)gpr.GetImm(s) ^ (u32)gpr.GetImm(b));
else if (inst.SUBOP10 == 284) // eqvx
gpr.SetImmediate(a, ~((u32)gpr.GetImm(s) ^ (u32)gpr.GetImm(b)));

if (inst.Rc)
ComputeRC(gpr.GetImm(a), 0);
}
// Tier 2: s == b — the ops degenerate to mov, mvn, all-ones or zero.
else if (s == b)
{
// x & x == x | x == x: plain register move.
if ((inst.SUBOP10 == 28 /* andx */) || (inst.SUBOP10 == 444 /* orx */))
{
if (a != s)
{
gpr.BindToRegister(a, false);
MOV(gpr.R(a), gpr.R(s));
}
if (inst.Rc)
ComputeRC(gpr.R(a));
}
// ~(x & x) == ~(x | x) == ~x: bitwise not.
else if ((inst.SUBOP10 == 476 /* nandx */) || (inst.SUBOP10 == 124 /* norx */))
{
gpr.BindToRegister(a, a == s);
MVN(gpr.R(a), gpr.R(s));
if (inst.Rc)
ComputeRC(gpr.R(a));
}
// x | ~x and ~(x ^ x): all bits set.
else if ((inst.SUBOP10 == 412 /* orcx */) || (inst.SUBOP10 == 284 /* eqvx */))
{
gpr.SetImmediate(a, 0xFFFFFFFF);
if (inst.Rc)
ComputeRC(gpr.GetImm(a), 0);
}
// x & ~x and x ^ x: zero.
else if ((inst.SUBOP10 == 60 /* andcx */) || (inst.SUBOP10 == 316 /* xorx */))
{
gpr.SetImmediate(a, 0);
if (inst.Rc)
ComputeRC(gpr.GetImm(a), 0);
}
else
{
PanicAlert("WTF!");
}
}
// Tier 3: general case — emit the corresponding Arm64 instruction(s).
else
{
gpr.BindToRegister(a, (a == s) || (a == b));
if (inst.SUBOP10 == 28) // andx
{
AND(gpr.R(a), gpr.R(s), gpr.R(b), ArithOption(gpr.R(a), ST_LSL, 0));
}
else if (inst.SUBOP10 == 476) // nandx
{
// No single NAND instruction: AND then invert.
AND(gpr.R(a), gpr.R(s), gpr.R(b), ArithOption(gpr.R(a), ST_LSL, 0));
MVN(gpr.R(a), gpr.R(a));
}
else if (inst.SUBOP10 == 60) // andcx
{
BIC(gpr.R(a), gpr.R(s), gpr.R(b), ArithOption(gpr.R(a), ST_LSL, 0));
}
else if (inst.SUBOP10 == 444) // orx
{
ORR(gpr.R(a), gpr.R(s), gpr.R(b), ArithOption(gpr.R(a), ST_LSL, 0));
}
else if (inst.SUBOP10 == 124) // norx
{
// No single NOR instruction: ORR then invert.
ORR(gpr.R(a), gpr.R(s), gpr.R(b), ArithOption(gpr.R(a), ST_LSL, 0));
MVN(gpr.R(a), gpr.R(a));
}
else if (inst.SUBOP10 == 412) // orcx
{
ORN(gpr.R(a), gpr.R(s), gpr.R(b), ArithOption(gpr.R(a), ST_LSL, 0));
}
else if (inst.SUBOP10 == 316) // xorx
{
EOR(gpr.R(a), gpr.R(s), gpr.R(b), ArithOption(gpr.R(a), ST_LSL, 0));
}
else if (inst.SUBOP10 == 284) // eqvx
{
// eqv == ~(s ^ b); EON computes Rn ^ ~Rm.
EON(gpr.R(a), gpr.R(b), gpr.R(s), ArithOption(gpr.R(a), ST_LSL, 0));
}
else
{
PanicAlert("WTF!");
}
if (inst.Rc)
ComputeRC(gpr.R(a), 0);
}
}
// addx: rD = rA + rB. The OE (overflow-recording) form falls back to the
// interpreter; constant operands are folded at compile time.
void JitArm64::addx(UGeckoInstruction inst)
{
	INSTRUCTION_START
	JITDISABLE(bJITIntegerOff);
	FALLBACK_IF(inst.OE);

	int a = inst.RA, b = inst.RB, d = inst.RD;

	if (gpr.IsImm(a) && gpr.IsImm(b))
	{
		s32 i = (s32)gpr.GetImm(a), j = (s32)gpr.GetImm(b);
		gpr.SetImmediate(d, i + j);
		if (inst.Rc)
			ComputeRC(gpr.GetImm(d), 0);
	}
	else
	{
		// BUGFIX: bind the destination before emitting — every other
		// integer op in this file does this, and without it RD is not
		// guaranteed to be mapped to a host register.
		gpr.BindToRegister(d, d == a || d == b);
		ADD(gpr.R(d), gpr.R(a), gpr.R(b));
		if (inst.Rc)
			ComputeRC(gpr.R(d), 0);
	}
}
// extsb/extsh: sign-extend the low 8 or 16 bits of rS into rA.
// SUBOP10 == 922 selects extsh (16 bit), otherwise extsb (8 bit).
void JitArm64::extsXx(UGeckoInstruction inst)
{
INSTRUCTION_START
JITDISABLE(bJITIntegerOff);
int a = inst.RA, s = inst.RS;
int size = inst.SUBOP10 == 922 ? 16 : 8;

// NOTE(review): the destination is bound even on the immediate path,
// where SetImmediate is used instead; harmless, but could be moved into
// the else branch.
gpr.BindToRegister(a, a == s);
if (gpr.IsImm(s))
{
// Fold: cast to s16/s8 and widen back to u32 on the host.
gpr.SetImmediate(a, (u32)(s32)(size == 16 ? (s16)gpr.GetImm(s) : (s8)gpr.GetImm(s)));
if (inst.Rc)
ComputeRC(gpr.GetImm(a), 0);
}
else
{
// SBFM over bit range [0, size-1] is the SXTB/SXTH sign extension.
SBFM(gpr.R(a), gpr.R(s), 0, size - 1);
if (inst.Rc)
ComputeRC(gpr.R(a), 0);
}
}
void JitArm64::cntlzwx(UGeckoInstruction inst)
{
INSTRUCTION_START
JITDISABLE(bJITIntegerOff);
int a = inst.RA;
int s = inst.RS;
gpr.BindToRegister(a, a == s);
if (gpr.IsImm(s))
{
gpr.SetImmediate(a, __builtin_clz(gpr.GetImm(s)));
if (inst.Rc)
ComputeRC(gpr.GetImm(a), 0);
}
else
{
CLZ(gpr.R(a), gpr.R(s));
if (inst.Rc)
ComputeRC(gpr.R(a), 0);
}
}
void JitArm64::negx(UGeckoInstruction inst)
{
INSTRUCTION_START
JITDISABLE(bJITIntegerOff);
int a = inst.RA;
int d = inst.RD;
FALLBACK_IF(inst.OE);
gpr.BindToRegister(d, d == a);
if (gpr.IsImm(a))
{
gpr.SetImmediate(d, ~((u32)gpr.GetImm(a)) + 1);
if (inst.Rc)
ComputeRC(gpr.GetImm(d), 0);
}
else
{
SUB(gpr.R(d), WSP, gpr.R(a), ArithOption(gpr.R(a), ST_LSL, 0));
if (inst.Rc)
ComputeRC(gpr.R(d), 0);
}
}
void JitArm64::cmp(UGeckoInstruction inst)
{
INSTRUCTION_START
JITDISABLE(bJITIntegerOff);
int crf = inst.CRFD;
u32 a = inst.RA, b = inst.RB;
if (gpr.IsImm(a) && gpr.IsImm(b))
{
ComputeRC((s32)gpr.GetImm(a) - (s32)gpr.GetImm(b), crf);
return;
}
ARM64Reg WA = gpr.GetReg();
ARM64Reg WB = gpr.GetReg();
ARM64Reg XA = EncodeRegTo64(WA);
ARM64Reg XB = EncodeRegTo64(WB);
ARM64Reg RA = gpr.R(a);
ARM64Reg RB = gpr.R(b);
SXTW(XA, RA);
SXTW(XB, RB);
SUB(XA, XA, XB);
STR(INDEX_UNSIGNED, XA, X29, PPCSTATE_OFF(cr_val[0]) + (sizeof(PowerPC::ppcState.cr_val[0]) * crf));
gpr.Unlock(WA, WB);
}
void JitArm64::cmpl(UGeckoInstruction inst)
{
INSTRUCTION_START
JITDISABLE(bJITIntegerOff);
int crf = inst.CRFD;
u32 a = inst.RA, b = inst.RB;
if (gpr.IsImm(a) && gpr.IsImm(b))
{
ComputeRC(gpr.GetImm(a) - gpr.GetImm(b), crf);
return;
}
else if (gpr.IsImm(b) && !gpr.GetImm(b))
{
ComputeRC(gpr.R(a), crf);
return;
}
ARM64Reg WA = gpr.GetReg();
ARM64Reg XA = EncodeRegTo64(WA);
SUB(XA, EncodeRegTo64(gpr.R(a)), EncodeRegTo64(gpr.R(b)));
STR(INDEX_UNSIGNED, XA, X29, PPCSTATE_OFF(cr_val[0]) + (sizeof(PowerPC::ppcState.cr_val[0]) * crf));
gpr.Unlock(WA);
}
void JitArm64::cmpi(UGeckoInstruction inst)
{
INSTRUCTION_START
JITDISABLE(bJITIntegerOff);
u32 a = inst.RA;
int crf = inst.CRFD;
if (gpr.IsImm(a))
{
ComputeRC((s32)gpr.GetImm(a) - inst.SIMM_16, crf);
return;
}
ARM64Reg WA = gpr.GetReg();
if (inst.SIMM_16 >= 0 && inst.SIMM_16 < 4096)
{
SUB(WA, gpr.R(a), inst.SIMM_16);
}
else
{
MOVI2R(WA, inst.SIMM_16);
SUB(WA, gpr.R(a), WA);
}
ComputeRC(WA, crf);
gpr.Unlock(WA);
}
void JitArm64::cmpli(UGeckoInstruction inst)
{
INSTRUCTION_START
JITDISABLE(bJITIntegerOff);
u32 a = inst.RA;
int crf = inst.CRFD;
if (gpr.IsImm(a))
{
ComputeRC(gpr.GetImm(a) - inst.UIMM, crf);
return;
}
if (!inst.UIMM)
{
ComputeRC(gpr.R(a), crf);
return;
}
ARM64Reg WA = gpr.GetReg();
ARM64Reg XA = EncodeRegTo64(WA);
if (inst.UIMM < 4096)
{
SUB(XA, EncodeRegTo64(gpr.R(a)), inst.UIMM);
}
else
{
MOVI2R(WA, inst.UIMM);
SUB(XA, EncodeRegTo64(gpr.R(a)), XA);
}
STR(INDEX_UNSIGNED, XA, X29, PPCSTATE_OFF(cr_val[0]) + (sizeof(PowerPC::ppcState.cr_val[0]) * crf));
gpr.Unlock(WA);
}
void JitArm64::rlwinmx(UGeckoInstruction inst)
{
INSTRUCTION_START
JITDISABLE(bJITIntegerOff);
u32 mask = Helper_Mask(inst.MB,inst.ME);
if (gpr.IsImm(inst.RS))
{
gpr.SetImmediate(inst.RA, _rotl(gpr.GetImm(inst.RS), inst.SH) & mask);
if (inst.Rc)
ComputeRC(gpr.GetImm(inst.RA), 0);
return;
}
gpr.BindToRegister(inst.RA, inst.RA == inst.RS);
ARM64Reg WA = gpr.GetReg();
ArithOption Shift(gpr.R(inst.RS), ST_ROR, 32 - inst.SH);
MOVI2R(WA, mask);
AND(gpr.R(inst.RA), WA, gpr.R(inst.RS), Shift);
gpr.Unlock(WA);
if (inst.Rc)
ComputeRC(gpr.R(inst.RA), 0);
}
void JitArm64::srawix(UGeckoInstruction inst)
{
INSTRUCTION_START
JITDISABLE(bJITIntegerOff);
int a = inst.RA;
int s = inst.RS;
int amount = inst.SH;
if (gpr.IsImm(s))
{
s32 imm = (s32)gpr.GetImm(s);
gpr.SetImmediate(a, imm >> amount);
if (amount != 0 && (imm < 0) && (imm << (32 - amount)))
ComputeCarry(true);
else
ComputeCarry(false);
}
else if (amount != 0)
{
gpr.BindToRegister(a, a == s);<|fim▁hole|>
ORR(WA, WSP, RS, ArithOption(RS, ST_LSL, 32 - amount));
ORR(RA, WSP, RS, ArithOption(RS, ST_ASR, amount));
if (inst.Rc)
ComputeRC(RA, 0);
ANDS(WSP, WA, RA, ArithOption(RA, ST_LSL, 0));
CSINC(WA, WSP, WSP, CC_EQ);
STRB(INDEX_UNSIGNED, WA, X29, PPCSTATE_OFF(xer_ca));
gpr.Unlock(WA);
}
else
{
gpr.BindToRegister(a, a == s);
ARM64Reg RA = gpr.R(a);
ARM64Reg RS = gpr.R(s);
MOV(RA, RS);
STRB(INDEX_UNSIGNED, WSP, X29, PPCSTATE_OFF(xer_ca));
}
}
void JitArm64::addic(UGeckoInstruction inst)
{
INSTRUCTION_START
JITDISABLE(bJITIntegerOff);
int a = inst.RA, d = inst.RD;
bool rc = inst.OPCD == 13;
s32 simm = inst.SIMM_16;
u32 imm = (u32)simm;
if (gpr.IsImm(a))
{
u32 i = gpr.GetImm(a);
gpr.SetImmediate(d, i + imm);
bool has_carry = Interpreter::Helper_Carry(i, imm);
ComputeCarry(has_carry);
if (rc)
ComputeRC(gpr.GetImm(d), 0);
}
else
{
gpr.BindToRegister(d, d == a);
if (imm < 4096)
{
ADDS(gpr.R(d), gpr.R(a), imm);
}
else if (simm > -4096 && simm < 0)
{
SUBS(gpr.R(d), gpr.R(a), std::abs(simm));
}
else
{
ARM64Reg WA = gpr.GetReg();
MOVI2R(WA, imm);
ADDS(gpr.R(d), gpr.R(a), WA);
gpr.Unlock(WA);
}
ComputeCarry();
if (rc)
ComputeRC(gpr.R(d), 0);
}
}
void JitArm64::mulli(UGeckoInstruction inst)
{
INSTRUCTION_START
JITDISABLE(bJITIntegerOff);
FALLBACK_IF(inst.OE);
int a = inst.RA, d = inst.RD;
if (gpr.IsImm(a))
{
s32 i = (s32)gpr.GetImm(a);
gpr.SetImmediate(d, i * inst.SIMM_16);
}
else
{
gpr.BindToRegister(d, d == a);
ARM64Reg WA = gpr.GetReg();
MOVI2R(WA, (u32)(s32)inst.SIMM_16);
MUL(gpr.R(d), gpr.R(a), WA);
gpr.Unlock(WA);
}
}
void JitArm64::mullwx(UGeckoInstruction inst)
{
INSTRUCTION_START
JITDISABLE(bJITIntegerOff);
FALLBACK_IF(inst.OE);
int a = inst.RA, b = inst.RB, d = inst.RD;
if (gpr.IsImm(a) && gpr.IsImm(b))
{
s32 i = (s32)gpr.GetImm(a), j = (s32)gpr.GetImm(b);
gpr.SetImmediate(d, i * j);
if (inst.Rc)
ComputeRC(gpr.GetImm(d), 0);
}
else
{
gpr.BindToRegister(d, d == a || d == b);
MUL(gpr.R(d), gpr.R(a), gpr.R(b));
if (inst.Rc)
ComputeRC(gpr.R(d), 0);
}
}
void JitArm64::addzex(UGeckoInstruction inst)
{
INSTRUCTION_START
JITDISABLE(bJITIntegerOff);
FALLBACK_IF(inst.OE);
int a = inst.RA, d = inst.RD;
gpr.BindToRegister(d, d == a);
ARM64Reg WA = gpr.GetReg();
LDRB(INDEX_UNSIGNED, WA, X29, PPCSTATE_OFF(xer_ca));
CMP(WA, 0);
CSINC(gpr.R(d), gpr.R(a), gpr.R(a), CC_EQ);
CMP(gpr.R(d), 0);
gpr.Unlock(WA);
ComputeCarry();
if (inst.Rc)
ComputeRC(gpr.R(d), 0);
}
void JitArm64::subfx(UGeckoInstruction inst)
{
INSTRUCTION_START
JITDISABLE(bJITIntegerOff);
FALLBACK_IF(inst.OE);
int a = inst.RA, b = inst.RB, d = inst.RD;
if (gpr.IsImm(a) && gpr.IsImm(b))
{
u32 i = gpr.GetImm(a), j = gpr.GetImm(b);
gpr.SetImmediate(d, j - i);
if (inst.Rc)
ComputeRC(gpr.GetImm(d), 0);
}
else
{
SUB(gpr.R(d), gpr.R(b), gpr.R(a));
if (inst.Rc)
ComputeRC(gpr.R(d), 0);
}
}
void JitArm64::addcx(UGeckoInstruction inst)
{
INSTRUCTION_START
JITDISABLE(bJITIntegerOff);
FALLBACK_IF(inst.OE);
int a = inst.RA, b = inst.RB, d = inst.RD;
if (gpr.IsImm(a) && gpr.IsImm(b))
{
u32 i = gpr.GetImm(a), j = gpr.GetImm(b);
gpr.SetImmediate(d, i + j);
bool has_carry = Interpreter::Helper_Carry(i, j);
ComputeCarry(has_carry);
if (inst.Rc)
ComputeRC(gpr.GetImm(d), 0);
}
else
{
gpr.BindToRegister(d, d == a || d == b);
ADDS(gpr.R(d), gpr.R(a), gpr.R(b));
ComputeCarry();
if (inst.Rc)
ComputeRC(gpr.R(d), 0);
}
}<|fim▁end|> | ARM64Reg RA = gpr.R(a);
ARM64Reg RS = gpr.R(s);
ARM64Reg WA = gpr.GetReg(); |
<|file_name|>purchase-modal.service.ts<|end_file_name|><|fim▁begin|>import { defineAsyncComponent } from 'vue';
import { showModal } from '../../../modal/modal.service';
import { User } from '../../../user/user.model';
import { GameBuild } from '../../build/build.model';
import { Game } from '../../game.model';
import { GamePackage } from '../package.model';
interface GamePackagePurchaseModalOptions {
game: Game;
package: GamePackage;
build: GameBuild | null;
fromExtraSection: boolean;
partnerKey?: string;<|fim▁hole|>export class GamePackagePurchaseModal {
static async show(options: GamePackagePurchaseModalOptions) {
return await showModal<void>({
modalId: 'GamePackagePurchase',
component: defineAsyncComponent(() => import('./purchase-modal.vue')),
size: 'sm',
props: options,
});
}
}<|fim▁end|> | partner?: User;
}
|
<|file_name|>openvpn.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# openvpn.py: library to handle starting and stopping openvpn instances
import subprocess
import threading
import time
class OpenVPN():
def __init__(self, config_file=None, auth_file=None, timeout=10):
self.started = False
self.stopped = False
self.error = False
self.notifications = ""
self.auth_file = auth_file
self.config_file = config_file
self.thread = threading.Thread(target=self._invoke_openvpn)
self.thread.setDaemon(1)
self.timeout = timeout
def _invoke_openvpn(self):
if self.auth_file is None:
cmd = ['sudo', 'openvpn', '--script-security', '2',
'--config', self.config_file]
else:
cmd = ['sudo', 'openvpn', '--script-security', '2',
'--config', self.config_file,
'--auth-user-pass', self.auth_file]
self.process = subprocess.Popen(cmd,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
self.kill_switch = self.process.terminate
self.starting = True
while True:
line = self.process.stdout.readline().strip()
if not line:
break
self.output_callback(line, self.process.terminate)
def output_callback(self, line, kill_switch):
"""Set status of openvpn according to what we process"""
self.notifications += line + "\n"
if "Initialization Sequence Completed" in line:
self.started = True
if "ERROR:" in line:
self.error = True
if "process exiting" in line:
self.stopped = True
def start(self, timeout=None):
"""Start openvpn and block until the connection is opened or there is
an error
"""
if not timeout:
timeout = self.timeout
self.thread.start()
start_time = time.time()<|fim▁hole|> break
if self.started:
print "openvpn started"
else:
print "openvpn not started"
print self.notifications
def stop(self, timeout=None):
"""Stop openvpn"""
if not timeout:
timeout = self.timeout
self.kill_switch()
self.thread.join(timeout)
if self.stopped:
print "stopped"
else:
print "not stopped"
print self.notifications<|fim▁end|> | while start_time + timeout > time.time():
self.thread.join(1)
if self.error or self.started: |
<|file_name|>test_disambiguate.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function, absolute_import
import unittest
from ..morf import analyze, disambiguate
# EINO SANTANEN. Muodon vanhimmat
# http://luulet6lgendus.blogspot.com/
sentences = '''KÕIGE VANEM MUDEL
Pimedas luusivad robotid,
originaalsed tšehhi robotid kahekümnendatest.
Robota! kisendavad nad, uhked originaalsed robotid,
hüüdes iseenda nime.
Robota! möirgavad nad, naftasegused elukad,
hiiglase vaimusünnitised, robotid:
kurvameelsetena kauguses,
ebamäärastena kauguses,
mattudes vastuoludesse,
muutudes peaaegu julmaks oma õiglusejanus.
Robota! Kui päike pageb monoliitide kohalt,
tähistavad nad vägisi
öö salajast geomeetriat.
Õudne on inimesel vaadata
neid metsikuid mudeleid.
Kuuntele, romantiikkaa, 2002'''.split('\n')
class TestDisambiguator(unittest.TestCase):
"""Test the separate disambiguate function
against the built in disambiguate=True function.
Both must work the same."""
def test_disambiguator(self):<|fim▁hole|> an_without = analyze(sentence, disambiguate=False)
disamb = disambiguate(an_without)
self.assertListEqual(an_with, disamb)<|fim▁end|> | for sentence in sentences:
an_with = analyze(sentence) |
<|file_name|>fmbt.py<|end_file_name|><|fim▁begin|># fMBT, free Model Based Testing tool
# Copyright (c) 2012 Intel Corporation.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms and conditions of the GNU Lesser General Public License,
# version 2.1, as published by the Free Software Foundation.
#
# This program is distributed in the hope it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for
# more details.
#
# You should have received a copy of the GNU Lesser General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
# Import this to test step implementations written in Python
# in order to enable logging.
# fmbtlog writes given message to the fmbt log (XML)
# messages can be viewed using format $al of
# fmbt-log -f '$al' logfile
#
# adapterlog writes given message to the adapter log (plain text)
# written by remote_python or remote_pyaal, for instance.
# Log function implementations are provided by the adapter
# component such as remote_python or remote_pyaal.
import datetime
import inspect
import os
import sys
import time
import urllib
_g_fmbt_adapterlogtimeformat="%s.%f"
_g_actionName = "undefined"
_g_testStep = -1
_g_simulated_actions = []
def _fmbt_call_helper(func,param = ""):
if simulated():
return ""
sys.stdout.write("fmbt_call %s.%s\n" % (func,param))
sys.stdout.flush()
response = sys.stdin.readline().rstrip()
magic,code = response.split(" ")
if magic == "fmbt_call":
if code[0] == "1":
return urllib.unquote(code[1:])
return ""
def formatTime(timeformat="%s", timestamp=None):
if timestamp == None:
timestamp = datetime.datetime.now()
# strftime on Windows does not support conversion to epoch (%s).
# Calculate it here, if needed.
if os.name == "nt":
if "%s" in timeformat:
epoch_time = time.mktime(timestamp.timetuple())
timeformat = timeformat.replace("%s", str(int(epoch_time)))
if "%F" in timeformat:
timeformat = timeformat.replace("%F", "%Y-%m-%d")
if "%T" in timeformat:
timeformat = timeformat.replace("%T", "%H:%M:%S")
return timestamp.strftime(timeformat)
def heuristic():
return _fmbt_call_helper("heuristic.get")
def setHeuristic(heuristic):
return _fmbt_call_helper("heuristic.set",heuristic)
def coverage():
return _fmbt_call_helper("coverage.get")
def setCoverage(coverage):
return _fmbt_call_helper("coverage.set",coverage)
def coverageValue():
return _fmbt_call_helper("coverage.getValue")
def fmbtlog(msg, flush=True):
try: file("/tmp/fmbt.fmbtlog", "a").write("%s\n" % (msg,))
except: pass
def adapterlog(msg, flush=True):
try:
_adapterlogWriter(file("/tmp/fmbt.adapterlog", "a"),
formatAdapterLogMessage(msg,))
except: pass
def setAdapterLogWriter(func):
"""
Override low-level adapter log writer with the given function. The
function should take two parameters: a file-like object and a log
message. The message is formatted and ready to be written to the
file. The default is
lambda fileObj, formattedMsg: fileObj.write(formattedMsg)
"""
global _adapterlogWriter
_adapterlogWriter = func
def adapterLogWriter():
"""
Return current low-level adapter log writer function.
"""
global _adapterlogWriter
return _adapterlogWriter
def reportOutput(msg):
try: file("/tmp/fmbt.reportOutput", "a").write("%s\n" % (msg,))
except: pass
def setAdapterLogTimeFormat(strftime_format):
"""
Use given time format string in timestamping adapterlog messages
"""
global _g_fmbt_adapterlogtimeformat
_g_fmbt_adapterlogtimeformat = strftime_format
def formatAdapterLogMessage(msg, fmt="%s %s\n"):
"""
Return timestamped adapter log message
"""
return fmt % (formatTime(_g_fmbt_adapterlogtimeformat), msg)
def getActionName():
"""deprecated, use actionName()"""
return _g_actionName
def actionName():
"""
Return the name of currently executed action (input or output).
"""
return _g_actionName
def getTestStep():
"""deprecated, use testStep()"""
return _g_testStep
def testStep():
"""
Return the number of currently executed test step.
"""
return _g_testStep
def simulated():
"""
Returns True if fMBT is simulating execution of an action (guard
or body block) instead of really executing it.
"""
return len(_g_simulated_actions) > 0
def _adapterlogWriter(fileObj, formattedMsg):
fileObj.write(formattedMsg)
def funcSpec(func):
"""
Return function name and args as they could have been defined
based on function object.
"""
argspec = inspect.getargspec(func)
if argspec.defaults:
kwarg_count = len(argspec.defaults)
else:
kwarg_count = 0
arg_count = len(argspec.args) - kwarg_count
arglist = [str(arg) for arg in argspec.args[:arg_count]]
kwargs = argspec.args[arg_count:]
for index, kwarg in enumerate(kwargs):
arglist.append("%s=%s" % (kwarg, repr(argspec.defaults[index])))
if argspec.varargs:
arglist.append("*%s" % (argspec.varargs,))
if argspec.keywords:
arglist.append("**%s" % (argspec.keywords,))
try:
funcspec = "%s(%s)" % (func.func_name, ", ".join(arglist))
except:
funcspec = "%s(fmbt.funcSpec error)" % (func.func_name,)
return funcspec
_g_debug_socket = None
_g_debug_conn = None
def debug(session=0):
"""
Start debugging with fmbt-debug from the point where this function
was called. Execution will stop until connection to fmbt-debug
[session] has been established.
Parameters:
session (integer, optional):
debug session that identifies which fmbt-debug should
connect to this process. The default is 0.
Example:
- execute on command line "fmbt-debug 42"
- add fmbt.debug(42) in your Python code
- run the Python code so that it will call fmbt.debug(42)
- when done the debugging on the fmbt-debug prompt, enter "c"
for continue.
"""
import bdb
import inspect
import pdb
import socket
global _g_debug_conn, _g_debug_socket
if not _g_debug_socket:
PORTBASE = 0xf4bd # 62653, fMBD
host = "127.0.0.1" # accept local host only, by default
port = PORTBASE + session
_g_debug_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
_g_debug_socket.bind((host, port))
_g_debug_socket.listen(1)
while True:
(_g_debug_conn, addr) = _g_debug_socket.accept()
_g_debug_conn.sendall("fmbt.debug\n")
msg = _g_debug_conn.recv(len("fmbt-debug\n"))
if msg.startswith("fmbt-debug"):
break
_g_debug_conn.close()
except socket.error:
# already in use, perhaps fmbt-debug is already listening to
# the socket and waiting for this process to connect
try:
_g_debug_socket.connect((host, port))
_g_debug_conn = _g_debug_socket
whos_there = _g_debug_conn.recv(len("fmbt-debug\n"))
if not whos_there.startswith("fmbt-debug"):
_g_debug_conn.close()
_g_debug_socket = None
_g_debug_conn = None<|fim▁hole|> except socket.error:
raise ValueError('debugger cannot listen or connect to %s:%s' % (host, port))
if not _g_debug_conn:
fmbtlog("debugger waiting for connection at %s:%s" % (host, port))
# socket.makefile does not work due to buffering issues
# therefore, use our own socket-to-file converter
class SocketToFile(object):
def __init__(self, socket_conn):
self._conn = socket_conn
def read(self, bytes=-1):
msg = []
rv = ""
try:
c = self._conn.recv(1)
except KeyboardInterrupt:
self._conn.close()
raise
while c and not rv:
msg.append(c)
if c == "\r":
rv = "".join(msg)
elif c == "\n":
rv = "".join(msg)
elif len(msg) == bytes:
rv = "".join(msg)
else:
c = self._conn.recv(1)
return rv
def readline(self):
return self.read()
def write(self, msg):
self._conn.sendall(msg)
def flush(self):
pass
connfile = SocketToFile(_g_debug_conn)
debugger = pdb.Pdb(stdin=connfile, stdout=connfile)
debugger.set_trace(inspect.currentframe().f_back)<|fim▁end|> | raise ValueError(
'unexpected answer "%s", fmbt-debug expected' %
(whos_there.strip(),))
_g_debug_conn.sendall("fmbt.debug\n") |
<|file_name|>database_type.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# Infrastructure
# Copyright (C) 2014 Ingenieria ADHOC
# No email
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import re
from openerp import netsvc
from openerp.osv import osv, fields
class database_type(osv.osv):
""""""
_name = 'infrastructure.database_type'
_description = 'database_type'
_columns = {
'name': fields.char(string='Name', required=True),
'prefix': fields.char(string='Prefix', required=True, size=4),
'url_prefix': fields.char(string='URL Prefix'),
'automatic_drop': fields.boolean(string='Automatic Drop'),
'automatic_drop_days': fields.integer(string='Automatic Drop Days'),
'protect_db': fields.boolean(string='Protect DBs?'),
'color': fields.integer(string='Color'),
'automatic_deactivation': fields.boolean(string='Atumatic Deactivation?'),
'auto_deactivation_days': fields.integer(string='Automatic Drop Days'),
'url_example': fields.char(string='URL Example'),
'bd_name_example': fields.char(string='BD Name Example'),
'db_back_up_policy_ids': fields.many2many('infrastructure.db_back_up_policy', 'infrastructure_database_type_ids_db_back_up_policy_ids_rel', 'database_type_id', 'db_back_up_policy_id', string='Suggested Backup Policies'),
}
<|fim▁hole|>
_constraints = [
]
database_type()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|> | _defaults = {
} |
<|file_name|>test3D_old.py<|end_file_name|><|fim▁begin|>from rdkit import Chem
from rdkit import rdBase
from rdkit.Chem import rdMolDescriptors as rdMD
from rdkit.Chem import AllChem
from rdkit.Chem.EState import EStateIndices
from rdkit.Chem.EState import AtomTypes
import time
print rdBase.rdkitVersion
print rdBase.boostVersion
def getEState(mol):
return EStateIndices(mol)
def localopt(mol, steps = 500):
if mol.GetNumConformers() == 0:
mol=make3D(mol)
AllChem.MMFFOptimizeMolecule(mol, maxIters = steps)
return mol
def make3D(mol, steps = 50):
mol = Chem.AddHs(mol)
success = AllChem.EmbedMolecule(mol)
if success == -1: # Failed
success = AllChem.EmbedMolecule(mol, useRandomCoords = True)
if success == -1:
raise Error, "Embedding failed!"
mol = localopt(mol, steps)
return mol
def get3D(m,is3d):
if not is3d:
m = Chem.AddHs(m)
AllChem.EmbedMolecule(m)
AllChem.MMFFOptimizeMolecule(m)
r= rdMD.CalcAUTOCORR3D(m)+rdMD.CalcRDF(m)+rdMD.CalcMORSE(m)+rdMD.CalcWHIM(m)+rdMD.CalcGETAWAY(m)
return r
def generateALL():
m = Chem.MolFromSmiles('Cc1ccccc1')
thefile = open('testAC.txt', 'w')
filename="/Users/mbp/Github/rdkit_mine/Code/GraphMol/Descriptors/test_data/PBF_egfr.sdf"
suppl = Chem.SDMolSupplier(filename,removeHs=False)
mols = [x for x in suppl]
start = time.time()
for m in mols:
r= get3D(m,True)
for item in r:
thefile.write("%.3f," % item)
thefile.write("\n")
end = time.time()
print end - start
thefile = open('testSMWHIM.txt', 'w')
writer = Chem.SDWriter('3Dsmallmol.sdf')
A=['[H][H]','B','O=O','C','CC','CCC','CCCC','CCCCC','CCCCCC','CO','CCO','CCCO','CCCCO','CCCCCO','CCCCCCO','CCl','CCCl','CCCCl','CCCCCl','CCCCCCl','CCCCCCCl','CBr','CCBr','CCCBr','CCCCBr','CCCCCBr','CCCCCCBr','CI','CCI','CCCI','CCCCI','CCCCCI','CCCCCCI','CF','CCF','CCCF','CCCCF','CCCCCF','CCCCCCF','CS','CCS','CCCS','CCCCS','CCCCCS','CCCCCCS','CN','CCN','CCCN','CCCCN','CCCCCN','CCCCCCN']
for smi in A:
m = Chem.MolFromSmiles(smi)
m=localopt(m,100)
#r=get3D(m,True)
print smi
print "---------"<|fim▁hole|> print "Gi:"+str(r[5])+ "," + str(r[6]) + "," + str(r[7])+ "\n"
print "SI:"+str(rdMD.CalcSpherocityIndex(m))
print "AS:"+str(rdMD.CalcAsphericity(m))
print "EX:"+str(rdMD.CalcEccentricity(m))
for item in r:
thefile.write("%.3f," % item)
thefile.write("\n")
#m.SetProp("smi", smi)
#writer.write(m)
thefile = open('testBPA.txt', 'w')
writer = Chem.SDWriter('3DBPAmol.sdf')
B=['CN(C)CC(Br)c1ccccc1','CN(C)CC(Br)c1ccc(F)cc1','CN(C)CC(Br)c1ccc(Cl)cc1','CN(C)CC(Br)c1ccc(Cl)cc1','CN(C)CC(Br)c1ccc(I)cc1','CN(C)CC(Br)c1ccc(C)cc1','CN(C)CC(Br)c1cccc(F)c1','CN(C)CC(Br)c1cccc(Cl)c1','CN(C)CC(Br)c1cccc(Br)c1','CN(C)CC(Br)c1cccc(I)c1','CN(C)CC(Br)c1cccc(C)c1','CN(C)CC(Br)c1ccc(F)c(Cl)c1','CN(C)CC(Br)c1ccc(F)c(Br)c1','CN(C)CC(Br)c1ccc(F)c(C)c1','CN(C)CC(Br)c1ccc(Cl)c(Cl)c1','CN(C)CC(Br)c1ccc(Cl)c(Br)c1','CN(C)CC(Br)c1ccc(Cl)c(C)c1','CN(C)CC(Br)c1ccc(Br)c(Cl)c1','CN(C)CC(Br)c1ccc(Br)c(Br)c1','CN(C)CC(Br)c1ccc(Br)c(C)c1','CN(C)CC(Br)c1ccc(C)c(C)c1','CN(C)CC(Br)c1ccc(C)c(Br)c1']
for smi in B:
m = Chem.MolFromSmiles(smi)
m=localopt(m,100)
#r=get3D(m,True)
r=rdMD.CalcWHIM(m)
for item in r:
thefile.write("%.3f," % item)
thefile.write("\n")
#m.SetProp("smi", smi)
#writer.write(m)
A="G1w,G2w,G3w,Gw"
print dir(rdMD)<|fim▁end|> | r=rdMD.CalcWHIM(m)
print "Ei:"+str(r[0])+ "," + str(r[1]) + "," + str(r[2])+ "\n" |
<|file_name|>JavaHTMLTableCaptionElement.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2013, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
#undef IMPL
#include "config.h"
#include <WebCore/HTMLNames.h>
#include <WebCore/HTMLTableCaptionElement.h>
#include <WebCore/JSMainThreadExecState.h>
#include <wtf/RefPtr.h>
#include <wtf/GetPtr.h>
#include "JavaDOMUtils.h"
#include <wtf/java/JavaEnv.h>
using namespace WebCore;
extern "C" {
#define IMPL (static_cast<HTMLTableCaptionElement*>(jlong_to_ptr(peer)))
// Attributes
JNIEXPORT jstring JNICALL Java_com_sun_webkit_dom_HTMLTableCaptionElementImpl_getAlignImpl(JNIEnv* env, jclass, jlong peer)
{
WebCore::JSMainThreadNullState state;
return JavaReturn<String>(env, IMPL->getAttribute(WebCore::HTMLNames::alignAttr));
}
JNIEXPORT void JNICALL Java_com_sun_webkit_dom_HTMLTableCaptionElementImpl_setAlignImpl(JNIEnv* env, jclass, jlong peer, jstring value)
{
WebCore::JSMainThreadNullState state;
IMPL->setAttributeWithoutSynchronization(WebCore::HTMLNames::alignAttr, String(env, value));
}
<|fim▁hole|><|fim▁end|> | } |
<|file_name|>config.py<|end_file_name|><|fim▁begin|>import os
basedir = os.path.abspath(os.path.dirname(__file__))
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db')
CSRF_ENABLED = True<|fim▁hole|>SECRET_KEY = 'f35fc593c73a35956a70d0a7eeac9bdb'<|fim▁end|> | |
<|file_name|>JobComparator.java<|end_file_name|><|fim▁begin|>package it.unimarconi.utils;
import it.unimarconi.beans.Job;
import java.util.Comparator;
public class JobComparator implements Comparator<Job> {
<|fim▁hole|>
}<|fim▁end|> | public int compare(Job a, Job b) {
return a.compareTo(b);
} |
<|file_name|>HeadersTest.java<|end_file_name|><|fim▁begin|>package com.github.wovnio.wovnjava;
import java.util.HashMap;
import javax.servlet.FilterConfig;
import javax.servlet.http.HttpServletRequest;
import org.easymock.EasyMock;
import java.net.URL;
import java.net.MalformedURLException;
import junit.framework.TestCase;
public class HeadersTest extends TestCase {
private Lang japanese;
protected void setUp() throws Exception {
this.japanese = Lang.get("ja");
}
private static FilterConfig mockConfigPath() {
HashMap<String, String> parameters = new HashMap<String, String>() {{
put("urlPattern", "path");
}};
return TestUtil.makeConfigWithValidDefaults(parameters);
}
private static FilterConfig mockConfigSubdomain() {
HashMap<String, String> parameters = new HashMap<String, String>() {{
put("urlPattern", "subdomain");
}};
return TestUtil.makeConfigWithValidDefaults(parameters);
}
private static FilterConfig mockConfigQuery() {
HashMap<String, String> parameters = new HashMap<String, String>() {{
put("urlPattern", "query");
put("defaultLang", "en");
put("supportedLangs", "en,ja,zh-CHS");
}};
return TestUtil.makeConfigWithValidDefaults(parameters);
}
public void testHeaders() throws ConfigurationError {
HttpServletRequest mockRequest = MockHttpServletRequest.create("https://example.com/ja/test");
FilterConfig mockConfig = mockConfigPath();
Settings s = new Settings(mockConfig);
UrlLanguagePatternHandler ulph = UrlLanguagePatternHandlerFactory.create(s);
Headers h = new Headers(mockRequest, s, ulph);
assertNotNull(h);
}
public void testGetRequestLangPath() throws ConfigurationError {
HttpServletRequest mockRequest = MockHttpServletRequest.create("https://example.com/ja/test");
FilterConfig mockConfig = mockConfigPath();
Settings s = new Settings(mockConfig);
UrlLanguagePatternHandler ulph = UrlLanguagePatternHandlerFactory.create(s);
Headers h = new Headers(mockRequest, s, ulph);
assertEquals(this.japanese, h.getRequestLang());
}
public void testGetRequestLangSubdomain() throws ConfigurationError {
HttpServletRequest mockRequest = MockHttpServletRequest.create("https://ja.example.com/test");
FilterConfig mockConfig = mockConfigSubdomain();
Settings s = new Settings(mockConfig);
UrlLanguagePatternHandler ulph = UrlLanguagePatternHandlerFactory.create(s);
Headers h = new Headers(mockRequest, s, ulph);
assertEquals(this.japanese, h.getRequestLang());
}
public void testGetRequestLangQuery() throws ConfigurationError {
HttpServletRequest mockRequest = MockHttpServletRequest.create("https://example.com/test?wovn=ja");
FilterConfig mockConfig = mockConfigQuery();
Settings s = new Settings(mockConfig);
UrlLanguagePatternHandler ulph = UrlLanguagePatternHandlerFactory.create(s);
Headers h = new Headers(mockRequest, s, ulph);
assertEquals(this.japanese, h.getRequestLang());
}
public void testConvertToDefaultLanguage__PathPattern() throws ConfigurationError, MalformedURLException {
HttpServletRequest mockRequest = MockHttpServletRequest.create("https://example.com/ja/test");
FilterConfig mockConfig = mockConfigPath();
Settings s = new Settings(mockConfig);
UrlLanguagePatternHandler ulph = UrlLanguagePatternHandlerFactory.create(s);
Headers h = new Headers(mockRequest, s, ulph);
URL url = new URL("http://example.com/ja/test");
assertEquals("http://example.com/test", h.convertToDefaultLanguage(url).toString());
}
public void testConvertToDefaultLanguage__SubdomainPattern() throws ConfigurationError, MalformedURLException {<|fim▁hole|>
Settings s = new Settings(mockConfig);
UrlLanguagePatternHandler ulph = UrlLanguagePatternHandlerFactory.create(s);
Headers h = new Headers(mockRequest, s, ulph);
URL url = new URL("http://ja.example.com/test");
assertEquals("http://example.com/test", h.convertToDefaultLanguage(url).toString());
}
public void testConvertToDefaultLanguage__QueryPattern() throws ConfigurationError, MalformedURLException {
HttpServletRequest mockRequest = MockHttpServletRequest.create("https://example.com/test?wovn=ja");
FilterConfig mockConfig = mockConfigQuery();
Settings s = new Settings(mockConfig);
UrlLanguagePatternHandler ulph = UrlLanguagePatternHandlerFactory.create(s);
Headers h = new Headers(mockRequest, s, ulph);
URL url = new URL("http://example.com/test?wovn=ja");
assertEquals("http://example.com/test", h.convertToDefaultLanguage(url).toString());
}
public void testConvertToDefaultLanguage__PathPatternWithSitePrefixPath() throws ConfigurationError, MalformedURLException {
Headers h = createHeaders("/global/en/foo", "/global/", "");
URL url;
url = new URL("http://site.com/global/en/");
assertEquals("http://site.com/global/", h.convertToDefaultLanguage(url).toString());
url = new URL("http://site.com/en/global/");
assertEquals("http://site.com/en/global/", h.convertToDefaultLanguage(url).toString());
}
public void testLocationWithDefaultLangCode() throws ConfigurationError {
HttpServletRequest mockRequest = MockHttpServletRequest.create("https://example.com/signin");
FilterConfig mockConfig = mockConfigPath();
Settings s = new Settings(mockConfig);
UrlLanguagePatternHandler ulph = UrlLanguagePatternHandlerFactory.create(s);
Headers h = new Headers(mockRequest, s, ulph);
assertEquals("http://example.com/", h.locationWithLangCode("http://example.com/"));
assertEquals("https://example.com/", h.locationWithLangCode("https://example.com/"));
assertEquals("https://example.com/dir/file", h.locationWithLangCode("https://example.com/dir/file"));
}
public void testLocationWithPath() throws ConfigurationError {
HttpServletRequest mockRequest = MockHttpServletRequest.create("https://example.com/ja/dir/signin");
FilterConfig mockConfig = mockConfigPath();
Settings s = new Settings(mockConfig);
UrlLanguagePatternHandler ulph = UrlLanguagePatternHandlerFactory.create(s);
Headers h = new Headers(mockRequest, s, ulph);
assertEquals("http://example.com/ja/", h.locationWithLangCode("http://example.com/"));
assertEquals("https://example.com/ja/", h.locationWithLangCode("https://example.com/"));
assertEquals("https://example.com/ja/dir/file", h.locationWithLangCode("https://example.com/dir/file"));
assertEquals("https://other.com/dir/file", h.locationWithLangCode("https://other.com/dir/file"));
assertEquals("https://example.com/ja/", h.locationWithLangCode("/"));
assertEquals("https://example.com/ja/dir/file", h.locationWithLangCode("/dir/file"));
assertEquals("https://example.com/ja/dir/file", h.locationWithLangCode("./file"));
assertEquals("https://example.com/ja/file", h.locationWithLangCode("../file"));
assertEquals("https://example.com/ja/file", h.locationWithLangCode("../../file"));
}
public void testLocationWithPathAndTrailingSlash() throws ConfigurationError {
HttpServletRequest mockRequest = MockHttpServletRequest.create("https://example.com/ja/dir/signin/");
FilterConfig mockConfig = mockConfigPath();
Settings s = new Settings(mockConfig);
UrlLanguagePatternHandler ulph = UrlLanguagePatternHandlerFactory.create(s);
Headers h = new Headers(mockRequest, s, ulph);
assertEquals("https://example.com/ja/dir/signin/file", h.locationWithLangCode("./file"));
assertEquals("https://example.com/ja/dir/file", h.locationWithLangCode("../file"));
assertEquals("https://example.com/ja/file", h.locationWithLangCode("../../file"));
assertEquals("https://example.com/ja/file", h.locationWithLangCode("../../../file"));
}
public void testLocationWithPathAndTopLevel() throws ConfigurationError {
HttpServletRequest mockRequest = MockHttpServletRequest.create("https://example.com/location.jsp?wovn=ja");
FilterConfig mockConfig = mockConfigQuery();
Settings s = new Settings(mockConfig);
UrlLanguagePatternHandler ulph = UrlLanguagePatternHandlerFactory.create(s);
Headers h = new Headers(mockRequest, s, ulph);
assertEquals("https://example.com/index.jsp?wovn=ja", h.locationWithLangCode("./index.jsp"));
}
public void testLocationWithQuery() throws ConfigurationError {
HttpServletRequest mockRequest = MockHttpServletRequest.create("https://example.com/dir/signin?wovn=ja");
FilterConfig mockConfig = mockConfigQuery();
Settings s = new Settings(mockConfig);
UrlLanguagePatternHandler ulph = UrlLanguagePatternHandlerFactory.create(s);
Headers h = new Headers(mockRequest, s, ulph);
assertEquals("http://example.com/?wovn=ja", h.locationWithLangCode("http://example.com/"));
assertEquals("https://example.com/?wovn=ja", h.locationWithLangCode("https://example.com/"));
assertEquals("https://example.com/dir/file?wovn=ja", h.locationWithLangCode("https://example.com/dir/file"));
assertEquals("https://other.com/dir/file", h.locationWithLangCode("https://other.com/dir/file"));
assertEquals("https://example.com/?wovn=ja", h.locationWithLangCode("/"));
assertEquals("https://example.com/dir/file?wovn=ja", h.locationWithLangCode("/dir/file"));
assertEquals("https://example.com/dir/file?wovn=ja", h.locationWithLangCode("./file"));
assertEquals("https://example.com/file?wovn=ja", h.locationWithLangCode("../file"));
assertEquals("https://example.com/file?wovn=ja", h.locationWithLangCode("../../file"));
assertEquals("https://example.com/file?q=hello&wovn=ja", h.locationWithLangCode("../../file?q=hello&wovn=zh-CHS"));
assertEquals("https://example.com/file?wovn=ja", h.locationWithLangCode("../../file?wovn=zh-CHS"));
}
public void testLocationWithSubdomain() throws ConfigurationError {
HttpServletRequest mockRequest = MockHttpServletRequest.create("https://ja.example.com/dir/signin");
FilterConfig mockConfig = mockConfigSubdomain();
Settings s = new Settings(mockConfig);
UrlLanguagePatternHandler ulph = UrlLanguagePatternHandlerFactory.create(s);
Headers h = new Headers(mockRequest, s, ulph);
assertEquals("http://ja.example.com/", h.locationWithLangCode("http://example.com/"));
assertEquals("https://ja.example.com/", h.locationWithLangCode("https://example.com/"));
assertEquals("https://ja.example.com/dir/file", h.locationWithLangCode("https://example.com/dir/file"));
assertEquals("https://other.com/dir/file", h.locationWithLangCode("https://other.com/dir/file"));
assertEquals("https://fr.example.com/dir/file", h.locationWithLangCode("https://fr.example.com/dir/file"));
assertEquals("https://ja.example.com/", h.locationWithLangCode("/"));
assertEquals("https://ja.example.com/dir/file", h.locationWithLangCode("/dir/file"));
assertEquals("https://ja.example.com/dir/file", h.locationWithLangCode("./file"));
assertEquals("https://ja.example.com/file", h.locationWithLangCode("../file"));
assertEquals("https://ja.example.com/file", h.locationWithLangCode("../../file"));
}
public void testLocationWithSitePrefixPath() throws ConfigurationError {
Headers h = createHeaders("/global/ja/foo", "/global/", "");
assertEquals("http://example.com/", h.locationWithLangCode("http://example.com/"));
assertEquals("http://example.com/global/ja/", h.locationWithLangCode("http://example.com/global/"));
assertEquals("https://example.com/global/ja/", h.locationWithLangCode("https://example.com/global/"));
assertEquals("https://example.com/global/ja/", h.locationWithLangCode("https://example.com/global/ja/"));
assertEquals("https://example.com/global/ja/th/", h.locationWithLangCode("https://example.com/global/th/")); // `th` not in supportedLangs
assertEquals("https://example.com/global/ja/tokyo/", h.locationWithLangCode("https://example.com/global/tokyo/"));
assertEquals("https://example.com/global/ja/file.html", h.locationWithLangCode("https://example.com/global/file.html"));
assertEquals("https://example.com/global/ja/file.html", h.locationWithLangCode("https://example.com/pics/../global/file.html"));
assertEquals("https://example.com/global/../../file.html", h.locationWithLangCode("https://example.com/global/../../file.html"));
assertEquals("https://example.com/tokyo/", h.locationWithLangCode("https://example.com/tokyo/"));
assertEquals("https://example.com/tokyo/global/", h.locationWithLangCode("https://example.com/tokyo/global/"));
assertEquals("https://example.com/ja/global/", h.locationWithLangCode("https://example.com/ja/global/"));
assertEquals("https://example.com/th/global/", h.locationWithLangCode("https://example.com/th/global/"));
assertEquals("https://example.com/th/", h.locationWithLangCode("https://example.com/th/"));
}
public void testGetIsValidRequest() throws ConfigurationError {
Headers h;
h = createHeaders("/", "global", "");
assertEquals(false, h.getIsValidRequest());
h = createHeaders("/global", "global", "");
assertEquals(true, h.getIsValidRequest());
h = createHeaders("/global/ja/foo", "global", "");
assertEquals(true, h.getIsValidRequest());
h = createHeaders("/ja/global/foo", "global", "");
assertEquals(false, h.getIsValidRequest());
}
public void testGetIsValidRequest__withIgnoredPaths() throws ConfigurationError {
Headers h;
h = createHeaders("/", "", "/admin,/wp-admin");
assertEquals(true, h.getIsValidRequest());
h = createHeaders("/user/admin", "", "/admin,/wp-admin");
assertEquals(true, h.getIsValidRequest());
h = createHeaders("/adminpage", "", "/admin,/wp-admin");
assertEquals(true, h.getIsValidRequest());
h = createHeaders("/admin", "", "/admin,/wp-admin");
assertEquals(false, h.getIsValidRequest());
h = createHeaders("/wp-admin/", "", "/admin,/wp-admin");
assertEquals(false, h.getIsValidRequest());
h = createHeaders("/wp-admin/page", "", "/admin,/wp-admin");
assertEquals(false, h.getIsValidRequest());
h = createHeaders("/ja/admin", "", "/admin,/wp-admin");
assertEquals(false, h.getIsValidRequest());
h = createHeaders("/ja/wp-admin/", "", "/admin,/wp-admin");
assertEquals(false, h.getIsValidRequest());
h = createHeaders("/en/admin", "", "/admin,/wp-admin");
assertEquals(false, h.getIsValidRequest());
h = createHeaders("/en/wp-admin/", "", "/admin,/wp-admin");
assertEquals(false, h.getIsValidRequest());
h = createHeaders("/city/wp-admin", "city", "/admin,/wp-admin");
assertEquals(true, h.getIsValidRequest());
h = createHeaders("/city/wp-admin", "city", "/city/admin,/city/wp-admin");
assertEquals(false, h.getIsValidRequest());
}
private Headers createHeaders(String requestPath, String sitePrefixPath, String ignorePaths) throws ConfigurationError {
HttpServletRequest mockRequest = MockHttpServletRequest.create("https://example.com" + requestPath);
HashMap<String, String> option = new HashMap<String, String>() {{
put("urlPattern", "path");
put("sitePrefixPath", sitePrefixPath);
put("ignorePaths", ignorePaths);
}};
Settings s = TestUtil.makeSettings(option);
UrlLanguagePatternHandler ulph = UrlLanguagePatternHandlerFactory.create(s);
return new Headers(mockRequest, s, ulph);
}
public void testGetHreflangUrlMap__PathPattern() throws ConfigurationError {
Settings settings = TestUtil.makeSettings(new HashMap<String, String>() {{
put("defaultLang", "en");
put("supportedLangs", "en,ja,fr");
put("urlPattern", "path");
put("sitePrefixPath", "/home");
}});
UrlLanguagePatternHandler patternHandler = UrlLanguagePatternHandlerFactory.create(settings);
HttpServletRequest request = MockHttpServletRequest.create("https://example.com/home?user=123");
Headers sut = new Headers(request, settings, patternHandler);
HashMap<String, String> hreflangs = sut.getHreflangUrlMap();
assertEquals(3, hreflangs.size());
assertEquals("https://example.com/home?user=123", hreflangs.get("en"));
assertEquals("https://example.com/home/ja?user=123", hreflangs.get("ja"));
assertEquals("https://example.com/home/fr?user=123", hreflangs.get("fr"));
}
public void testGetHreflangUrlMap__QueryPattern() throws ConfigurationError {
Settings settings = TestUtil.makeSettings(new HashMap<String, String>() {{
put("defaultLang", "ja");
put("supportedLangs", "ko");
put("urlPattern", "query");
}});
UrlLanguagePatternHandler patternHandler = UrlLanguagePatternHandlerFactory.create(settings);
HttpServletRequest request = MockHttpServletRequest.create("https://example.com/home?user=123");
Headers sut = new Headers(request, settings, patternHandler);
HashMap<String, String> hreflangs = sut.getHreflangUrlMap();
assertEquals(2, hreflangs.size());
assertEquals("https://example.com/home?user=123", hreflangs.get("ja"));
assertEquals("https://example.com/home?user=123&wovn=ko", hreflangs.get("ko"));
}
public void testGetHreflangUrlMap__SubdomainPattern__WithChineseSupportedLangs() throws ConfigurationError {
Settings settings = TestUtil.makeSettings(new HashMap<String, String>() {{
put("defaultLang", "ja");
put("supportedLangs", "ko,th, zh-CHT, zh-CHS");
put("urlPattern", "subdomain");
}});
UrlLanguagePatternHandler patternHandler = UrlLanguagePatternHandlerFactory.create(settings);
HttpServletRequest request = MockHttpServletRequest.create("https://example.com/home?user=123");
Headers sut = new Headers(request, settings, patternHandler);
HashMap<String, String> hreflangs = sut.getHreflangUrlMap();
assertEquals(5, hreflangs.size());
assertEquals("https://example.com/home?user=123", hreflangs.get("ja"));
assertEquals("https://ko.example.com/home?user=123", hreflangs.get("ko"));
assertEquals("https://th.example.com/home?user=123", hreflangs.get("th"));
assertEquals("https://zh-CHT.example.com/home?user=123", hreflangs.get("zh-Hant"));
assertEquals("https://zh-CHS.example.com/home?user=123", hreflangs.get("zh-Hans"));
}
public void testIsSearchEngineBot_NoUserAgent_False() throws ConfigurationError {
String userAgent = null;
Settings settings = TestUtil.makeSettings();
UrlLanguagePatternHandler patternHandler = UrlLanguagePatternHandlerFactory.create(settings);
HttpServletRequest request = MockHttpServletRequest.create("https://example.com/home?user=123", userAgent);
Headers sut = new Headers(request, settings, patternHandler);
assertEquals(false, sut.isSearchEngineBot());
}
public void testIsSearchEngineBot_OrdinaryUserAgent_False() throws ConfigurationError {
String userAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.81 Safari/537.36";
Settings settings = TestUtil.makeSettings();
UrlLanguagePatternHandler patternHandler = UrlLanguagePatternHandlerFactory.create(settings);
HttpServletRequest request = MockHttpServletRequest.create("https://example.com/home?user=123");
Headers sut = new Headers(request, settings, patternHandler);
assertEquals(false, sut.isSearchEngineBot());
}
public void testIsSearchEngineBot_SearchEngineBotUserAgent_True() throws ConfigurationError {
String userAgent = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)";
Settings settings = TestUtil.makeSettings();
UrlLanguagePatternHandler patternHandler = UrlLanguagePatternHandlerFactory.create(settings);
HttpServletRequest request = MockHttpServletRequest.create("https://example.com/home?user=123", userAgent);
Headers sut = new Headers(request, settings, patternHandler);
assertEquals(true, sut.isSearchEngineBot());
}
}<|fim▁end|> | HttpServletRequest mockRequest = MockHttpServletRequest.create("https://ja.example.com/test");
FilterConfig mockConfig = mockConfigSubdomain(); |
<|file_name|>common-crud.service.ts<|end_file_name|><|fim▁begin|>import {MdSnackBar} from '@angular/material';
export abstract class CommonCrudService {
constructor(private snackBar: MdSnackBar) {
}
// todo refactor crud operations to here (use metadata?)
onOperationPerformedNotify(opName: string): (res: boolean) => void {
return (res) => {<|fim▁hole|> text = `Successful ${opName}!`;
} else {
text = `Failed to ${opName}!`;
}
this.snackBar.open(text, null, {
duration: 1000
});
};
}
}<|fim▁end|> | let text;
if (res) { |
<|file_name|>managers.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from belt.managers import SearchQuerySetMixin
class PostQuerySet(SearchQuerySetMixin, models.QuerySet):
pass
class CategoryQuerySet(SearchQuerySetMixin, models.QuerySet):
pass
class BlogQuerySet(SearchQuerySetMixin, models.QuerySet):
def annotate_total_posts(self):
return self.annotate(total_posts=Count("posts"))<|fim▁end|> | from django.db import models
from django.db.models import Count
|
<|file_name|>brush.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>
export default function Brush(props: SvgIconProps): React.ReactElement<SvgIconProps>;<|fim▁end|> | import * as React from 'react';
import {SvgIconProps} from '../../SvgIcon'; |
<|file_name|>multiple_hosts.py<|end_file_name|><|fim▁begin|>import os
from locust import HttpUser, TaskSet, task, between
from locust.clients import HttpSession
class MultipleHostsUser(HttpUser):
abstract = True
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.api_client = HttpSession(base_url=os.environ["API_HOST"])
class UserTasks(TaskSet):
# but it might be convenient to use the @task decorator
@task
def index(self):
self.user.client.get("/")
@task
def index_other_host(self):<|fim▁hole|> """
User class that does requests to the locust web server running on localhost
"""
host = "http://127.0.0.1:8089"
wait_time = between(2, 5)
tasks = [UserTasks]<|fim▁end|> | self.user.api_client.get("/stats/requests")
class WebsiteUser(MultipleHostsUser): |
<|file_name|>event.py<|end_file_name|><|fim▁begin|>'''
Copyright 2015-2020 HENNGE K.K. (formerly known as HDE, Inc.)
Licensed under MIT.<|fim▁hole|>import json
def read_event(path):
with open(path) as event:
data = json.load(event)
return data<|fim▁end|> | '''
|
<|file_name|>functions.ts<|end_file_name|><|fim▁begin|>let nop = function() {};
// Function params
let oneParam = function(n: number) {};
function twoParams(b: boolean, s: string) {}
function withDefaultValue(list = []) {}
// Function returns
let anyReturn = function(): any {
return 'hello';
};
<|fim▁hole|>
// Both params and returns
let complex = function(b: boolean, s: string, x: any): string {
if (b) {
return s;
}
};
// Undefined params
let paramUndef = function(u: undefined, v: undefined) {};
// Void returns
let retVoid = function(): void {};
let retUndef = function(): void {};
const arrowWithJsDoc = (a: number): number => {
return a;
};
const arrowNoJsDoc = (a) => {
return a;
};
const implicitReturnArrow = (a) => a;<|fim▁end|> | function typedReturn(): number {
return 4;
}
let partiallyTyped = function(n: number, u1, b: boolean, u2) {}; |
<|file_name|>transform.rs<|end_file_name|><|fim▁begin|>extern crate rier;
extern crate cgmath;
use rier::transform::Transform;<|fim▁hole|>#[test]
fn new_transform() {
let trans = Transform::new();
assert!(trans.matrix == Matrix4::one());
}<|fim▁end|> | use cgmath::{Matrix4, One};
|
<|file_name|>BooleanNode.js<|end_file_name|><|fim▁begin|>'use strict';
var isA = require("Espresso/oop").isA;
var oop = require("Espresso/oop").oop;
var init = require("Espresso/oop").init;
var trim = require("Espresso/trim").trim;
var isA = require("Espresso/oop").isA;
var oop = require("Espresso/oop").oop;
var ScalarNode = require("Espresso/Config/Definition/ScalarNode");
var InvalidTypeException = require("Espresso/Config/Definition/Exception/InvalidTypeException");
var format = require("util").format;
function BooleanNode(name,parent){
init(this,ScalarNode,name,parent);
oop(this,"Espresso/Config/Definition/BooleanNode");
}
/**
* {@inheritdoc}
*/
function validateType($value)<|fim▁hole|> 'Invalid type for path "%s". Expected boolean, but got %s.',
this.getPath(),
getClass($value)
));
var $hint = this.getInfo();
if ($hint) {
$ex.addHint($hint);
}
$ex.setPath(this.getPath());
throw $ex;
}
}
/**
* {@inheritdoc}
*/
function isValueEmpty($value)
{
// a boolean value cannot be empty
return false;
}
BooleanNode.prototype = Object.create( ScalarNode.prototype );
BooleanNode.prototype.validateType = validateType;
BooleanNode.prototype.isValueEmpty = isValueEmpty;
module.exports = BooleanNode;<|fim▁end|> | {
if ( typeof $value !== 'boolean') {
var $ex = new InvalidTypeException(format( |
<|file_name|>try-catch-before-try.js<|end_file_name|><|fim▁begin|>// Copyright (C) 2015 the V8 project authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
es6id: 25.3.1.3
description: >
When a generator is paused before a `try..catch` statement, `return` should
interrupt control flow as if a `return` statement had appeared at that
location in the function body.
---*/
function* g() {<|fim▁hole|> throw e;
}
$ERROR('This code is unreachable (following `try` statement)');
}
var iter = g();
var result;
iter.next();
result = iter.return(45);
assert.sameValue(result.value, 45, 'Result `value` following `return`');
assert.sameValue(result.done, true, 'Result `done` flag following `return`');
result = iter.next();
assert.sameValue(result.value,
undefined, 'Result `value` is undefined when complete'
);
assert.sameValue(
result.done, true, 'Result `done` flag is `true` when complete'
);<|fim▁end|> | yield;
try {
$ERROR('This code is unreachable (within `try` block)');
} catch (e) { |
<|file_name|>check_internal.rs<|end_file_name|><|fim▁begin|><|fim▁hole|> Copyright 2013 Jesse 'Jeaye' Wilkerson
See licensing in LICENSE file, or at:
http://www.opensource.org/licenses/BSD-3-Clause
File: client/gfx/check_internal.rs
Author: Jesse 'Jeaye' Wilkerson
Description:
Provides a handy macro to check the outcome
of an OpenGL call for errors -- use it everywhere.
*/
#[cfg(check_gl)]
#[macro_escape]
#[path = "../log/macros.rs"]
mod macros;
#[cfg(check_gl)]
pub fn check_gl(func: &str)
{
use gl2 = opengles::gl2;
use log::Log;
let err = gl2::get_error();
if err != gl2::NO_ERROR
{
log_error!(func);
log_fail!(util::get_err_str(err));
}
}
#[cfg(not(check_gl))]
pub fn check_gl(_func: &str)
{ }
macro_rules! check
(
($func:expr) =>
({
let ret = $func;
check::check_gl(stringify!($func));
ret
});
)
macro_rules! check_unsafe
(
($func:expr) =>
({
unsafe { check!($func) }
});
)<|fim▁end|> | /* |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>import json
import os
import re
from django import http
from django.conf import settings
from django.db.transaction import non_atomic_requests
from django.http import HttpResponse, HttpResponseBadRequest
from django.shortcuts import render
from django.utils.encoding import iri_to_uri
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
import commonware.log
import waffle
from django_statsd.clients import statsd
from olympia import amo, api
from olympia.amo.utils import log_cef
from . import monitors
log = commonware.log.getLogger('z.amo')
monitor_log = commonware.log.getLogger('z.monitor')
jp_log = commonware.log.getLogger('z.jp.repack')
flash_re = re.compile(r'^(Win|(PPC|Intel) Mac OS X|Linux.+i\d86)|SunOs',
re.IGNORECASE)
quicktime_re = re.compile(
r'^(application/(sdp|x-(mpeg|rtsp|sdp))|audio/(3gpp(2)?|AMR|aiff|basic|'
r'mid(i)?|mp4|mpeg|vnd\.qcelp|wav|x-(aiff|m4(a|b|p)|midi|mpeg|wav))|'
r'image/(pict|png|tiff|x-(macpaint|pict|png|quicktime|sgi|targa|tiff))|'
r'video/(3gpp(2)?|flc|mp4|mpeg|quicktime|sd-video|x-mpeg))$')
java_re = re.compile(
r'^application/x-java-((applet|bean)(;jpi-version=1\.5|;'
r'version=(1\.(1(\.[1-3])?|(2|4)(\.[1-2])?|3(\.1)?|5)))?|vm)$')
wmp_re = re.compile(
r'^(application/(asx|x-(mplayer2|ms-wmp))|video/x-ms-(asf(-plugin)?|'
r'wm(p|v|x)?|wvx)|audio/x-ms-w(ax|ma))$')
<|fim▁hole|>
@never_cache
@non_atomic_requests
def monitor(request, format=None):
# For each check, a boolean pass/fail status to show in the template
status_summary = {}
results = {}
checks = ['memcache', 'libraries', 'elastic', 'path',
'redis']
for check in checks:
with statsd.timer('monitor.%s' % check) as timer:
status, result = getattr(monitors, check)()
# state is a string. If it is empty, that means everything is fine.
status_summary[check] = {'state': not status,
'status': status}
results['%s_results' % check] = result
results['%s_timer' % check] = timer.ms
# If anything broke, send HTTP 500.
status_code = 200 if all(a['state']
for a in status_summary.values()) else 500
if format == '.json':
return http.HttpResponse(json.dumps(status_summary),
status=status_code)
ctx = {}
ctx.update(results)
ctx['status_summary'] = status_summary
return render(request, 'services/monitor.html', ctx, status=status_code)
@non_atomic_requests
def robots(request):
"""Generate a robots.txt"""
_service = (request.META['SERVER_NAME'] == settings.SERVICES_DOMAIN)
if _service or not settings.ENGAGE_ROBOTS:
template = "User-agent: *\nDisallow: /"
else:
template = render(request, 'amo/robots.html', {'apps': amo.APP_USAGE})
return HttpResponse(template, content_type="text/plain")
@non_atomic_requests
def contribute(request):
path = os.path.join(settings.ROOT, 'contribute.json')
return HttpResponse(open(path, 'rb'), content_type='application/json')
@non_atomic_requests
def handler403(request):
if request.path_info.startswith('/api/'):
# Pass over to handler403 view in api if api was targeted.
return api.views.handler403(request)
else:
return render(request, 'amo/403.html', status=403)
@non_atomic_requests
def handler404(request):
if request.path_info.startswith('/api/'):
# Pass over to handler404 view in api if api was targeted.
return api.views.handler404(request)
else:
return render(request, 'amo/404.html', status=404)
@non_atomic_requests
def handler500(request):
if request.path_info.startswith('/api/'):
# Pass over to handler500 view in api if api was targeted.
return api.views.handler500(request)
else:
return render(request, 'amo/500.html', status=500)
@non_atomic_requests
def csrf_failure(request, reason=''):
return render(request, 'amo/403.html',
{'because_csrf': 'CSRF' in reason}, status=403)
@non_atomic_requests
def loaded(request):
return http.HttpResponse('%s' % request.META['wsgi.loaded'],
content_type='text/plain')
@csrf_exempt
@require_POST
@non_atomic_requests
def cspreport(request):
"""Accept CSP reports and log them."""
report = ('blocked-uri', 'violated-directive', 'original-policy')
if not waffle.sample_is_active('csp-store-reports'):
return HttpResponse()
try:
v = json.loads(request.body)['csp-report']
# If possible, alter the PATH_INFO to contain the request of the page
# the error occurred on, spec: http://mzl.la/P82R5y
meta = request.META.copy()
meta['PATH_INFO'] = v.get('document-uri', meta['PATH_INFO'])
v = [(k, v[k]) for k in report if k in v]
log_cef('CSPViolation', 5, meta, username=request.user,
signature='CSPREPORT',
msg='A client reported a CSP violation',
cs6=v, cs6Label='ContentPolicy')
except (KeyError, ValueError), e:
log.debug('Exception in CSP report: %s' % e, exc_info=True)
return HttpResponseBadRequest()
return HttpResponse()
@non_atomic_requests
def version(request):
path = os.path.join(settings.ROOT, 'version.json')
return HttpResponse(open(path, 'rb'), content_type='application/json')
@non_atomic_requests
def plugin_check_redirect(request):
return http.HttpResponseRedirect('%s?%s' % (
settings.PFS_URL, iri_to_uri(request.META.get('QUERY_STRING', ''))))<|fim▁end|> | |
<|file_name|>imp.rs<|end_file_name|><|fim▁begin|>//! This code might still contain bugs. In either case it's very inefficient because we
//! want to avoid reading invalid memory at all costs. Note that the implementation from
//! 1024cores does not handle ABA!
use std::{ptr, mem};
use std::sync::atomic::{AtomicUsize, AtomicBool};
use std::sync::atomic::Ordering::{SeqCst};
use std::sync::{Mutex, Condvar};
use std::rt::heap::{allocate, deallocate};
use std::cell::{Cell};
use select::{_Selectable, WaitQueue, Payload};
use alloc::{oom};
use {Error, Sendable};
#[cfg(target_pointer_width = "64")]
type HalfPointer = u32;
#[cfg(target_pointer_width = "32")]
type HalfPointer = u16;
const HALF_POINTER_BITS: usize = ::std::usize::BITS as usize / 2;
fn decompose_pointer(val: usize) -> (HalfPointer, HalfPointer) {
let lower = val as HalfPointer;
let higher = (val >> HALF_POINTER_BITS) as HalfPointer;
(lower, higher)
}
fn compose_pointer(lower: HalfPointer, higher: HalfPointer) -> usize {
(lower as usize) | ((higher as usize) << HALF_POINTER_BITS)
}
pub struct Packet<'a, T: Sendable+'a> {
// The id of this channel. The address of the `arc::Inner` that contains this channel.
id: Cell<usize>,
// The buffer we store the massages in.
buf: *mut T,
// One less than the capacity of the channel. Note that the capacity is a power of
// two.
cap_mask: HalfPointer,
// read_start and next_write HalfPointer variables encoded in one usize. read_start is
// the id before which all elements in the buffer have been read. next_write is the
// next place that's free for writing.
//
// Note that this implies that, next_write - read_start <= capacity at all times.
read_start_next_write: AtomicUsize,
// write_end and next_read HalfPointer variables encoded in one usize. write_end is
// the id before which all elements in the buffer have been written. next_read is the
// next place that's free for reading.
//
// Note that this implies that, ignoring overflow, next_read <= write_end.
//
// See the docs below for why we have to store these four variables this way.
write_end_next_read: AtomicUsize,
// Number of senders that are currently sleeping.
sleeping_senders: AtomicUsize,
// Condvar the senders are sleeping on.
send_condvar: Condvar,
// Number of receivers that are currently sleeping.
sleeping_receivers: AtomicUsize,
// Condvar the senders are sleeping on.
recv_condvar: Condvar,
// Mutex that protects the two atomic variables above and the one below.
sleep_mutex: Mutex<()>,
// Number of peers that are awake.
peers_awake: AtomicUsize,
// Is any one selecting on this channel?
wait_queue_used: AtomicBool,
wait_queue: Mutex<WaitQueue<'a>>,
}
impl<'a, T: Sendable+'a> Packet<'a, T> {
pub fn new(buf_size: usize) -> Packet<'a, T> {
if buf_size > 1 << (HALF_POINTER_BITS - 1) {
panic!("capacity overflow");
}
let cap = buf_size.next_power_of_two();
let size = cap.checked_mul(mem::size_of::<T>()).unwrap_or(!0);
if size > !0 >> 1 {
panic!("capacity overflow");
}
let buf = if mem::size_of::<T>() == 0 {
1 as *mut u8
} else {
unsafe { allocate(size, mem::align_of::<T>()) }
};
if buf.is_null() {
oom();
}
Packet {
id: Cell::new(0),
buf: buf as *mut T,
cap_mask: (cap - 1) as HalfPointer,
read_start_next_write: AtomicUsize::new(0),
write_end_next_read: AtomicUsize::new(0),
sleeping_senders: AtomicUsize::new(0),
send_condvar: Condvar::new(),
sleeping_receivers: AtomicUsize::new(0),
recv_condvar: Condvar::new(),
sleep_mutex: Mutex::new(()),
peers_awake: AtomicUsize::new(1),
wait_queue_used: AtomicBool::new(false),
wait_queue: Mutex::new(WaitQueue::new()),
}
}
/// Call this function before any other.
pub fn set_id(&self, id: usize) {
self.id.set(id);
self.wait_queue.lock().unwrap().set_id(id);
}
/// Call this function when the channel is cloned.
pub fn add_peer(&self) {
self.peers_awake.fetch_add(1, SeqCst);
}
/// Call this function when a peer is dropped.
pub fn remove_peer(&self) {
if self.peers_awake.fetch_sub(1, SeqCst) == 1 {
let _guard = self.sleep_mutex.lock().unwrap();
if self.sleeping_receivers.load(SeqCst) > 0 {
self.recv_condvar.notify_one();
} else {
self.send_condvar.notify_one();
}
self.notify_wait_queue();
}
}
fn notify_wait_queue(&self) {
if self.wait_queue_used.load(SeqCst) {
let mut wait_queue = self.wait_queue.lock().unwrap();
if wait_queue.notify() == 0 {
self.wait_queue_used.store(false, SeqCst);
}
}
}
/// Get a position to write to if the queue isn't full
fn get_write_pos(&self) -> Option<HalfPointer> {
// See the get_read_pos docs for details.
loop {
let rsnw = self.read_start_next_write.load(SeqCst);
let (read_start, next_write) = decompose_pointer(rsnw);
if next_write - read_start == self.cap_mask + 1 {
return None;
}
let rsnw_new = compose_pointer(read_start, next_write + 1);
if self.read_start_next_write.compare_and_swap(rsnw, rsnw_new,
SeqCst) == rsnw {
return Some(next_write);
}
}
}
/// `pos` is the position we've written to
fn set_write_end(&self, pos: HalfPointer) {
// See the get_read_pos docs for details.
loop {
let wenr = self.write_end_next_read.load(SeqCst);
let (write_end, next_read) = decompose_pointer(wenr);
if write_end != pos {
continue;
}
let wenr_new = compose_pointer(pos + 1, next_read);
if self.write_end_next_read.compare_and_swap(wenr, wenr_new,
SeqCst) == wenr {
return;
}
}
}
fn set_mem(&self, pos: HalfPointer, val: T) {
unsafe {
ptr::write(self.buf.offset((pos & self.cap_mask) as isize), val);
}
}
pub fn send_async(&self, val: T, have_lock: bool) -> Result<(), (T, Error)> {
let write_pos = match self.get_write_pos() {
Some(w) => w,
_ => return Err((val, Error::Full)),
};
self.set_mem(write_pos, val);
self.set_write_end(write_pos);
if self.sleeping_receivers.load(SeqCst) > 0 {
if have_lock {
self.recv_condvar.notify_one();
} else {
let _guard = self.sleep_mutex.lock().unwrap();
self.recv_condvar.notify_one();
}
}
self.notify_wait_queue();
Ok(())
}
pub fn send_sync(&self, mut val: T) -> Result<(), (T, Error)> {
val = match self.send_async(val, false) {
Err(v) => v.0,
_ => return Ok(()),
};
let mut rv = Ok(());
let mut guard = self.sleep_mutex.lock().unwrap();
self.sleeping_senders.fetch_add(1, SeqCst);
loop {
val = match self.send_async(val, true) {
Err(v) => v.0,
_ => break,
};
// It is possible that all peers sleep at the same time, however, it can be
// shown that, as long as not all of them sleep sending and not all of them
// sleeping receiving, one of them will wake up again because the condition
// variable has already been notified.
if self.peers_awake.fetch_sub(1, SeqCst) == 1 &&
self.sleeping_receivers.load(SeqCst) == 0 {
self.peers_awake.fetch_add(1, SeqCst);
rv = Err((val, Error::Deadlock));
break;
} else {
guard = self.send_condvar.wait(guard).unwrap();
self.peers_awake.fetch_add(1, SeqCst);
}
}
self.sleeping_senders.fetch_sub(1, SeqCst);
rv
}
/// Get a position to read from if the queue isn't empty
fn get_read_pos(&self) -> Option<HalfPointer> {
// The write_end_next_read field contains two variables: write_end and next_read.
//
// next_read is the next position we can read from, write_end is the first
// position we can not read from because it has not necessarily been written yet.
//
// We have to store both of them in the same variable because of ABA. Consider the
// following events:
//
// - This thread reads next_read == 0 and write_end == 1 and therefore there is no
// early return in the `if` below.
// - This thread gets suspended right after the `if`.
// - Other threads continuous read from and write to the channel until both
// write_end and next_read overflow.
// - next_read == 0 and write_end == 0 holds now.
// - This thread wakes up again.
// - If we store next_read in its own variable, then the CAS can only test
// next_read. Since next_read is 0, the CAS succeeds and we arrive at next_read ==
// 1 and write_end == 0.
// - The function that called this function reads from position 0 even though
// nothing has been written to that position yet.
//
// Therefore we store next_read and write_end in the same variable. The overflow
// above can still happen but if write_end gets smaller (or changes in any way),
// the CAS will fail and we can never read uninitialized memory.
//
// It's highly unlikely for this ABA to happen, and on 64bit one might even
// consider it impossible. After a more careful analysis, a future implementation
// might change the implementation.
loop {
let wenr = self.write_end_next_read.load(SeqCst);
let (write_end, next_read) = decompose_pointer(wenr);
if write_end == next_read {
return None;
}
let wenr_new = compose_pointer(write_end, next_read + 1);
if self.write_end_next_read.compare_and_swap(wenr, wenr_new,
SeqCst) == wenr {
return Some(next_read);
}
}
}
/// `pos` is the position we've read from
fn set_read_start(&self, pos: HalfPointer) {
loop {
let rsnw = self.read_start_next_write.load(SeqCst);
let (read_start, next_write) = decompose_pointer(rsnw);
if read_start != pos {
continue;
}
let rsnw_new = compose_pointer(pos + 1, next_write);
if self.read_start_next_write.compare_and_swap(rsnw, rsnw_new,
SeqCst) == rsnw {
return;
}
}
}
fn get_mem(&self, pos: HalfPointer) -> T {
unsafe {
ptr::read(self.buf.offset((pos & self.cap_mask) as isize))
}
}
pub fn recv_async(&self, have_lock: bool) -> Result<T, Error> {
let read_pos = match self.get_read_pos() {
Some(r) => r,
_ => return Err(Error::Empty),
};
let val = self.get_mem(read_pos);
self.set_read_start(read_pos);
if self.sleeping_senders.load(SeqCst) > 0 {
if have_lock {
self.send_condvar.notify_one();
} else {
let _guard = self.sleep_mutex.lock().unwrap();
self.send_condvar.notify_one();
}
}
Ok(val)
}
pub fn recv_sync(&self) -> Result<T, Error> {
let mut rv = self.recv_async(false);
if rv.is_ok() {
return rv;
}
let mut guard = self.sleep_mutex.lock().unwrap();
self.sleeping_receivers.fetch_add(1, SeqCst);
loop {
rv = self.recv_async(true);
if rv.is_ok() {
break;
}
// See the docs in send_sync.
if self.peers_awake.fetch_sub(1, SeqCst) == 1 &&
self.sleeping_senders.load(SeqCst) == 0 {
self.peers_awake.fetch_add(1, SeqCst);
rv = Err(Error::Deadlock);
break;
} else {
guard = self.recv_condvar.wait(guard).unwrap();
self.peers_awake.fetch_add(1, SeqCst);
}
}
self.sleeping_receivers.fetch_sub(1, SeqCst);
rv
}
}
unsafe impl<'a, T: Sendable+'a> Send for Packet<'a, T> { }
unsafe impl<'a, T: Sendable+'a> Sync for Packet<'a, T> { }
impl<'a, T: Sendable+'a> Drop for Packet<'a, T> {
fn drop(&mut self) {
let wenr = self.write_end_next_read.load(SeqCst);
let (write_end, read_start) = decompose_pointer(wenr);
unsafe {
for i in (0..write_end-read_start) {
self.get_mem(read_start + i);
}
if mem::size_of::<T>() > 0 {<|fim▁hole|> }
}
}
unsafe impl<'a, T: Sendable+'a> _Selectable<'a> for Packet<'a, T> {
fn ready(&self) -> bool {
if self.peers_awake.load(SeqCst) == 0 {
return true;
}
let wenr = self.write_end_next_read.load(SeqCst);
let (write_end, next_read) = decompose_pointer(wenr);
write_end != next_read
}
fn register(&self, load: Payload<'a>) {
let mut wait_queue = self.wait_queue.lock().unwrap();
if wait_queue.add(load) > 0 {
self.wait_queue_used.store(true, SeqCst);
}
}
fn unregister(&self, id: usize) {
let mut wait_queue = self.wait_queue.lock().unwrap();
if wait_queue.remove(id) == 0 {
self.wait_queue_used.store(false, SeqCst);
}
}
}<|fim▁end|> | deallocate(self.buf as *mut u8,
(self.cap_mask as usize + 1) * mem::size_of::<T>(),
mem::align_of::<T>());
} |
<|file_name|>mouse_handler.rs<|end_file_name|><|fim▁begin|>use sfml::graphics::RenderTarget;
pub trait MousePosHandler {
fn mouse_pos(&mut self, target: &RenderTarget, mx: i32, my: i32);
}
/*
pub trait MouseClickHandler {
fn mouse_click(&mut self, target: &RenderTarget, mx: i32, my: i32);<|fim▁hole|><|fim▁end|> | }
*/ |
<|file_name|>itemFactories-test.js<|end_file_name|><|fim▁begin|>import {
createEllipsisItem,
createFirstPage,
createLastItem,
createNextItem,
createPageFactory,
createPrevItem,
} from 'src/lib/createPaginationItems/itemFactories'
describe('itemFactories', () => {
describe('createEllipsisItem', () => {
it('"active" is always false', () => {
createEllipsisItem(0).should.have.property('active', false)
})
it('"type" matches "ellipsisItem"', () => {
createEllipsisItem(0).should.have.property('type', 'ellipsisItem')
})
it('"value" matches passed argument', () => {
createEllipsisItem(5).should.have.property('value', 5)
})
})
describe('createFirstPage', () => {
it('"active" is always false', () => {
createFirstPage().should.have.property('active', false)
})
it('"type" matches "firstItem"', () => {
createFirstPage().should.have.property('type', 'firstItem')
})
it('"value" always returns 1', () => {
createFirstPage().should.have.property('value', 1)
})
})
describe('createPrevItem', () => {
it('"active" is always false', () => {
createPrevItem(1).should.have.property('active', false)
})
it('"type" matches "prevItem"', () => {
createPrevItem(1).should.have.property('type', 'prevItem')
})
it('"value" returns previous page number or 1', () => {
createPrevItem(1).should.have.property('value', 1)
createPrevItem(2).should.have.property('value', 1)
createPrevItem(3).should.have.property('value', 2)
})<|fim▁hole|> const pageFactory = createPageFactory(1)
it('returns function', () => {
pageFactory.should.be.a('function')
})
it('"active" is true when pageNumber is equal to activePage', () => {
pageFactory(1).should.have.property('active', true)
})
it('"active" is false when pageNumber is not equal to activePage', () => {
pageFactory(2).should.have.property('active', false)
})
it('"type" of created item matches "pageItem"', () => {
pageFactory(2).should.have.property('type', 'pageItem')
})
it('"value" returns pageNumber', () => {
pageFactory(1).should.have.property('value', 1)
pageFactory(2).should.have.property('value', 2)
})
})
describe('createNextItem', () => {
it('"active" is always false', () => {
createNextItem(0, 0).should.have.property('active', false)
})
it('"type" matches "nextItem"', () => {
createNextItem(0, 0).should.have.property('type', 'nextItem')
})
it('"value" returns the smallest of the arguments', () => {
createNextItem(1, 3).should.have.property('value', 2)
createNextItem(2, 3).should.have.property('value', 3)
createNextItem(3, 3).should.have.property('value', 3)
})
})
describe('createLastItem', () => {
it('"active" is always false', () => {
createLastItem(0).should.have.property('active', false)
})
it('"type" matches "lastItem"', () => {
createLastItem(0).should.have.property('type', 'lastItem')
})
it('"value" matches passed argument', () => {
createLastItem(2).should.have.property('value', 2)
})
})
})<|fim▁end|> | })
describe('createPageFactory', () => { |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(name='megacl',
version='0.4.6',
description='mega.co.nz command line client.',
author='Arthibus Gisséhel',
author_email='[email protected]',
url='https://github.com/gissehel/megacl.git',
packages=['megacllib'],
scripts=['mcl','megacl'],
license='MIT',
keywords='commandline mega.co.nz mega',
long_description=open('README.rst').read(),
install_requires=['supertools','cltools>=0.4.0','mega.py>=0.9.13',
'requests', # non declared yet mega.py dependency
'pycrypto', # non declared yet mega.py dependency
],
classifiers=[
'Development Status :: 4 - Beta',<|fim▁hole|> 'Environment :: Console',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Communications',
'Topic :: Internet',
'Topic :: System :: Filesystems',
'Topic :: Utilities',
],
)<|fim▁end|> | |
<|file_name|>strobe.js<|end_file_name|><|fim▁begin|>var five = require("johnny-five"),
board = new five.Board();
board.on("ready", function() {
var led = new five.Led(12);
var rgb = new five.Led.RGB([6, 5, 3]);
var index = 0;
this.loop(10, function() {
// led.toggle();
if (index === 16777215) {
index = 0;
}
rgb.color(index.toString(16));
index++;
});
});
<|fim▁hole|><|fim▁end|> | // var led = new five.Led(13);
// led.blink(5); |
<|file_name|>watch_calendars.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*-
## Copyright 2011,2013 Luc Saffre
## This file is part of the Lino project.
## Lino is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 3 of the License, or
## (at your option) any later version.
## Lino is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
## You should have received a copy of the GNU General Public License
## along with Lino; if not, see <http://www.gnu.org/licenses/>.
"""
Starts a daemon (or, if daemons are not supported, a nomal console process)
that watches for changes in remote calendars.
See also :doc:`/tickets/47`
"""
import os
import sys
import codecs
import time
import datetime
#~ import signal
import atexit
from cStringIO import StringIO
import vobject
try:
import caldav
from caldav.elements import dav, cdav
except ImportError:
pass # the command won't work, but at least sphinx autodoc will
#~ from lino.modlib.cal.models import Calendar, Event
from vobject.icalendar import VEvent, RecurringComponent
from django.core.management.base import BaseCommand, CommandError
from django.core.exceptions import ValidationError
from django.conf import settings
from django.db.utils import DatabaseError
from django.db import models
import lino
from lino import dd
from lino.utils import confirm, iif
from lino.utils import dblogger
from lino.utils.daemoncommand import DaemonCommand
from lino.modlib.cal.utils import aware, dt2kw, setkw
Place = dd.resolve_model('cal.Place')
Calendar = dd.resolve_model('cal.Calendar')
Event = dd.resolve_model('cal.Event')
RecurrenceSet = dd.resolve_model('cal.RecurrenceSet')
#~ REQUEST = dblogger.PseudoRequest('watch_calendars')
# dblogger.log_changes(REQUEST,obj)
def prettyPrint(obj):
s = StringIO()
out = sys.stdout
sys.stdout = s
obj.prettyPrint()
sys.stdout = out
return s.getvalue()
def receive(dbcal,calendar):
rs_touched = set()
ev_touched = set()
rs_updated = rs_created = rs_deleted = 0
count_update = 0
count_new = 0
count_deleted = 0
#~ print "Using calendar", calendar
props = calendar.get_properties([dav.DisplayName()])
dbcal.name = props[dav.DisplayName().tag]
dbcal.save()
from_date = dbcal.start_date
if not from_date:
from_date = datetime.datetime.now() - datetime.timedelta(days=365)
until_date = datetime.datetime.now() + datetime.timedelta(days=365)
#~ from_date = aware(from_date)
#~ until_date = aware(until_date)
#~ print from_date.tzinfo, until_date.tzinfo
#~ raise Exception("20110823")
results = calendar.date_search(from_date,until_date)
if results:
for comp in results:
#~ if len(list(comp.instance.getChildren())) != 1:
#~ raise Exception("comp.instance.getChildren() is %s" % list(comp.instance.getChildren()))
dblogger.info(
"Got calendar component <<<\n%s\n>>>",
prettyPrint(comp.instance))
if comp.instance.vevent:
event = comp.instance.vevent
if isinstance(event,RecurringComponent):
"""
in a google calendar, all events are parsed to a
RecurringComponent. if event.rruleset is None
we consider them non recurrent.
"""
uid = event.uid.value
dtstart = event.dtstart.value
get_kw = {}
set_kw = {}
get_kw.update(uid = uid)
set_kw.update(summary=event.summary.value)
#~ dblogger.info("TRANSPARENCE IS %r", event.transp.value)
location_name = event.location.value
if location_name:
qs = Place.objects.filter(name__iexact=location_name)
if qs.count() == 0:
pl = Place(name=location_name)
pl.full_clean()
pl.save()
dblogger.info("Auto-created location %s", pl)
else:
pl = qs[0]
if qs.count() > 1:
dblogger.warning("Found more than 1 Place for location %r", location_name)
set_kw.update(place=pl)
else:
set_kw.update(place=None)
if event.transp.value == 'TRANSPARENT':
set_kw.update(transparent=True)
else:
set_kw.update(transparent=False)
set_kw.update(description=event.description.value)
set_kw.update(calendar=dbcal)
#~ set_kw.update(location=event.location.value)
#~ kw.update(dtend=event.dtend.value)
dblogger.info("It's a RecurringComponent")
if event.rruleset:
try:
obj = RecurrenceSet.objects.get(uid=uid)
assert obj.calendar == dbcal
rs_updated += 1
except RecurrenceSet.DoesNotExist, e:
#~ except Exception, e:
obj = RecurrenceSet(uid=uid)
obj.calendar = dbcal
obj.user = dbcal.user
rs_created += 1
#~ raise Exception("20110823 must save rrule, rdate etc... %s" % type(event.rrule_list))
obj.rrules = '\n'.join([r.value for r in event.rrule_list])
#~ obj.exrules = '\n'.join([r.value for r in event.exrule_list])
#~ obj.rdates = '\n'.join([r.value for r in event.rdate_list])
#~ obj.exdates = '\n'.join([r.value for r in event.exdate_list])
obj.summary=event.summary.value
obj.description=event.description.value
setkw(obj,**dt2kw(dtstart,'start'))
obj.full_clean()
obj.save()
dblogger.info("Saved %s",obj)
rs_touched.add(obj.pk)
set_kw.update(rset=obj)
if getattr(dtstart,'tzinfo',False):
dtlist = event.rruleset.between(aware(from_date),aware(until_date))
else:
dtlist = event.rruleset.between(from_date,until_date)
dblogger.info("rrulset.between() --> %s",dtlist)
else:
dtlist = [ dtstart ]
dblogger.info("No rruleset")
duration = event.dtend.value - dtstart
for dtstart in dtlist:
dtend = dtstart + duration
get_kw = dt2kw(dtstart,'start',**get_kw)
set_kw = dt2kw(dtend,'end',**set_kw)
try:
obj = Event.objects.get(**get_kw)
count_update += 1
except Event.DoesNotExist, e:
#~ except Exception, e:
obj = Event(**get_kw)
obj.user = dbcal.user
count_new += 1
setkw(obj,**set_kw)
obj.full_clean()
obj.save()
dblogger.info("Saved %s",obj)
ev_touched.add(obj.pk)
else:
raise Exception("comp.instance.vevent is a %s (expected VEvent)" % type(event))
else:
raise Exception(
"Got unhandled component %s"
% comp.instance.prettyPrint())
#~ print "children:", [c for c in comp.instance.getChildren()]
#~ raise StopIteration
qs = dbcal.event_set.exclude(id__in=ev_touched)
count_deleted = qs.count()
qs.delete() # note: doesn't call delete methods of individual objects
qs = dbcal.recurrenceset_set.exclude(id__in=rs_touched)<|fim▁hole|> "--> Created %d, updated %d, deleted %s Events",
count_new, count_update,count_deleted)
dblogger.info(
"--> Created %d, updated %d, deleted %s RecurrenceSets",
rs_created, rs_updated,rs_deleted)
def send(dbcal,calendar,client):
n = 0
for obj in dbcal.event_set.filter(user_modified=True):
dblogger.info("Gonna send %s",obj)
n += 1
mycal = vobject.iCalendar()
#~ mycal.add('vevent')
#~ mycal = vobject.iCalendar()
#~ vevent = vobject.newFromBehavior('vevent', '2.0')
vevent = mycal.add('vevent')
vevent.add('uid').value = obj.uid
vevent.add('dtstamp').value = obj.modified
if obj.start_time:
vevent.add('dtstart').value = datetime.datetime.combine(obj.start_date,obj.start_time)
else:
vevent.add('dtstart').value = obj.start_date
if obj.end_time:
vevent.add('dtend').value = datetime.datetime.combine(obj.end_date,obj.end_time)
else:
vevent.add('dtend').value = obj.end_date
vevent.add('transp').value = iif(obj.transparent,'TRANSPARENT','OPAQUE')
vevent.add('summary').value = obj.summary
if obj.place:
vevent.add('location').value = obj.place.name
vevent.add('description').value = obj.description
event = caldav.Event(client,data=mycal.serialize(),parent=calendar).save()
dblogger.info("--> Sent %d events to calendar server.", n)
def watch():
"""
Loops through all remote calendars,
synchronizing them with their calendar server.
We first send local changes to the server,
then retrieve remote changes into our database.
Deserves more documentation.
"""
for dbcal in Calendar.objects.filter(url_template__isnull=False):
#~ if not dbcal.url_template:
#~ continue
url = dbcal.get_url()
#~ dblogger.info("Synchronize calendar %s using %s",dbcal.name, url)
dblogger.info("Synchronize calendar %s...",dbcal.name)
client = caldav.DAVClient(url)
principal = caldav.Principal(client, url)
#~ print "url.username:", principal.url.username
#~ print "url.hostname:", principal.url.hostname
calendars = principal.calendars()
if len(calendars) == 0:
dblogger.info("--> Sorry, no calendar")
elif len(calendars) > 1:
#~ print "WARNING: more than 1 calendar"
dblogger.warning("--> More than 1 calendar")
else:
send(dbcal,calendars[0],client)
receive(dbcal,calendars[0])
def main(*args,**options):
#~ msg = "Started watch_calendars %s..."
#~ dblogger.info(msg,lino.__version__)
#~ def goodbye():
#~ msg = "Stopped watch_calendars %s ..."
#~ dblogger.info(msg,lino.__version__)
#~ atexit.register(goodbye)
while True:
watch()
#~ try:
#~ watch()
#~ except Exception,e:
#~ dblogger.exception(e)
break # temporarily while testing
time.sleep(60) # sleep for a minute
class Command(DaemonCommand):
help = __doc__
preserve_loggers = [dblogger.logger]
def handle_daemon(self, *args, **options):
#~ settings.SITE.startup()
main(*args,**options)<|fim▁end|> | rs_deleted = qs.count()
qs.delete() # note: doesn't call delete methods of individual objects
dblogger.info( |
<|file_name|>PolicyManagementDAOFactory.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* you may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.policy.mgt.core.dao;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.device.mgt.core.operation.mgt.dao.OperationManagementDAOException;
import org.wso2.carbon.policy.mgt.core.config.datasource.DataSourceConfig;
import org.wso2.carbon.policy.mgt.core.config.datasource.JNDILookupDefinition;
import org.wso2.carbon.policy.mgt.core.dao.impl.FeatureDAOImpl;
import org.wso2.carbon.policy.mgt.core.dao.impl.MonitoringDAOImpl;
import org.wso2.carbon.policy.mgt.core.dao.impl.PolicyDAOImpl;
import org.wso2.carbon.policy.mgt.core.dao.impl.ProfileDAOImpl;
import org.wso2.carbon.policy.mgt.core.dao.util.PolicyManagementDAOUtil;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Hashtable;
import java.util.List;
public class PolicyManagementDAOFactory {
private static DataSource dataSource;
private static final Log log = LogFactory.getLog(PolicyManagementDAOFactory.class);
private static ThreadLocal<Connection> currentConnection = new ThreadLocal<Connection>();
public static void init(DataSourceConfig config) {
dataSource = resolveDataSource(config);
}
public static void init(DataSource dtSource) {
dataSource = dtSource;
}
public static DataSource getDataSource() {
if (dataSource != null) {
return dataSource;
}
throw new RuntimeException("Data source is not yet configured.");
}
public static PolicyDAO getPolicyDAO() {
return new PolicyDAOImpl();
}
public static ProfileDAO getProfileDAO() {
return new ProfileDAOImpl();
}
public static FeatureDAO getFeatureDAO() {
return new FeatureDAOImpl();
}
public static MonitoringDAO getMonitoringDAO() {
return new MonitoringDAOImpl();
}
/**
* Resolve data source from the data source definition
*
* @param config data source configuration
* @return data source resolved from the data source definition
*/
private static DataSource resolveDataSource(DataSourceConfig config) {
DataSource dataSource = null;
if (config == null) {
throw new RuntimeException("Device Management Repository data source configuration " +
"is null and thus, is not initialized");
}
JNDILookupDefinition jndiConfig = config.getJndiLookupDefinition();
if (jndiConfig != null) {
if (log.isDebugEnabled()) {
log.debug("Initializing Device Management Repository data source using the JNDI " +
"Lookup Definition");
}
List<JNDILookupDefinition.JNDIProperty> jndiPropertyList =
jndiConfig.getJndiProperties();
if (jndiPropertyList != null) {
Hashtable<Object, Object> jndiProperties = new Hashtable<Object, Object>();
for (JNDILookupDefinition.JNDIProperty prop : jndiPropertyList) {
jndiProperties.put(prop.getName(), prop.getValue());
}
dataSource =
PolicyManagementDAOUtil.lookupDataSource(jndiConfig.getJndiName(), jndiProperties);
} else {
dataSource = PolicyManagementDAOUtil.lookupDataSource(jndiConfig.getJndiName(), null);
}
}
return dataSource;
}
public static void beginTransaction() throws PolicyManagerDAOException {
try {
Connection conn = dataSource.getConnection();
conn.setAutoCommit(false);
currentConnection.set(conn);
} catch (SQLException e) {
throw new PolicyManagerDAOException("Error occurred while retrieving config.datasource connection", e);
}
}
public static Connection getConnection() throws PolicyManagerDAOException {
if (currentConnection.get() == null) {
try {
Connection conn = dataSource.getConnection();
conn.setAutoCommit(false);
currentConnection.set(conn);
} catch (SQLException e) {
throw new PolicyManagerDAOException("Error occurred while retrieving data source connection",
e);
}
}
return currentConnection.get();
}
public static void closeConnection() throws PolicyManagerDAOException {
Connection con = currentConnection.get();
try {
con.close();
} catch (SQLException e) {
log.error("Error occurred while close the connection");
}
currentConnection.remove();
}
public static void commitTransaction() throws PolicyManagerDAOException {
try {
Connection conn = currentConnection.get();
if (conn != null) {
conn.commit();
} else {
if (log.isDebugEnabled()) {
log.debug("Datasource connection associated with the current thread is null, hence commit " +<|fim▁hole|> }
} catch (SQLException e) {
throw new PolicyManagerDAOException("Error occurred while committing the transaction", e);
} finally {
closeConnection();
}
}
public static void rollbackTransaction() throws PolicyManagerDAOException {
try {
Connection conn = currentConnection.get();
if (conn != null) {
conn.rollback();
} else {
if (log.isDebugEnabled()) {
log.debug("Datasource connection associated with the current thread is null, hence rollback " +
"has not been attempted");
}
}
} catch (SQLException e) {
throw new PolicyManagerDAOException("Error occurred while rollbacking the transaction", e);
} finally {
closeConnection();
}
}
}<|fim▁end|> | "has not been attempted");
} |
<|file_name|>issue-1397.rs<|end_file_name|><|fim▁begin|>pub enum TransactionState {
Committed(i64),
}
pub enum Packet {
Transaction { state: TransactionState },
}
<|fim▁hole|>fn baz(p: Packet) {
loop {
loop {
loop {
loop {
if let Packet::Transaction {
state: TransactionState::Committed(ts, ..), ..
} = p {
unreachable!()
}
}
}
}
}
}<|fim▁end|> | |
<|file_name|>socketstream.js<|end_file_name|><|fim▁begin|>// SocketStream 0.3
// ----------------
'use strict';
// console.log('CHECK');
// console.log(process.env);
// console.log('/CHECK');
require('colors');
var EventEmitter2 = require('eventemitter2').EventEmitter2;
// Get current version from package.json
var version = exports.version = require('./utils/file').loadPackageJSON().version;
// Set root path of your project
var root = exports.root = process.cwd().replace(/\\/g, '/'); // replace '\' with '/' to support Windows
// Warn if attempting to start without a cwd (e.g. through upstart script)
if (root === '/') {
throw new Error('You must change into the project directory before starting your SocketStream app');
}
// Set environment
// console.log("SS ENV IS ", process.env['SS_ENV']);
var env = exports.env = (process.env['NODE_ENV'] || process.env['SS_ENV'] || 'development').toLowerCase();
// Session & Session Store
var session = exports.session = require('./session');
// logging
var log = require('./utils/log');
// Create an internal API object which is passed to sub-modules and can be used within your app
var api = exports.api = {
version: version,
root: root,
env: env,
log: log,
session: session,
// Call ss.api.add('name_of_api', value_or_function) from your app to safely extend the 'ss' internal API object passed through to your /server code
add: function(name, fn) {
if (api[name]) {
throw new Error('Unable to register internal API extension \'' + name + '\' as this name has already been taken');
} else {
api[name] = fn;
return true;
}
}
};
// Create internal Events bus
// Note: only used by the ss-console module for now. This idea will be expended upon in SocketStream 0.4
var events = exports.events = new EventEmitter2();
// Publish Events
var publish = exports.publish = require('./publish/index')();
// HTTP
var http = exports.http = require('./http/index')(root);
// Client Asset Manager
var client = exports.client = require('./client/index')(api, http.router);
// Allow other libs to send assets to the client
api.client = {send: client.assets.send};
// Incoming Request Responders
var responders = exports.responders = require('./request/index')(api);
// Websocket Layer (transport, message responders, transmit incoming events)
var ws = exports.ws = require('./websocket/index')(api, responders);
// Only one instance of the server can be started at once
var serverInstance = null;
// Public API
var start = function(httpServer) {
var responder, fn, sessionID, id,
// Load SocketStream server instance
server = {
responders: responders.load(),
eventTransport: publish.transport.load(),
sessionStore: session.store.get()
};
// Extend the internal API with a publish object you can call from your own server-side code
api.publish = publish.api(server.eventTransport);
// Start web stack
if (httpServer) {
api.log.info('Starting SocketStream %s in %s mode...'.green, version, env);
// Bind responders to websocket
ws.load(httpServer, server.responders, server.eventTransport);
// Append SocketStream middleware to stack
http.load(client.options.dirs['static'], server.sessionStore, session.options);
// Load Client Asset Manager
client.load(api);
// Send server instance to any registered modules (e.g. console)
events.emit('server:start', server);
// If no HTTP server is passed return an API to allow for server-side testing
// Note this feature is currently considered 'experimental' and the implementation will
// be changed in SocketStream 0.4 to ensure any type of Request Responder can be tested
} else {
sessionID = session.create();
for (id in server.responders) {
if (server.responders.hasOwnProperty(id)) {
responder = server.responders[id];
if (responder.name && responder.interfaces.internal) {
fn = function(){
var args = Array.prototype.slice.call(arguments),
cb = args.pop();
return responder.interfaces.internal(args, {sessionId: sessionID, transport: 'test'}, function(err, params){ cb(params); });
};
api.add(responder.name, fn);
}
}
}
}
return api;
};
// Ensure server can only be started once<|fim▁hole|>};<|fim▁end|> | exports.start = function(httpServer) {
return serverInstance || (serverInstance = start(httpServer)); |
<|file_name|>game.js<|end_file_name|><|fim▁begin|>var GlobezGame = GlobezGame || {};
GlobezGame.Boot = function() {};
GlobezGame.Boot.prototype = {
preload: function() {
console.log("%cStarting Fish Vs Mines", "color:white; background:red");
this.load.image("loading", "assets/sprites/loading.png");
this.load.image("logo", "assets/sprites/logo.png");
},
create: function() {
this.scale.scaleMode = Phaser.ScaleManager.SHOW_ALL;
this.scale.pageAlignHorizontally = true;
this.scale.pageAlignVertically = true;
// this.scale.setScreenSize(true);
this.physics.startSystem(Phaser.Physics.ARCADE);
this.state.start("Preload");
}
}
var GlobezGame = GlobezGame || {};
GlobezGame.Preload = function() {};
GlobezGame.Preload.prototype = {
preload: function() {
console.log("%cPreloading assets", "color:white; background:red")
var loadingBar = this.add.sprite(160, 340, "loading");
loadingBar.anchor.setTo(0.5, 0.5);
this.load.setPreloadSprite(loadingBar);
var logo = this.add.sprite(160, 240, "logo");
logo.anchor.setTo(0.5, 0.5);
this.load.image("background", "assets/sprites/background.png");
this.load.image("playbutton", "assets/sprites/playbutton.png");
this.load.image("gametitle_sealife", "assets/sprites/gametitle_sealife.png");
this.load.image("gametitle_vs", "assets/sprites/gametitle_vs.png");
this.load.image("gametitle_mines", "assets/sprites/gametitle_mines.png");
this.load.image("blackfade", "assets/sprites/blackfade.png");
this.load.image("bubble", "assets/sprites/bubble.png");
},
create: function() {
this.state.start("GameTitle");
}
}
var GlobezGame = GlobezGame || {};
GlobezGame.GameTitle = function() {
startGame = false;
};
GlobezGame.GameTitle.prototype = {
create: function() {
console.log("%cStarting game title", "color:white; background:red");
this.add.image(0, 0, "background");
//
var bubblesEmitter = this.add.emitter(160, 500, 50);
bubblesEmitter.makeParticles("bubble");
bubblesEmitter.maxParticleScale = 0.6;
bubblesEmitter.minParticleScale = 0.2;
bubblesEmitter.setYSpeed(-30, -40);
bubblesEmitter.setXSpeed(-3, 3);
bubblesEmitter.gravity = 0;
bubblesEmitter.width = 320;
bubblesEmitter.minRotation = 0;
bubblesEmitter.maxRotation = 40;
bubblesEmitter.flow(15000, 2000)
//
var gameTitleSeaLife = this.add.image(160, 70, "gametitle_sealife");
gameTitleSeaLife.anchor.setTo(0.5, 0.5);
gameTitleSeaLife.angle = (2 + Math.random() * 5) * (Math.random() > 0.5 ? 1 : -1);
var seaLifeTween = this.add.tween(gameTitleSeaLife);
seaLifeTween.to({
angle: -gameTitleSeaLife.angle
}, 5000 + Math.random() * 5000, Phaser.Easing.Linear.None, true, 0, 1000, true);
//
var gameTitleVs = this.add.image(190, 120, "gametitle_vs");
gameTitleVs.anchor.setTo(0.5, 0.5);
gameTitleVs.angle = (2 + Math.random() * 5) * (Math.random() > 0.5 ? 1 : -1);
var vsTween = this.add.tween(gameTitleVs);
vsTween.to({
angle: -gameTitleVs.angle
}, 5000 + Math.random() * 5000, Phaser.Easing.Linear.None, true, 0, 1000, true);
//
var gameTitleMines = this.add.image(160, 160, "gametitle_mines");
gameTitleMines.anchor.setTo(0.5, 0.5);
gameTitleMines.angle = (2 + Math.random() * 5) * (Math.random() > 0.5 ? 1 : -1);
var minesTween = this.add.tween(gameTitleMines);<|fim▁hole|> minesTween.to({
angle: -gameTitleMines.angle
}, 5000 + Math.random() * 5000, Phaser.Easing.Linear.None, true, 0, 1000, true);
//
var playButton = this.add.button(160, 320, "playbutton", this.playTheGame, this)
playButton.anchor.setTo(0.5, 0.5);
playButton.angle = (2 + Math.random() * 5) * (Math.random() > 0.5 ? 1 : -1);
var playTween = this.add.tween(playButton);
playTween.to({
angle: -playButton.angle
}, 5000 + Math.random() * 5000, Phaser.Easing.Linear.None, true, 0, 1000, true);
//
var blackFade = this.add.sprite(0, 0, "blackfade");
var fadeTween = this.add.tween(blackFade);
fadeTween.to({
alpha: 0
}, 2000, Phaser.Easing.Cubic.Out, true);
},
playTheGame: function() {
if (!startGame) {
startGame = true
alert("Start the game!!");
}
}
}
var GlobezGame = GlobezGame || {};
GlobezGame.gameOptions = {
gameWidth: 320,
gameHeight: 480
}
GlobezGame.game = new Phaser.Game(GlobezGame.gameOptions.gameWidth, GlobezGame.gameOptions.gameHeight, Phaser.CANVAS, "");
GlobezGame.game.state.add("Boot", GlobezGame.Boot);
GlobezGame.game.state.add("Preload", GlobezGame.Preload);
GlobezGame.game.state.add("GameTitle", GlobezGame.GameTitle);
GlobezGame.game.state.start("Boot");<|fim▁end|> | |
<|file_name|>Reading_raw_input.py<|end_file_name|><|fim▁begin|>input = raw_input();<|fim▁hole|><|fim▁end|> | print input |
<|file_name|>unit_selection_system.rs<|end_file_name|><|fim▁begin|>// OpenAOE: An open source reimplementation of Age of Empires (1997)
// Copyright (c) 2016 Kevin Fuller
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use action::{Action, MoveToPositionParams};
use dat;
use ecs::{SelectedUnitComponent, TransformComponent, UnitComponent, OnScreenComponent, DecalComponent};
use ecs::resource::*;
use media::{KeyState, MouseButton};
use resource::DrsKey;
use specs::{self, Join};
use super::System;
use types::{Fixed, Vector3};
use util::unit;
pub struct UnitSelectionSystem {
empires: dat::EmpiresDbRef,
}
impl UnitSelectionSystem {
pub fn new(empires: dat::EmpiresDbRef) -> UnitSelectionSystem {
UnitSelectionSystem { empires: empires }
}
}
impl System for UnitSelectionSystem {
fn update(&mut self, arg: specs::RunArg, _time_step: Fixed) {
fetch_components!(arg, entities, [
components(on_screen: OnScreenComponent),
components(units: UnitComponent),
mut components(decals: DecalComponent),
mut components(selected_units: SelectedUnitComponent),
mut components(transforms: TransformComponent),
resource(mouse_state: MouseState),
resource(path_finder: PathFinder),
resource(players: Players),
resource(view_projector: ViewProjector),
resource(viewport: Viewport),
resource(occupied_tiles: OccupiedTiles),
resource(terrain: Terrain),
mut resource(action_batcher: ActionBatcher),
]);
if mouse_state.key_states.key_state(MouseButton::Left) == KeyState::TransitionUp {
selected_units.clear();
let mouse_ray = calculate_mouse_ray(&viewport, &mouse_state, &view_projector, &terrain);
for (entity, _, unit, transform) in (&entities, &on_screen, &units, &transforms).iter() {
let unit_info = self.empires.unit(unit.civilization_id, unit.unit_id);
if unit_info.interaction_mode != dat::InteractionMode::NonInteracting {
let unit_box = unit::selection_box(unit_info, transform);
// Cast a ray from the mouse position through to the terrain and select any unit
// whose axis-aligned box intersects the ray.
if unit_box.intersects_ray(&mouse_ray.origin, &mouse_ray.direction) {
selected_units.insert(entity, SelectedUnitComponent);
break;
}
}
}
}
if mouse_state.key_states.key_state(MouseButton::Right) == KeyState::TransitionUp {
let mouse_ray = calculate_mouse_ray(&viewport, &mouse_state, &view_projector, &terrain);
let mut moving_unit = false;
for (entity, transform, unit, _selected_unit) in (&entities,
&transforms,
&units,
&selected_units)
.iter() {
if unit.player_id == players.local_player().player_id {
let unit_info = self.empires.unit(unit.civilization_id, unit.unit_id);
let path = path_finder.find_path(&*terrain,
&*occupied_tiles,
transform.position(),<|fim▁hole|> action_batcher.queue_for_entity(entity.get_id(),
Action::MoveToPosition(MoveToPositionParams::new(path)));
moving_unit = true;
}
}
if moving_unit {
let decal = arg.create();
transforms.insert(decal,
TransformComponent::new(mouse_ray.world_coord, 0.into()));
decals.insert(decal,
DecalComponent::new(0.into(), DrsKey::Interfac, 50405.into()));
}
}
}
}
struct MouseRay {
world_coord: Vector3,
origin: Vector3,
direction: Vector3,
}
fn calculate_mouse_ray(viewport: &Viewport,
mouse_state: &MouseState,
view_projector: &ViewProjector,
terrain: &Terrain)
-> MouseRay {
let viewport_pos = viewport.top_left_i32();
let mouse_pos = mouse_state.position + viewport_pos;
// "Origin elevation" just needs to be a bit taller than the max terrain elevation
let origin_elevation: Fixed = Fixed::from(terrain.elevation_range().1) * 2.into();
let world_coord = view_projector.unproject(&mouse_pos, &*terrain);
let origin = view_projector.unproject_at_elevation(&mouse_pos, origin_elevation);
let direction = world_coord - origin;
MouseRay {
world_coord: world_coord,
origin: origin,
direction: direction,
}
}<|fim▁end|> | &mouse_ray.world_coord,
unit_info.terrain_restriction);
action_batcher.queue_for_entity(entity.get_id(), Action::ClearQueue); |
<|file_name|>strings3.rs<|end_file_name|><|fim▁begin|>// Ok, here are a bunch of values-- some are `Strings`, some are `&strs`. Your
// task is to call one of these two functions on each value depending on what
// you think each value is. That is, add either `string_slice` or `string`
// before the parentheses on each line. If you're right, it will compile!
fn string_slice(arg: &str) { println!("{}", arg); }
fn string(arg: String) { println!("{}", arg); }
fn main() {
string_slice("blue");
string("red".to_string());
string(String::from("hi"));
string("rust is fun!".to_owned());
string("nice weather".into());
string(format!("Interpolation {}", "Station"));
string_slice(&String::from("abc")[0..1]);
string_slice(" hello there ".trim());
string("Happy Monday!".to_string().replace("Mon", "Tues"));<|fim▁hole|> string("mY sHiFt KeY iS sTiCkY".to_lowercase());
}<|fim▁end|> | |
<|file_name|>list_box_row.rs<|end_file_name|><|fim▁begin|>// This file was generated by gir (b7f5189) from gir-files (71d73f0)
// DO NOT EDIT
use Bin;
use Container;
use Widget;
use ffi;
use glib::object::Downcast;
use glib::object::IsA;
use glib::signal::connect;
use glib::translate::*;
use glib_ffi;
use std::boxed::Box as Box_;
use std::mem::transmute;
glib_wrapper! {
pub struct ListBoxRow(Object<ffi::GtkListBoxRow>): Bin, Container, Widget;
match fn {
get_type => || ffi::gtk_list_box_row_get_type(),
}
}
impl ListBoxRow {
#[cfg(feature = "v3_10")]
pub fn new() -> ListBoxRow {
assert_initialized_main_thread!();
unsafe {
Widget::from_glib_none(ffi::gtk_list_box_row_new()).downcast_unchecked()
}
}
#[cfg(feature = "v3_10")]
pub fn changed(&self) {
unsafe {
ffi::gtk_list_box_row_changed(self.to_glib_none().0);
}
}
#[cfg(feature = "v3_14")]
pub fn get_activatable(&self) -> bool {
unsafe {
from_glib(ffi::gtk_list_box_row_get_activatable(self.to_glib_none().0))
}
}
#[cfg(feature = "v3_10")]
pub fn get_header(&self) -> Option<Widget> {
unsafe {
from_glib_none(ffi::gtk_list_box_row_get_header(self.to_glib_none().0))
}
}
#[cfg(feature = "v3_10")]
pub fn get_index(&self) -> i32 {
unsafe {
ffi::gtk_list_box_row_get_index(self.to_glib_none().0)
}
}
#[cfg(feature = "v3_14")]
pub fn get_selectable(&self) -> bool {
unsafe {
from_glib(ffi::gtk_list_box_row_get_selectable(self.to_glib_none().0))
}
}
#[cfg(feature = "v3_14")]
pub fn is_selected(&self) -> bool {
unsafe {
from_glib(ffi::gtk_list_box_row_is_selected(self.to_glib_none().0))
}
}
#[cfg(feature = "v3_14")]
pub fn set_activatable(&self, activatable: bool) {
unsafe {
ffi::gtk_list_box_row_set_activatable(self.to_glib_none().0, activatable.to_glib());
}
}
#[cfg(feature = "v3_10")]
pub fn set_header<T: IsA<Widget>>(&self, header: Option<&T>) {<|fim▁hole|>
#[cfg(feature = "v3_14")]
pub fn set_selectable(&self, selectable: bool) {
unsafe {
ffi::gtk_list_box_row_set_selectable(self.to_glib_none().0, selectable.to_glib());
}
}
pub fn connect_activate<F: Fn(&ListBoxRow) + 'static>(&self, f: F) -> u64 {
unsafe {
let f: Box_<Box_<Fn(&ListBoxRow) + 'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "activate",
transmute(activate_trampoline as usize), Box_::into_raw(f) as *mut _)
}
}
}
unsafe extern "C" fn activate_trampoline(this: *mut ffi::GtkListBoxRow, f: glib_ffi::gpointer) {
callback_guard!();
let f: &Box_<Fn(&ListBoxRow) + 'static> = transmute(f);
f(&from_glib_none(this))
}<|fim▁end|> | unsafe {
ffi::gtk_list_box_row_set_header(self.to_glib_none().0, header.to_glib_none().0);
}
} |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! Linear Algebra eXtension (LAX)
//! ===============================
//!
//! ndarray-free safe Rust wrapper for LAPACK FFI
//!
//! Linear equation, Inverse matrix, Condition number
//! --------------------------------------------------
//!
//! As the property of $A$, several types of triangular factorization are used:
//!
//! - LU-decomposition for general matrix
//! - $PA = LU$, where $L$ is lower matrix, $U$ is upper matrix, and $P$ is permutation matrix
//! - Bunch-Kaufman diagonal pivoting method for nonpositive-definite Hermitian matrix
//! - $A = U D U^\dagger$, where $U$ is upper matrix,
//! $D$ is Hermitian and block diagonal with 1-by-1 and 2-by-2 diagonal blocks.
//!
//! | matrix type | Triangler factorization (TRF) | Solve (TRS) | Inverse matrix (TRI) | Reciprocal condition number (CON) |
//! |:--------------------------------|:------------------------------|:------------|:---------------------|:----------------------------------|
//! | General (GE) | [lu] | [solve] | [inv] | [rcond] |
//! | Symmetric (SY) / Hermitian (HE) | [bk] | [solveh] | [invh] | - |
//!
//! [lu]: solve/trait.Solve_.html#tymethod.lu
//! [solve]: solve/trait.Solve_.html#tymethod.solve
//! [inv]: solve/trait.Solve_.html#tymethod.inv
//! [rcond]: solve/trait.Solve_.html#tymethod.rcond
//!
//! [bk]: solveh/trait.Solveh_.html#tymethod.bk
//! [solveh]: solveh/trait.Solveh_.html#tymethod.solveh
//! [invh]: solveh/trait.Solveh_.html#tymethod.invh
//!
//! Eigenvalue Problem
//! -------------------
//!
//! Solve eigenvalue problem for a matrix $A$
//!
//! $$ Av_i = \lambda_i v_i $$
//!
//! or generalized eigenvalue problem
//!
//! $$ Av_i = \lambda_i B v_i $$
//!
//! | matrix type | Eigenvalue (EV) | Generalized Eigenvalue Problem (EG) |
//! |:--------------------------------|:----------------|:------------------------------------|
//! | General (GE) |[eig] | - |
//! | Symmetric (SY) / Hermitian (HE) |[eigh] |[eigh_generalized] |
//!
//! [eig]: eig/trait.Eig_.html#tymethod.eig
//! [eigh]: eigh/trait.Eigh_.html#tymethod.eigh
//! [eigh_generalized]: eigh/trait.Eigh_.html#tymethod.eigh_generalized
//!
//! Singular Value Decomposition (SVD), Least square problem
//! ----------------------------------------------------------
//!
//! | matrix type | Singular Value Decomposition (SVD) | SVD with divided-and-conquer (SDD) | Least square problem (LSD) |
//! |:-------------|:-----------------------------------|:-----------------------------------|:---------------------------|
//! | General (GE) | [svd] | [svddc] | [least_squares] |
//!
//! [svd]: svd/trait.SVD_.html#tymethod.svd
//! [svddc]: svddck/trait.SVDDC_.html#tymethod.svddc
//! [least_squares]: least_squares/trait.LeastSquaresSvdDivideConquer_.html#tymethod.least_squares
#[cfg(any(feature = "intel-mkl-system", feature = "intel-mkl-static"))]
extern crate intel_mkl_src as _src;
#[cfg(any(feature = "openblas-system", feature = "openblas-static"))]
extern crate openblas_src as _src;
#[cfg(any(feature = "netlib-system", feature = "netlib-static"))]
extern crate netlib_src as _src;
pub mod error;
pub mod layout;
mod cholesky;
mod eig;
mod eigh;
mod least_squares;
mod opnorm;
mod qr;
mod rcond;
mod solve;
mod solveh;
mod svd;
mod svddc;
mod triangular;
mod tridiagonal;
pub use self::cholesky::*;
pub use self::eig::*;
pub use self::eigh::*;
pub use self::least_squares::*;
pub use self::opnorm::*;
pub use self::qr::*;
pub use self::rcond::*;
pub use self::solve::*;
pub use self::solveh::*;
pub use self::svd::*;
pub use self::svddc::*;
pub use self::triangular::*;
pub use self::tridiagonal::*;
use cauchy::*;
pub type Pivot = Vec<i32>;
/// Trait for primitive types which implements LAPACK subroutines
pub trait Lapack:
OperatorNorm_
+ QR_
+ SVD_
+ SVDDC_
+ Solve_
+ Solveh_
+ Cholesky_
+ Eig_
+ Eigh_
+ Triangular_
+ Tridiagonal_
+ Rcond_
+ LeastSquaresSvdDivideConquer_
{
}
impl Lapack for f32 {}
impl Lapack for f64 {}
impl Lapack for c32 {}
impl Lapack for c64 {}
/// Upper/Lower specification for seveal usages
#[derive(Debug, Clone, Copy)]
#[repr(u8)]
pub enum UPLO {
Upper = b'U',
Lower = b'L',
}
impl UPLO {
pub fn t(self) -> Self {
match self {
UPLO::Upper => UPLO::Lower,
UPLO::Lower => UPLO::Upper,
}
}
}
#[derive(Debug, Clone, Copy)]
#[repr(u8)]
pub enum Transpose {
No = b'N',
Transpose = b'T',
Hermite = b'C',
}
#[derive(Debug, Clone, Copy)]
#[repr(u8)]
pub enum NormType {
One = b'O',
Infinity = b'I',
Frobenius = b'F',<|fim▁hole|>
impl NormType {
pub fn transpose(self) -> Self {
match self {
NormType::One => NormType::Infinity,
NormType::Infinity => NormType::One,
NormType::Frobenius => NormType::Frobenius,
}
}
}
/// Create a vector without initialization
///
/// Safety
/// ------
/// - Memory is not initialized. Do not read the memory before write.
///
unsafe fn vec_uninit<T: Sized>(n: usize) -> Vec<T> {
let mut v = Vec::with_capacity(n);
v.set_len(n);
v
}<|fim▁end|> | } |
<|file_name|>7.4.3_convert_table.py<|end_file_name|><|fim▁begin|>'''
Convert a table from a nested list to a nested dictionary and back.
-----------------------------------------------------------
(c) 2013 Allegra Via and Kristian Rother
Licensed under the conditions of the Python License
This code appears in section 7.4.3 of the book
"Managing Biological Data with Python".
-----------------------------------------------------------
'''
table = [
['protein', 'ext1', 'ext2', 'ext3'],
[0.16, 0.038, 0.044, 0.040],
[0.33, 0.089, 0.095, 0.091],
[0.66, 0.184, 0.191, 0.191],
[1.00, 0.280, 0.292, 0.283],
[1.32, 0.365, 0.367, 0.365],
[1.66, 0.441, 0.443, 0.444]
]
<|fim▁hole|>n = 0
key = table[0]
# to include the header , run the for loop over
# All table elements (including the first one)
for row in table[1:]:
n = n + 1
entry = {key[0]: row[0], key[1]: row[1], key[2]: row[2],
key[3]: row[3]}
nested_dict['row'+str(n)] = entry
# Test
# print(table[1:])
print(nested_dict)
nested_list = []
for entry in nested_dict:
key = nested_dict[entry]
nested_list.append([key['protein'], key['ext1'], key['ext2'],
key['ext3']])
print(nested_list)<|fim▁end|> | # convert nested list to nested dict
nested_dict = {} |
<|file_name|>paths.py<|end_file_name|><|fim▁begin|><|fim▁hole|>basedir = '/data/t3serv014/snarayan/deep/v_deepgen_4_akt_small/'
figsdir = '/home/snarayan/public_html/figs/deepgen/v4_akt/'<|fim▁end|> | |
<|file_name|>2D_Dielectric_Dielectric.js<|end_file_name|><|fim▁begin|>$(window).on('load', function() {//main
const dom = {//define inputs
tswitch: $("#wave-switch input"),
aSlider: $("input#angle"),//angle slider
nSlider: $("input#refractive-index-ratio"),
};
let layout = {//define layout of pot
showlegend: false,
scene: {
aspectmode: "cube",
xaxis: {range: [-2, 2]},
yaxis: {range: [-2, 2]},
zaxis: {range: [-2, 2]},
camera: {
eye: {x: 0, y: 0, z: -2}//adjust camera starting view
}
},
};
//define constants
let size = 100;
let t = 0;
let isPlay = false;
let E_0 = 0.5;
let w_r = 2e9;
let c = 3e8; // Speed of light
let n1 = 1;
let k_1 = (n1*w_r)/c;
let k_2,theta_i,theta_t;
let x_data = numeric.linspace(2, 0, size);//x and y data is always the same and just change z
let x_data_t = math.add(-2,x_data);
let y_data = numeric.linspace(-2, 2, size);
//constants based of of inputs
let condition = $("input[name = wave-switch]:checked").val();
let angle_of_incidence = parseFloat($("input#angle").val());
let n2 = parseFloat($("input#refractive-index-ratio").val());
function snell(theta_i){//snells law
console.log(Math.sin(theta_i));
console.log((n1 / n2))
return Math.asin((n1 / n2) * Math.sin(theta_i));
};
function getData_wave_incident(){//produces data for the incident wave on the boundry
let z,z_square = [];
let k_x = Math.cos(theta_i)*k_1;
let k_y = Math.sin(theta_i)*k_1;
for (let v=0;v < y_data.length ;v++) {
let z_row = [];
for (let i = 0; i < x_data.length ; i++) {
z = E_0* Math.sin(k_x* x_data[i]+k_y*y_data[v]+w_r*t);
z_row.push(z);
}
z_square.push(z_row);
}
return z_square
}
function getData_wave_reflected(){//produces data for the reflected wave on the boundry
let z,z_square = [];
<|fim▁hole|> for (let v=0;v < y_data.length ;v++) {
let z_row = [];
for (let i = 0; i < x_data.length ; i++) {
z = E_0_r* Math.sin(k_x* x_data[i]+k_y*y_data[v]-w_r*t);
z_row.push(z);
}
z_square.push(z_row);
}
return z_square
}
function getData_wave_transmitted(){//produces data for the incident wave on the boundry
let z,z_square = [];
let E_0_t = transmit();
let k_y = Math.sin(theta_i)*k_1;
let k_x = Math.cos(theta_t)*k_2;
for (let v=0;v < y_data.length ;v++) {
let z_row = [];
for (let i = 0; i < x_data_t.length ; i++) {
z = E_0_t*Math.sin(k_x*x_data_t[i]+k_y*y_data[v]+w_r*t);
z_row.push(z);
}
z_square.push(z_row);//Not entirelly sure the physics is correct need to review
}
return z_square
}
function transmit(){//gives the new amplitude of the transmitted wave
let E_t0;
if (isNaN(theta_t) === true){//if snells law return not a number this means total internal refection is occurring hence no transmitted wave(no attenuation accounted for)
return 0
}
else {
E_t0 = E_0 * (2. * n1 * Math.cos(theta_i)) / (n1 * Math.cos(theta_i) + n2 * Math.cos(theta_t))
return E_t0
}
};
function reflect() {//gives the amplitude of the refected wave
if (n1 === n2) {//if both materials have same refractive index then there is no reflection
return 0
}
else {
let E_r0;
if (isNaN(theta_t) === true){
E_r0 = E_0;
}
else {
E_r0 = E_0 * (n1 * Math.cos(theta_i) - n2 * Math.cos(theta_t)) / (n1 * Math.cos(theta_i) + n2 * Math.cos(theta_t))
}
return E_r0
}
};
function plot_data() {//produces the traces of the plot
$("#angle-display").html($("input#angle").val().toString()+"°");//update display
$("#refractive-index-ratio-display").html($("input#refractive-index-ratio").val().toString());
condition = $("input[name = wave-switch]:checked").val();//update value of constants
angle_of_incidence = parseFloat($("input#angle").val());
n2 = parseFloat($("input#refractive-index-ratio").val());
k_2 = (n2*w_r)/c;
theta_i = Math.PI * (angle_of_incidence / 180);
theta_t = snell(theta_i);
if (isNaN(Math.asin(n2))=== true){//update value of citical angle
$("#critical_angle-display").html("No Total Internal Reflection possible");
}else{
$("#critical_angle-display").html(((180*Math.asin(n2))/Math.PI).toFixed(2).toString()+"°");
}
let data = [];
if (condition === "incident") {//creates trace dependent of the conditions of the system
let incident_wave = {
opacity: 1,
x: x_data,
y: y_data,
z: getData_wave_incident(),
type: 'surface',
name: "Incident"
};
data.push(incident_wave);
}
else if(condition === "reflected") {
let reflected_wave = {
opacity: 1,
x: x_data,
y: y_data,
z: getData_wave_reflected(),
type: 'surface',
name: "Reflected"
};
data.push(reflected_wave);
}
else{
let incident_plus_reflected_wave = {
opacity: 1,
x: x_data,
y: y_data,
z: math.add(getData_wave_incident(),getData_wave_reflected()),
type: 'surface',
name:"Reflected and Incident combined"
};
data.push(incident_plus_reflected_wave);
}
let transmitted_wave = {
opacity: 1,
x: x_data_t,
y: y_data,
z: getData_wave_transmitted(),
type: 'surface',
name:"Transmitted"
};
let opacity_1;//opacity gives qualitative representation of refractive index
let opacity_2;
if((1 < n2) && (n2 <= 15)){//decide opacity dependant on refractive index
opacity_1 = 0;
opacity_2 = n2/10
}
else if((0.1 <= n2) && (n2< 1)){
opacity_1 = 0.1/n2;
opacity_2 = 0;
}
else{
opacity_1 = 0;
opacity_2 = 0;
}
let material_1 =//dielectric one
{
opacity: opacity_1,
color: '#379F9F',
type: "mesh3d",
name: "material 1",
z: [-2, -2, 2, 2, -2, -2, 2, 2],
y: [-2, 2, 2, -2, -2, 2, 2, -2],
x: [2, 2, 2, 2, 0, 0, 0, 0],
i: [7, 0, 0, 0, 4, 4, 6, 6, 4, 0, 3, 2],
j: [3, 4, 1, 2, 5, 6, 5, 2, 0, 1, 6, 3],
k: [0, 7, 2, 3, 6, 7, 1, 1, 5, 5, 7, 6],
};
let material_2 =//dielectric two
{
opacity: opacity_2,
color: '#379F9F',
type: "mesh3d",
name: "material 2",
z: [-2, -2, 2, 2, -2, -2, 2, 2],
y: [-2, 2, 2, -2, -2, 2, 2, -2],
x: [0, 0, 0, 0, -2, -2, -2, -2],
i: [7, 0, 0, 0, 4, 4, 6, 6, 4, 0, 3, 2],
j: [3, 4, 1, 2, 5, 6, 5, 2, 0, 1, 6, 3],
k: [0, 7, 2, 3, 6, 7, 1, 1, 5, 5, 7, 6],
};
data.push(transmitted_wave,material_1,material_2);
if (data.length < 5) {//animate function requires data sets of the same length hence those unused in situation must be filled with empty traces
let extensionSize = data.length;
for (let i = 0; i < (5 - extensionSize); ++i){
data.push(
{
type: "scatter3d",
mode: "lines",
x: [0],
y: [0],
z: [0]
}
);
}
}
return data
}
function update_graph(){//update animation
Plotly.animate("graph",
{data: plot_data()},//updated data
{
fromcurrent: true,
transition: {duration: 0,},
frame: {duration: 0, redraw: false,},
mode: "afterall"
}
);
};
function play_loop(){//handles the play button
if(isPlay === true) {
t++;//keeps time ticking
Plotly.animate("graph",
{data: plot_data()},
{
fromcurrent: true,
transition: {duration: 0,},
frame: {duration: 0, redraw: false,},
mode: "afterall"
});
requestAnimationFrame(play_loop);//prepares next frame
}
return 0;
};
function initial() {
Plotly.purge("graph");
Plotly.newPlot('graph', plot_data(),layout);//create plot
dom.tswitch.on("change", update_graph);//change of input produces reaction
dom.aSlider.on("input", update_graph);
dom.nSlider.on("input", update_graph);
$('#playButton').on('click', function() {
document.getElementById("playButton").value = (isPlay) ? "Play" : "Stop";//change button label
isPlay = !isPlay;
w_t = 0;//reset time to 0
requestAnimationFrame(play_loop);
});
};
initial();
});<|fim▁end|> | let k_x = Math.cos(-theta_i)*k_1;
let k_y = Math.sin(-theta_i)*k_1;
let E_0_r = reflect();
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>#[doc = r" Value read from the register"]
pub struct R {
bits: u32,
}
#[doc = r" Value to write to the register"]
pub struct W {
bits: u32,
}
impl super::OR {
#[doc = r" Modifies the contents of the register"]
#[inline(always)]
pub fn modify<F>(&self, f: F)
where
for<'w> F: FnOnce(&R, &'w mut W) -> &'w mut W,
{
let bits = self.register.get();
let r = R { bits: bits };
let mut w = W { bits: bits };
f(&r, &mut w);
self.register.set(w.bits);
}
#[doc = r" Reads the contents of the register"]
#[inline(always)]
pub fn read(&self) -> R {
R {
bits: self.register.get(),
}
}
#[doc = r" Writes to the register"]
#[inline(always)]
pub fn write<F>(&self, f: F)
where
F: FnOnce(&mut W) -> &mut W,
{
let mut w = W::reset_value();
f(&mut w);
self.register.set(w.bits);
}
#[doc = r" Writes the reset value to the register"]
#[inline(always)]
pub fn reset(&self) {
self.write(|w| w)
}
}
#[doc = r" Value of the field"]
pub struct RMPR {
bits: u8,
}
impl RMPR {
#[doc = r" Value of the field as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u8 {
self.bits
}
}
#[doc = r" Proxy"]
pub struct _RMPW<'a> {
w: &'a mut W,
}
impl<'a> _RMPW<'a> {
#[doc = r" Writes raw bits to the field"]
#[inline(always)]
pub unsafe fn bits(self, value: u8) -> &'a mut W {
const MASK: u8 = 3;
const OFFSET: u8 = 0;
self.w.bits &= !((MASK as u32) << OFFSET);
self.w.bits |= ((value & MASK) as u32) << OFFSET;
self.w
}
}
impl R {
#[doc = r" Value of the register as raw bits"]
#[inline(always)]
pub fn bits(&self) -> u32 {
self.bits
}<|fim▁hole|> const MASK: u8 = 3;
const OFFSET: u8 = 0;
((self.bits >> OFFSET) & MASK as u32) as u8
};
RMPR { bits }
}
}
impl W {
#[doc = r" Reset value of the register"]
#[inline(always)]
pub fn reset_value() -> W {
W { bits: 0 }
}
#[doc = r" Writes raw bits to the register"]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.bits = bits;
self
}
#[doc = "Bits 0:1 - Timer input 1 remap"]
#[inline(always)]
pub fn rmp(&mut self) -> _RMPW {
_RMPW { w: self }
}
}<|fim▁end|> | #[doc = "Bits 0:1 - Timer input 1 remap"]
#[inline(always)]
pub fn rmp(&self) -> RMPR {
let bits = { |
<|file_name|>test_backend_compile_engine.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import numpy as np
import tvm
from tvm import te
import tvm.testing
from tvm import relay
from tvm import autotvm
from tvm import topi
from tvm.relay.testing import run_infer_type<|fim▁hole|>
@autotvm.register_topi_compute("test/conv2d_1")
def _compute_conv2d_1(cfg, input, filter, strides, padding, dilation, out_dtype):
    """AutoTVM-registered compute for the "conv2d_1" test implementation.

    Delegates to the generic NCHW conv2d compute; *cfg* is unused.
    """
    out = topi.nn.conv2d_nchw(input, filter, strides, padding, dilation, out_dtype)
    return out
@autotvm.register_topi_schedule("test/conv2d_1")
def _schedule_conv2d_1(cfg, outs):
    """AutoTVM-registered schedule for "conv2d_1"; *cfg* is unused."""
    sch = topi.generic.schedule_conv2d_nchw(outs)
    return sch
@autotvm.register_topi_compute("test/conv2d_2")
def _compute_conv2d_2(cfg, input, filter, strides, padding, dilation, out_dtype):
    """AutoTVM-registered compute for the "conv2d_2" test implementation.

    Delegates to the generic NCHW conv2d compute; *cfg* is unused.
    """
    out = topi.nn.conv2d_nchw(input, filter, strides, padding, dilation, out_dtype)
    return out
@autotvm.register_topi_schedule("test/conv2d_2")
def _schedule_conv2d_2(cfg, outs):
    """AutoTVM-registered schedule for "conv2d_2"; *cfg* is unused."""
    sch = topi.generic.schedule_conv2d_nchw(outs)
    return sch
def _compute_conv2d_3(input, filter, strides, padding, dilation, out_dtype):
    """Plain (non-AutoTVM) compute used by the "conv2d_3" implementation."""
    result = topi.nn.conv2d_nchw(input, filter, strides, padding, dilation, out_dtype)
    return result
def _schedule_conv2d_3(outs):
    """Plain (non-AutoTVM) schedule used by the "conv2d_3" implementation."""
    sch = topi.generic.schedule_conv2d_nchw(outs)
    return sch
@tvm.target.override_native_generic_func("test_conv2d_strategy")
def _tmp_strategy(attrs, inputs, out_type, target):
    """Test-only conv2d strategy exposing three competing implementations.

    Registers:
      * ``conv2d_1`` (plevel 10) and ``conv2d_2`` (plevel 15) unconditionally;
      * ``conv2d_3`` (plevel 20) only under a specialized condition requiring
        the input channel count (``inputs[0].shape[1]``) to be >= 16.
    """
    strategy = relay.op.OpStrategy()
    unconditional = [
        (_compute_conv2d_1, _schedule_conv2d_1, "conv2d_1", 10),
        (_compute_conv2d_2, _schedule_conv2d_2, "conv2d_2", 15),
    ]
    for compute, schedule, name, plevel in unconditional:
        strategy.add_implementation(
            relay.op.strategy.wrap_compute_conv2d(compute),
            relay.op.strategy.wrap_topi_schedule(schedule),
            name=name,
            plevel=plevel,
        )
    in_channels = inputs[0].shape[1]
    with tvm.te.SpecializedCondition(in_channels >= 16):
        strategy.add_implementation(
            relay.op.strategy.wrap_compute_conv2d(_compute_conv2d_3),
            relay.op.strategy.wrap_topi_schedule(_schedule_conv2d_3),
            name="conv2d_3",
            plevel=20,
        )
    return strategy
def _create_record(task_name, dshape, wshape, target, cost):
    """Build a fake AutoTVM ``(MeasureInput, MeasureResult)`` tuning record.

    The record pretends the task ran successfully with the given *cost*, so
    tests can exercise ``autotvm.apply_history_best`` without real tuning.
    """
    task_args = [
        te.placeholder(dshape),
        te.placeholder(wshape),
        (1, 1),
        (1, 1, 1, 1),
        (1, 1),
        "float32",
    ]
    task = autotvm.task.create(task_name, task_args, target)
    config = autotvm.ConfigEntity(0, None, {}, [])
    config.cost = cost
    measure_input = autotvm.MeasureInput(target=target, task=task, config=config)
    measure_result = autotvm.MeasureResult(costs=(cost,), error_no=0, all_cost=-1, timestamp=-1)
    return (measure_input, measure_result)
def test_get_valid_implementations():
    """The set of valid implementations must reflect the specialized condition."""
    target = tvm.target.Target("llvm")

    def _valid_impls(dshape, wshape):
        # Build an inferred conv2d expression and query the registered
        # strategy for all implementations valid for these shapes.
        data = relay.var("data", shape=dshape)
        weight = relay.var("wshape", shape=wshape)
        out = run_infer_type(relay.nn.conv2d(data, weight, padding=(1, 1)))
        return relay.backend.compile_engine.get_valid_implementations(
            relay.op.get("nn.conv2d"),
            out.attrs,
            [te.placeholder(dshape), te.placeholder(wshape)],
            out.checked_type,
            target,
        )

    with TempOpAttr("nn.conv2d", "FTVMStrategy", _tmp_strategy):
        # ic = 8: only the two unconditional implementations are valid.
        assert len(_valid_impls((1, 8, 7, 7), (32, 8, 3, 3))) == 2
        # ic = 16: the specialized conv2d_3 implementation becomes valid too.
        assert len(_valid_impls((1, 16, 7, 7), (32, 16, 3, 3))) == 3
def test_select_implementation():
    """Implementation selection must honor plevel ordering and AutoTVM records."""
    target = tvm.target.Target("llvm")

    def _selected_name(dshape, wshape, use_autotvm=False):
        # Build an inferred conv2d expression, run selection, and return
        # just the chosen implementation's name.
        data = relay.var("data", shape=dshape)
        weight = relay.var("wshape", shape=wshape)
        out = run_infer_type(relay.nn.conv2d(data, weight, padding=(1, 1)))
        impl, _ = relay.backend.compile_engine.select_implementation(
            relay.op.get("nn.conv2d"),
            out.attrs,
            [te.placeholder(dshape), te.placeholder(wshape)],
            out.checked_type,
            target,
            use_autotvm,
        )
        return impl.name

    small = ((1, 8, 7, 7), (32, 8, 3, 3))  # ic = 8: conv2d_3 not applicable
    large = ((1, 16, 7, 7), (32, 16, 3, 3))  # ic = 16: conv2d_3 applicable

    with TempOpAttr("nn.conv2d", "FTVMStrategy", _tmp_strategy):
        # With no tuning records the highest applicable plevel wins.
        assert _selected_name(*small) == "conv2d_2"
        assert _selected_name(*small, True) == "conv2d_2"
        assert _selected_name(*large) == "conv2d_3"
        assert _selected_name(*large, True) == "conv2d_3"

        # A tuned conv2d_1 record overrides the plevel ordering.
        records = []
        records.append(_create_record("test/conv2d_1", small[0], small[1], target, 0.5))
        records.append(_create_record("test/conv2d_1", large[0], large[1], target, 1.0))
        with target:
            with autotvm.apply_history_best(records):
                assert _selected_name(*small, True) == "conv2d_1"
                assert _selected_name(*large, True) == "conv2d_1"

        # conv2d_2 now has the cheapest record for the small workload, while
        # the large workload still prefers its best conv2d_1 record.
        records.append(_create_record("test/conv2d_2", small[0], small[1], target, 0.2))
        records.append(_create_record("test/conv2d_1", large[0], large[1], target, 1.2))
        with target:
            with autotvm.apply_history_best(records):
                assert _selected_name(*small, True) == "conv2d_2"
                assert _selected_name(*large, True) == "conv2d_1"
def test_compile_engine():
    """Lowered functions should be cached per (function, target) pair."""
    engine = relay.backend.compile_engine.get()

    def _make_func(shape):
        # main(x) = (x + x) + x, i.e. 3 * x elementwise.
        x = relay.var("x", shape=shape)
        doubled = relay.add(x, x)
        tripled = relay.add(doubled, x)
        mod = tvm.IRModule.from_expr(relay.Function([x], tripled))
        mod = relay.transform.InferType()(mod)
        return mod["main"]

    vec_llvm_a = engine.lower(_make_func((10,)), "llvm")
    vec_llvm_b = engine.lower(_make_func((10,)), "llvm")
    scalar_llvm = engine.lower(_make_func(()), "llvm")
    # Identical function/target pairs hit the cache; different shapes do not.
    assert vec_llvm_a.same_as(vec_llvm_b)
    assert not scalar_llvm.same_as(vec_llvm_a)

    if tvm.testing.device_enabled("cuda"):
        # Same function on a different target gets a distinct cache entry.
        scalar_cuda = engine.lower(_make_func(()), "cuda")
        assert not scalar_llvm.same_as(scalar_cuda)

    # JIT the vector function and check it really computes 3 * x.
    for target in ["llvm"]:
        ctx = tvm.context(target)
        if tvm.testing.device_enabled(target):
            jitted = engine.jit(_make_func((10,)), target)
            x = tvm.nd.array(np.ones(10).astype("float32"), ctx=ctx)
            y = tvm.nd.empty((10,), ctx=ctx)
            jitted(x, y)
            tvm.testing.assert_allclose(y.asnumpy(), x.asnumpy() * 3)

    engine.dump()
def test_compile_placeholder_bypass():
    """A graph output tuple containing a bare input var must still build."""
    engine = relay.backend.compile_engine.get()
    x, y, z = (relay.var(name, shape=(2, 3)) for name in ("x", "y", "z"))
    # `x` is forwarded untouched while y/z go through concatenate.
    result = relay.Tuple([x, relay.op.concatenate([y, z], axis=0)])
    func = relay.Function(relay.analysis.free_vars(result), result)
    with tvm.transform.PassContext(opt_level=0):
        graph, lib, params = relay.build(tvm.IRModule.from_expr(func), "llvm")
def test_compile_injective_with_tuple():
    """An injective op (transpose) feeding a tuple output must compile."""
    lhs = relay.var("x", shape=(2, 3))
    rhs = relay.var("y", shape=(2, 3))
    bundled = relay.Tuple([relay.transpose(lhs), rhs])
    relay.build(tvm.IRModule.from_expr(relay.Function([lhs, rhs], bundled)), "llvm")
def test_compile_tuple_dup():
    """The same sub-expression may appear twice in an output tuple."""
    data = relay.var("data", shape=(16, 16))
    logged = relay.log(data)
    duplicated = relay.Tuple([logged, logged])
    relay.build(tvm.IRModule.from_expr(relay.Function([data], duplicated)), "llvm")
def test_compile_full():
    """full() must accept shapes whose dims mix int32 and int64 IntImms."""
    # Shape calculations can happen in int64. The test checks that the full
    # operator can handle shapes that are not int32.
    dims = [
        tvm.tir.IntImm("int32", 1),
        tvm.tir.IntImm("int64", 16),
        tvm.tir.IntImm("int64", 16),
        tvm.tir.IntImm("int32", 64),
    ]
    zeros = relay.full(relay.const(0, "int32"), shape=tuple(dims), dtype="int32")
    mod = tvm.IRModule.from_expr(relay.Function([], zeros))
    mod = relay.qnn.transform.CanonicalizeOps()(mod)
    relay.build(mod, "llvm")
def test_compile_nhwc_pack():
    """Quantized NHWC conv2d feeding elementwise ops must build on llvm."""
    data = relay.var("data", shape=(1, 1, 1, 1024), dtype="uint8")
    weight = relay.var("weight", shape=(1, 1, 1024, 1001), dtype="int8")
    p2 = relay.var("p2", shape=(1, 1, 1, 1), dtype="int32")

    conv = relay.nn.conv2d(
        data,
        weight,
        kernel_size=(1, 1),
        data_layout="NHWC",
        kernel_layout="HWIO",
        out_dtype="int32",
    )
    # Tile a scaled scalar across the channel axis and subtract it from conv.
    scaled = relay.multiply(relay.const(-22, dtype="int32"), p2)
    offset = relay.tile(scaled, reps=(1, 1, 1, 1001))
    body = relay.subtract(conv, offset)

    func = relay.Function(relay.analysis.free_vars(body), body)
    relay.build(func, target="llvm")
if __name__ == "__main__":
    # Run the whole suite when executed as a script.
    # (Removed a stray FIM extraction marker that was fused onto the last line.)
    test_get_valid_implementations()
    test_select_implementation()
    test_compile_engine()
    test_compile_placeholder_bypass()
    test_compile_injective_with_tuple()
    test_compile_tuple_dup()
    test_compile_full()
    test_compile_nhwc_pack()
import tvm.testing
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import subprocess
from devtoolslib.shell import Shell
from devtoolslib import http_server
class LinuxShell(Shell):
    """Wrapper around Mojo shell running on Linux.

    Args:
      executable_path: path to the shell binary
      command_prefix: optional list of arguments to prepend to the shell
          command, allowing e.g. to run the shell under debugger.
    """

    def __init__(self, executable_path, command_prefix=None):
        self.executable_path = executable_path
        # Fall back to a fresh list; never share a mutable default.
        self.command_prefix = command_prefix if command_prefix else []

    def ServeLocalDirectory(self, local_dir_path, port=0):
        """Serves the content of the local (host) directory, making it
        available to the shell under the url returned by the function.

        The server will run on a separate thread until the program terminates.
        The call returns immediately.

        Args:
          local_dir_path: path to the directory to be served
          port: port at which the server will be available to the shell

        Returns:
          The url that the shell can use to access the content of
          |local_dir_path|.
        """
        return 'http://%s:%d/' % http_server.StartHttpServer(local_dir_path, port)

    def Run(self, arguments):
        """Runs the shell with given arguments until shell exits, passing the
        stdout mingled with stderr produced by the shell onto the stdout.

        Args:
          arguments: list of arguments for the shell

        Returns:
          Exit code returned by the shell or None if the exit code cannot be
          retrieved.
        """
        command = self.command_prefix + [self.executable_path] + arguments
        return subprocess.call(command, stderr=subprocess.STDOUT)

    def RunAndGetOutput(self, arguments):
        """Runs the shell with given arguments until shell exits.

        Args:
          arguments: list of arguments for the shell

        Returns:
          A tuple of (return_code, output). |return_code| is the exit code
          returned by the shell or None if the exit code cannot be retrieved.
          |output| is the stdout mingled with the stderr produced by the
          shell.
        """
        command = self.command_prefix + [self.executable_path] + arguments
        p = subprocess.Popen(command, stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT)
        (output, _) = p.communicate()
        return p.returncode, output
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.