| prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
|---|---|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
#![forbid(unsafe_code)]
//! Test infrastructure for the Diem VM.
//!
//! This crate contains helpers for executing tests against the Diem VM.
use diem_types::{transaction::TransactionStatus, vm_status::KeptVMStatus};
pub mod account;
pub mod account_universe;
pub mod common_transactions;
pub mod compile;
pub mod currencies;
pub mod data_store;
pub mod execution_strategies;
pub mod executor;
pub mod gas_costs;
mod golden_outputs;
pub mod keygen;
mod proptest_types;
pub fn assert_status_eq(s1: &KeptVMStatus, s2: &KeptVMStatus) -> bool {
assert_eq!(s1, s2);
true
}
pub fn transaction_status_eq(t1: &TransactionStatus, t2: &TransactionStatus) -> bool {
match (t1, t2) {
(TransactionStatus::Discard(s1), TransactionStatus::Discard(s2)) => {
assert_eq!(s1, s2);
true
}
(TransactionStatus::Keep(s1), TransactionStatus::Keep(s2)) => {
assert_eq!(s1, s2);
true
}
_ => false,
}
}
#[macro_export]
macro_rules! assert_prologue_parity {
($e1:expr, $e2:expr, $e3:expr) => {
assert_eq!($e1.unwrap(), $e3);
assert!(transaction_status_eq($e2, &TransactionStatus::Discard($e3)));
};
}
#[macro_export]
macro_rules! assert_prologue_disparity {
($e1:expr => $e2:expr, $e3:expr => $e4:expr) => {
assert_eq!($e1, $e2);
assert!(transaction_status_eq($e3, &$e4));
};
}<|fim▁hole|>macro_rules! current_function_name {
() => {{
fn f() {}
fn type_name_of<T>(_: T) -> &'static str {
std::any::type_name::<T>()
}
let name = type_name_of(f);
&name[..name.len() - 3]
}};
}<|fim▁end|> |
/// Returns the name of the current function. This macro is used to derive the name for the golden
/// file of each test case.
#[macro_export] |
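
As an aside on this first sample: once the completion above is spliced into the `<|fim▁hole|>` position, `current_function_name!` works by naming a nested function and trimming the trailing `::f` from its type name. A minimal, self-contained sketch of that mechanic (my addition, not part of the dataset row):

```rust
// Sketch of the macro's mechanics; assumes only std.
macro_rules! current_function_name {
    () => {{
        fn f() {}
        fn type_name_of<T>(_: T) -> &'static str {
            std::any::type_name::<T>()
        }
        let name = type_name_of(f);
        // type_name_of(f) is e.g. "demo::my_test::f"; drop the 3-byte "::f"
        &name[..name.len() - 3]
    }};
}

fn my_test() {
    let name = current_function_name!();
    assert!(name.ends_with("::my_test"));
}

fn main() {
    my_test();
}
```

The resulting path (e.g. `demo::my_test`) is what the doc comment refers to as the key for each test's golden file.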
<|file_name|>StatusPanel.java<|end_file_name|><|fim▁begin|>package cocoonClient.Panels;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.event.ListSelectionEvent;
import javax.swing.table.DefaultTableModel;
import JSONTransmitProtocol.newReader.JSONReader;
import cocoonClient.Connector.AbstractConnector;
import cocoonClient.Data.UserInfo;
public class StatusPanel extends CocoonDisplayPanel implements AbstractConnector{
private JTable table;
public StatusPanel(){
super(UserInfo.getMainFrame());
setRightPanel(new TestRightPanel());
this.setSize(600, 500);
this.setLayout(new FlowLayout());
init();
UserInfo.getPanels().put("Status", this);
}
private void init() {
try{
//Table// declare and build the JTable component from a Model object
table = new JTable(){
public void valueChanged(ListSelectionEvent e){
super.valueChanged(e); // call the base class's valueChanged() method, otherwise selection does not work correctly
if( table.getSelectedRow() == -1) return;// get the table's currently selected row; stop if no row is selected
}
};
table.setModel(new DefaultTableModel(){
@Override
public boolean isCellEditable(int row, int column){
return false;
}
});
table.setShowGrid(true);
table.setAutoResizeMode(JTable.AUTO_RESIZE_OFF);
table.setSelectionBackground(Color.ORANGE);// set the selection background color
table.setCellSelectionEnabled(true); // enable cell-level selection
// get the Model object that manages the table data and bind to it
DefaultTableModel dtm = (DefaultTableModel)table.getModel(); // declare the TableModel object that manages the table data
String columnTitle[] = new String[]{"Date", "Username", "Problem", "Status"};
int columnWidth[] = new int[]{150, 120, 190, 120};
for(int i = 0; i < columnTitle.length; i++){
dtm.addColumn(columnTitle[i]);
<|fim▁hole|> // column width settings
table.getColumnModel().getColumn(i).setPreferredWidth(columnWidth[i]);
}
// register a listener object that responds to the JTable's MouseEvent events
/* table.addMouseListener(new MouseAdapter(){
public void mouseClicked(MouseEvent e){
int selRow = table.rowAtPoint(e.getPoint());// get the row index of the data at the mouse click position
String Size = (String) table.getValueAt(selRow, 2); // get the value of column 3 of the clicked row
if (Integer.parseInt(Size)> 0 ){
}
}
});*/
}catch ( Exception e){
e.printStackTrace();
}
JScrollPane pane = new JScrollPane(table);
pane.setPreferredSize(new Dimension(600, 450));
add(pane);
}
private void addStatus(String response){
JSONReader reader = new JSONReader(response);
DefaultTableModel dtm = (DefaultTableModel)table.getModel();
String result = "";
try{
result = reader.getSubmission().getResult().split("\n")[0];
}
catch(Exception e){}
dtm.addRow(new String[] {
reader.getSubmission().getTime(),
reader.getSubmission().getUsername(),
UserInfo.getProblemSet().getProblemName(reader.getSubmission().getPID()),
result
});
}
@Override
public void recieveResponse(String response) {
addStatus(response);
}
}<|fim▁end|> | }
for(int i = 0; i < columnWidth.length; i++){
|
<|file_name|>search.tests.ts<|end_file_name|><|fim▁begin|>import Search from "./search";
import { INotify } from "../../services/notify";
import { IProfileService, CategoryFilter } from "../../services/api/profileService";
import ProfileResult from "../../services/api/profileResult";
import Failure from "../../services/failure";
describe("Search", () => {
let profileService: IProfileService;
let notify: INotify;
let profiles: Array<ProfileResult>;
let sut: Search;
beforeEach(() => {
// Cancel out the console calls to avoid noisy logging in tests
spyOn(console, "info");
profiles = new Array<ProfileResult>(
<ProfileResult>{
firstName: "Fred",
lastName: "Jones"
}
);
profileService = <IProfileService>{
searchProfiles: (filters: Array<CategoryFilter>): Promise<Array<ProfileResult>> => {
return Promise.resolve(profiles);
}
};
notify = <INotify>{
showInformation: (message: string): void => {
},
showFailure: (failure: Failure): void => {
},
showWarning: (message: string): void => {
},
showSuccess: (message: string): void => {
},
showError: (message: string): void => {
}
};
sut = new Search();
sut.$router = <any>{
push: (options: any): void => {
}
};
sut.configure(profileService, notify);
});
describe("OnRunSearch", () => {
it("determines filters from selected categories", async () => {
let spy = spyOn(profileService, "searchProfiles");
let gender = "Female";
let languages = new Array<string>("English");
let skills = new Array<string>("C#");
await sut.OnRunSearch(gender, languages, skills);
expect(profileService.searchProfiles).toHaveBeenCalled();
expect(spy.calls.mostRecent().args[0].length).toEqual(3);
expect(spy.calls.mostRecent().args[0][0].group).toEqual("skill");
expect(spy.calls.mostRecent().args[0][0].name).toEqual("C#");
expect(spy.calls.mostRecent().args[0][1].group).toEqual("language");
expect(spy.calls.mostRecent().args[0][1].name).toEqual("English");
expect(spy.calls.mostRecent().args[0][2].group).toEqual("gender");
expect(spy.calls.mostRecent().args[0][2].name).toEqual("Female");
});
it("displays notification when searching profiles throws known failure", async () => {
let expected = new Failure("Uh oh!");
spyOn(notify, "showFailure");
profileService.searchProfiles = (filters: Array<CategoryFilter>): Promise<Array<ProfileResult>> => {
throw expected;
};
let gender = "Female";
let languages = new Array<string>("English");
let skills = new Array<string>("C#");
<|fim▁hole|> expect(notify.showFailure).toHaveBeenCalledWith(expected);
});
it("displays notification when searching profiles throws unknown failure", async () => {
let expected = new Error("Uh oh!");
spyOn(notify, "showError");
profileService.searchProfiles = (filters: Array<CategoryFilter>): Promise<Array<ProfileResult>> => {
throw expected;
};
let gender = "Female";
let languages = new Array<string>("English");
let skills = new Array<string>("C#");
try {
await sut.OnRunSearch(gender, languages, skills);
}
catch (e) {
expect(e).toEqual(expected);
}
expect(notify.showError).toHaveBeenCalled();
});
it("returns profile results", async () => {
let gender = "Female";
let languages = new Array<string>("English");
let skills = new Array<string>("C#");
await sut.OnRunSearch(gender, languages, skills);
expect(sut.profiles).toEqual(profiles);
});
it("sets flags after search", async () => {
let gender = "Female";
let languages = new Array<string>("English");
let skills = new Array<string>("C#");
await sut.OnRunSearch(gender, languages, skills);
expect(sut.searching).toBeFalsy();
expect(sut.searchRun).toBeTruthy();
});
it("sets flags when searching profiles throws unknown failure", async () => {
let expected = new Failure("Uh oh!");
profileService.searchProfiles = (filters: Array<CategoryFilter>): Promise<Array<ProfileResult>> => {
throw expected;
};
let gender = "Female";
let languages = new Array<string>("English");
let skills = new Array<string>("C#");
await sut.OnRunSearch(gender, languages, skills);
expect(sut.searching).toBeFalsy();
expect(sut.searchRun).toBeTruthy();
});
});
});<|fim▁end|> | await sut.OnRunSearch(gender, languages, skills);
|
<|file_name|>pago-dialog.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit, OnDestroy } from '@angular/core';
import { ActivatedRoute } from '@angular/router';
import { Response } from '@angular/http';
import { NgbActiveModal, NgbModalRef } from '@ng-bootstrap/ng-bootstrap';
import { EventManager, AlertService, JhiLanguageService } from 'ng-jhipster';
import { Pago } from './pago.model';
import { PagoPopupService } from './pago-popup.service';
import { PagoService } from './pago.service';
import { Encargo, EncargoService } from '../encargo';
@Component({
selector: 'jhi-pago-dialog',<|fim▁hole|>export class PagoDialogComponent implements OnInit {
pago: Pago;
authorities: any[];
isSaving: boolean;
encargos: Encargo[];
constructor(
public activeModal: NgbActiveModal,
private jhiLanguageService: JhiLanguageService,
private alertService: AlertService,
private pagoService: PagoService,
private encargoService: EncargoService,
private eventManager: EventManager
) {
this.jhiLanguageService.setLocations(['pago']);
}
ngOnInit() {
this.isSaving = false;
this.authorities = ['ROLE_USER', 'ROLE_ADMIN'];
this.encargoService.query().subscribe(
(res: Response) => { this.encargos = res.json(); }, (res: Response) => this.onError(res.json()));
}
clear () {
this.activeModal.dismiss('cancel');
}
save () {
this.isSaving = true;
if (this.pago.id !== undefined) {
this.pagoService.update(this.pago)
.subscribe((res: Pago) =>
this.onSaveSuccess(res), (res: Response) => this.onSaveError(res));
} else {
this.pagoService.create(this.pago)
.subscribe((res: Pago) =>
this.onSaveSuccess(res), (res: Response) => this.onSaveError(res));
}
}
private onSaveSuccess (result: Pago) {
this.eventManager.broadcast({ name: 'pagoListModification', content: 'OK'});
this.isSaving = false;
this.activeModal.dismiss(result);
}
private onSaveError (error) {
try {
error.json();
} catch (exception) {
error.message = error.text();
}
this.isSaving = false;
this.onError(error);
}
private onError (error) {
this.alertService.error(error.message, null, null);
}
trackEncargoById(index: number, item: Encargo) {
return item.id;
}
}
@Component({
selector: 'jhi-pago-popup',
template: ''
})
export class PagoPopupComponent implements OnInit, OnDestroy {
modalRef: NgbModalRef;
routeSub: any;
constructor (
private route: ActivatedRoute,
private pagoPopupService: PagoPopupService
) {}
ngOnInit() {
this.routeSub = this.route.params.subscribe(params => {
if ( params['id'] ) {
this.modalRef = this.pagoPopupService
.open(PagoDialogComponent, params['id']);
} else {
this.modalRef = this.pagoPopupService
.open(PagoDialogComponent);
}
});
}
ngOnDestroy() {
this.routeSub.unsubscribe();
}
}<|fim▁end|> | templateUrl: './pago-dialog.component.html'
}) |
<|file_name|>integration.py<|end_file_name|><|fim▁begin|>import cgi
import hashlib
import http.server
import io
import os
import posixpath
import ssl
import threading
import time
import urllib.parse
import pyftpdlib.authorizers
import pyftpdlib.handlers
import pyftpdlib.servers
class FTPServer:
def __init__(self, port, root, report_size):
class FTPHandlerNoSIZE(pyftpdlib.handlers.FTPHandler):
proto_cmds = {k: v for k, v in pyftpdlib.handlers.proto_cmds.items() if k != 'SIZE'}
authorizer = pyftpdlib.authorizers.DummyAuthorizer()
authorizer.add_anonymous(root)
handler = pyftpdlib.handlers.FTPHandler if report_size else FTPHandlerNoSIZE
handler.authorizer = authorizer
self.server = pyftpdlib.servers.FTPServer(('', port), handler)
def serve(self):
self.server.serve_forever()
class HTTPServer:
def __init__(self, port, cert, root, report_size):
class RequestHandler(http.server.BaseHTTPRequestHandler):
def do_GET(self):
path = self.path.split('?', 1)[0].split('#', 1)[0]
path = urllib.parse.unquote(path)
path = posixpath.normpath(path)
path = os.path.join(root, path.lstrip('/'))
try:
with open(path, 'rb') as f:
data = f.read()
self.send_response(200)
content_type = 'application/json' if 'versioneers' in path else 'application/octet-stream'
self.send_header('Content-Type', content_type)
self.send_header('Content-Transfer-Encoding', 'binary')
if report_size:
self.send_header('Content-Length', len(data))
self.end_headers()
self.wfile.write(data)
except FileNotFoundError:
self.send_error(404)
def do_POST(self):
def dechunk(f):
bio = io.BytesIO()
while True:
chunksize = bytearray()
while not chunksize.endswith(b'\r\n'):
chunksize += f.read(1)
chunksize = chunksize.decode().split(';')[0]  # chunk extensions are delimited by ';' per RFC 7230
chunksize = int(chunksize, 16)
if chunksize == 0:
break
chunk = f.read(chunksize)
assert(f.read(2) == b'\r\n')
bio.write(chunk)
bio.seek(0)
return bio
def verify_hash(f, hashtype, hsh):
try:
chksum = hashlib.new(hashtype)
except ValueError:
return False
chksum.update(f.read())
return chksum.hexdigest() == hsh
if self.headers.get('Transfer-Encoding') == 'chunked':<|fim▁hole|> data = cgi.FieldStorage(fp=fp, headers=self.headers,
environ={'REQUEST_METHOD': 'POST'},
# accept maximum of 10MB of data
limit=10 * 1024 * 1024)
try:
if 'filename' in data:
resp = b'Missing'
self.send_response(200)
self.send_header('Content-Type', 'text/plain')
self.send_header('Content-Length', len(resp))
self.end_headers()
self.wfile.write(resp)
else:
hashtype = [k for k in data.keys() if k.endswith('sum')][0]
hsh = data[hashtype].value
hashtype = hashtype.split('sum')[0]
if verify_hash(data['file'].file, hashtype, hsh):
self.send_response(204)
self.end_headers()
else:
self.send_error(500)
except (KeyError, IndexError):
self.send_error(400)
self.server = http.server.HTTPServer(('', port), RequestHandler)
if cert:
self.server.socket = ssl.wrap_socket(self.server.socket, certfile=cert, server_side=True)
def serve(self):
self.server.serve_forever()
def main():
servers = [
FTPServer(2100, '/srv', True),
FTPServer(2101, '/srv', False),
HTTPServer(8000, None, '/srv', True),
HTTPServer(8001, None, '/srv', False),
HTTPServer(4430, '/cert.pem', '/srv', True),
HTTPServer(4431, '/cert.pem', '/srv', False),
]
threads = [threading.Thread(target=s.serve) for s in servers]
for t in threads:
t.setDaemon(True)
t.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
pass
if __name__ == '__main__':
main()<|fim▁end|> | fp = dechunk(self.rfile)
else:
fp = self.rfile |
<|file_name|>addressbook.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************
**
** Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
** Contact: http://www.qt-project.org/legal
**
** This file is part of the examples of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:BSD$
** You may use this file under the terms of the BSD license as follows:
**
** "Redistribution and use in source and binary forms, with or without
** modification, are permitted provided that the following conditions are
** met:
** * Redistributions of source code must retain the above copyright
** notice, this list of conditions and the following disclaimer.
** * Redistributions in binary form must reproduce the above copyright
** notice, this list of conditions and the following disclaimer in
** the documentation and/or other materials provided with the
** distribution.
** * Neither the name of Digia Plc and its Subsidiary(-ies) nor the names
** of its contributors may be used to endorse or promote products derived
** from this software without specific prior written permission.
**
**
** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
** "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
** LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
** A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
** OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
** SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
** LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
** DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
** THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
** (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
** OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
**
** $QT_END_LICENSE$
**
****************************************************************************/
#include <QtWidgets><|fim▁hole|>
//! [constructor and input fields]
AddressBook::AddressBook(QWidget *parent)
: QWidget(parent)
{
QLabel *nameLabel = new QLabel(tr("Name:"));
nameLine = new QLineEdit;
QLabel *addressLabel = new QLabel(tr("Address:"));
addressText = new QTextEdit;
//! [constructor and input fields]
//! [layout]
QGridLayout *mainLayout = new QGridLayout;
mainLayout->addWidget(nameLabel, 0, 0);
mainLayout->addWidget(nameLine, 0, 1);
mainLayout->addWidget(addressLabel, 1, 0, Qt::AlignTop);
mainLayout->addWidget(addressText, 1, 1);
//! [layout]
//![setting the layout]
setLayout(mainLayout);
setWindowTitle(tr("Simple Address Book"));
}
//! [setting the layout]<|fim▁end|> | #include "addressbook.h" |
<|file_name|>transmute-non-immediate-to-immediate.rs<|end_file_name|><|fim▁begin|>// run-pass
// Issue #7988
// Transmuting non-immediate type to immediate type
<|fim▁hole|>pub fn main() {
unsafe {
::std::mem::transmute::<[isize; 1],isize>([1])
};
}<|fim▁end|> | // pretty-expanded FIXME #23616
|
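
For contrast with the sample above, a sketch (my addition, in modern Rust rather than the sample's older dialect) of the same one-element-array-to-scalar conversion without `unsafe`, next to the transmute it demonstrates:

```rust
pub fn main() {
    // Safe equivalent: move the single element out by destructuring.
    let [x] = [1isize];
    assert_eq!(x, 1);

    // The sample's transmute, valid because the two types have equal size.
    let y = unsafe { std::mem::transmute::<[isize; 1], isize>([1]) };
    assert_eq!(y, 1);
}
```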
<|file_name|>controller.js<|end_file_name|><|fim▁begin|>import auth from '../auth';
import clone from 'clone';
import storage from './storage';
async function addBlockOrItem(dbConn, token, codeObj, props, type) {
let user = await auth.getUser(token);
console.log(`Adding new ${type} for user ${user.login}`);
let add;
let newType = {
code: codeObj,
name: props.name,
icon: 'code',
owner: user.login
};
if(type == 'item') {
add = storage.addItemType;
newType.crosshairIcon = props.crosshairIcon;
newType.adjacentActive = props.adjacentActive;
} else {
add = storage.addBlockType;
newType.material = props.material;
}
await add(dbConn, newType);
return newType;
}
async function updateBlockOrItemCode(dbConn, token, id, codeObj, type) {
let user = await auth.getUser(token);
console.log(`Updating ${type} ${id} for user ${user.login}`);
let get, add, update;
if(type == 'item') {
get = storage.getItemType;
add = storage.addItemType;
update = storage.updateItemType;
} else {
get = storage.getBlockType;
add = storage.addBlockType;
update = storage.updateBlockType;
}
let original = await get(dbConn, id);
if(original.owner != user.login) {
throw new Error(`${type} ${id} belongs to ${original.owner} - ${user.login} doesn't have access.`);
}
let updated = clone(original);
updated.code = codeObj;
delete updated.newerVersion;
await add(dbConn, updated);
original.newerVersion = updated.id;
await update(dbConn, original);
return updated;
}
export default {
async getToolbar(dbConn, token) {
let user = await auth.getUser(token);
return await storage.getToolbar(dbConn, user.login);
},
async setToolbarItem(dbConn, token, position, type, id) {
let user = await auth.getUser(token);
<|fim▁hole|> async removeToolbarItem(dbConn, token, position) {
let user = await auth.getUser(token);
await storage.updateToolbarItem(dbConn, user.login, position, null);
},
async getAll(dbConn) {
let itemTypes = await storage.getAllItemTypes(dbConn);
let blockTypes = await storage.getAllBlockTypes(dbConn);
return {
itemTypes,
blockTypes
};
},
async getItemTypes(dbConn, token, ids) {
return await storage.getItemTypes(dbConn, ids);
},
async getBlockTypes(dbConn, token, ids) {
return await storage.getBlockTypes(dbConn, ids);
},
async updateBlockCode(dbConn, token, id, codeObj) {
return await updateBlockOrItemCode(dbConn, token, id, codeObj, 'block');
},
async updateItemCode(dbConn, token, id, codeObj) {
return await updateBlockOrItemCode(dbConn, token, id, codeObj, 'item');
},
async addBlockType(dbConn, token, codeObj, props) {
return await addBlockOrItem(dbConn, token, codeObj, props, 'block');
},
async addItemType(dbConn, token, codeObj, props) {
return await addBlockOrItem(dbConn, token, codeObj, props, 'item');
}
};<|fim▁end|> | await storage.updateToolbarItem(dbConn, user.login, position, {type, id});
}, |
<|file_name|>util.go<|end_file_name|><|fim▁begin|>// Copyright 2014 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
package tests
import (
"bytes"
"fmt"
"math/big"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/core"
"github.com/ethereum/go-ethereum/core/state"
"github.com/ethereum/go-ethereum/core/types"
"github.com/ethereum/go-ethereum/core/vm"
"github.com/ethereum/go-ethereum/crypto"
"github.com/ethereum/go-ethereum/ethdb"
)
func checkLogs(tlog []Log, logs vm.Logs) error {
if len(tlog) != len(logs) {
return fmt.Errorf("log length mismatch. Expected %d, got %d", len(tlog), len(logs))
} else {
for i, log := range tlog {
if common.HexToAddress(log.AddressF) != logs[i].Address {
return fmt.Errorf("log address expected %v got %x", log.AddressF, logs[i].Address)
}
if !bytes.Equal(logs[i].Data, common.FromHex(log.DataF)) {
return fmt.Errorf("log data expected %v got %x", log.DataF, logs[i].Data)
}
if len(log.TopicsF) != len(logs[i].Topics) {
return fmt.Errorf("log topics length expected %d got %d", len(log.TopicsF), logs[i].Topics)
} else {
for j, topic := range log.TopicsF {
if common.HexToHash(topic) != logs[i].Topics[j] {
return fmt.Errorf("log topic[%d] expected %v got %x", j, topic, logs[i].Topics[j])
}
}
}
genBloom := common.LeftPadBytes(types.LogsBloom(vm.Logs{logs[i]}).Bytes(), 256)
if !bytes.Equal(genBloom, common.Hex2Bytes(log.BloomF)) {
return fmt.Errorf("bloom mismatch")
}
}
}
return nil
}
type Account struct {
Balance string
Code string
Nonce string
Storage map[string]string
}
type Log struct {
AddressF string `json:"address"`
DataF string `json:"data"`
TopicsF []string `json:"topics"`
BloomF string `json:"bloom"`
}
func (self Log) Address() []byte { return common.Hex2Bytes(self.AddressF) }
func (self Log) Data() []byte { return common.Hex2Bytes(self.DataF) }
func (self Log) RlpData() interface{} { return nil }
func (self Log) Topics() [][]byte {
t := make([][]byte, len(self.TopicsF))
for i, topic := range self.TopicsF {
t[i] = common.Hex2Bytes(topic)
}
return t
}
func StateObjectFromAccount(db ethdb.Database, addr string, account Account) *state.StateObject {
obj := state.NewStateObject(common.HexToAddress(addr), db)
obj.SetBalance(common.Big(account.Balance))
if common.IsHex(account.Code) {
account.Code = account.Code[2:]
}
obj.SetCode(common.Hex2Bytes(account.Code))
obj.SetNonce(common.Big(account.Nonce).Uint64())
return obj
}
type VmEnv struct {
CurrentCoinbase string
CurrentDifficulty string
CurrentGasLimit string
CurrentNumber string
CurrentTimestamp interface{}
PreviousHash string
}
type VmTest struct {
Callcreates interface{}
//Env map[string]string
Env VmEnv
Exec map[string]string
Transaction map[string]string
Logs []Log
Gas string
Out string
Post map[string]Account
Pre map[string]Account
PostStateRoot string
}
type Env struct {
depth int
state *state.StateDB
skipTransfer bool
initial bool
Gas *big.Int
origin common.Address
parent common.Hash
coinbase common.Address
number *big.Int
time *big.Int
difficulty *big.Int
gasLimit *big.Int
logs []vm.StructLog
vmTest bool
}
func NewEnv(state *state.StateDB) *Env {
return &Env{
state: state,
}
}
func (self *Env) StructLogs() []vm.StructLog {
return self.logs
}
func (self *Env) AddStructLog(log vm.StructLog) {
self.logs = append(self.logs, log)
}
func NewEnvFromMap(state *state.StateDB, envValues map[string]string, exeValues map[string]string) *Env {
env := NewEnv(state)
env.origin = common.HexToAddress(exeValues["caller"])
env.parent = common.HexToHash(envValues["previousHash"])
env.coinbase = common.HexToAddress(envValues["currentCoinbase"])
env.number = common.Big(envValues["currentNumber"])
env.time = common.Big(envValues["currentTimestamp"])
env.difficulty = common.Big(envValues["currentDifficulty"])
env.gasLimit = common.Big(envValues["currentGasLimit"])
env.Gas = new(big.Int)
return env
}
func (self *Env) Origin() common.Address { return self.origin }
func (self *Env) BlockNumber() *big.Int { return self.number }
func (self *Env) Coinbase() common.Address { return self.coinbase }
func (self *Env) Time() *big.Int { return self.time }
func (self *Env) Difficulty() *big.Int { return self.difficulty }
func (self *Env) Db() vm.Database { return self.state }
func (self *Env) GasLimit() *big.Int { return self.gasLimit }
func (self *Env) VmType() vm.Type { return vm.StdVmTy }
func (self *Env) GetHash(n uint64) common.Hash {
return common.BytesToHash(crypto.Sha3([]byte(big.NewInt(int64(n)).String())))
}
func (self *Env) AddLog(log *vm.Log) {
self.state.AddLog(log)
}
func (self *Env) Depth() int { return self.depth }
func (self *Env) SetDepth(i int) { self.depth = i }
func (self *Env) CanTransfer(from common.Address, balance *big.Int) bool {
if self.skipTransfer {
if self.initial {
self.initial = false
return true
}
}<|fim▁hole|>}
func (self *Env) MakeSnapshot() vm.Database {
return self.state.Copy()
}
func (self *Env) SetSnapshot(copy vm.Database) {
self.state.Set(copy.(*state.StateDB))
}
func (self *Env) Transfer(from, to vm.Account, amount *big.Int) {
if self.skipTransfer {
return
}
core.Transfer(from, to, amount)
}
func (self *Env) Call(caller vm.ContractRef, addr common.Address, data []byte, gas, price, value *big.Int) ([]byte, error) {
if self.vmTest && self.depth > 0 {
caller.ReturnGas(gas, price)
return nil, nil
}
ret, err := core.Call(self, caller, addr, data, gas, price, value)
self.Gas = gas
return ret, err
}
func (self *Env) CallCode(caller vm.ContractRef, addr common.Address, data []byte, gas, price, value *big.Int) ([]byte, error) {
if self.vmTest && self.depth > 0 {
caller.ReturnGas(gas, price)
return nil, nil
}
return core.CallCode(self, caller, addr, data, gas, price, value)
}
func (self *Env) DelegateCall(caller vm.ContractRef, addr common.Address, data []byte, gas, price *big.Int) ([]byte, error) {
if self.vmTest && self.depth > 0 {
caller.ReturnGas(gas, price)
return nil, nil
}
return core.DelegateCall(self, caller, addr, data, gas, price)
}
func (self *Env) Create(caller vm.ContractRef, data []byte, gas, price, value *big.Int) ([]byte, common.Address, error) {
if self.vmTest {
caller.ReturnGas(gas, price)
nonce := self.state.GetNonce(caller.Address())
obj := self.state.GetOrNewStateObject(crypto.CreateAddress(caller.Address(), nonce))
return nil, obj.Address(), nil
} else {
return core.Create(self, caller, data, gas, price, value)
}
}
type Message struct {
from common.Address
to *common.Address
value, gas, price *big.Int
data []byte
nonce uint64
}
func NewMessage(from common.Address, to *common.Address, data []byte, value, gas, price *big.Int, nonce uint64) Message {
return Message{from, to, value, gas, price, data, nonce}
}
func (self Message) Hash() []byte { return nil }
func (self Message) From() (common.Address, error) { return self.from, nil }
func (self Message) FromFrontier() (common.Address, error) { return self.from, nil }
func (self Message) To() *common.Address { return self.to }
func (self Message) GasPrice() *big.Int { return self.price }
func (self Message) Gas() *big.Int { return self.gas }
func (self Message) Value() *big.Int { return self.value }
func (self Message) Nonce() uint64 { return self.nonce }
func (self Message) Data() []byte { return self.data }<|fim▁end|> |
return self.state.GetBalance(from).Cmp(balance) >= 0 |
<|file_name|>Controller.java<|end_file_name|><|fim▁begin|>package sample;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.control.TextArea;
import javafx.scene.paint.Color;
import javafx.scene.text.Text;
import javafx.scene.text.TextFlow;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.ResourceBundle;
public class Controller implements Initializable {
@FXML
private TextArea input;
@FXML
private TextFlow display;
public void FocusInput(){
input.requestFocus();
}
@Override
public void initialize(URL url, ResourceBundle resourceBundle) {<|fim▁hole|> Editor ed = new Editor();
input.textProperty().addListener((observableValue, s, s2) -> {
display.getChildren().clear();
ed.render(observableValue.getValue());
List x = ed.parse();
x.forEach(i -> {
List y = (List) i;
Text t1 = new Text(y.get(1) + " ");
if((int) y.get(0) == 1) t1.setFill(Color.BLUE);
display.getChildren().add(t1);
System.out.println(i);
});
});
}
}<|fim▁end|> | |
<|file_name|>open_options.rs<|end_file_name|><|fim▁begin|>use std::fs::OpenOptions;
#[allow(unused_must_use)]
#[warn(clippy::nonsensical_open_options)]
fn main() {
OpenOptions::new().read(true).truncate(true).open("foo.txt");
OpenOptions::new().append(true).truncate(true).open("foo.txt");
<|fim▁hole|> OpenOptions::new().append(true).append(false).open("foo.txt");
OpenOptions::new().truncate(true).truncate(false).open("foo.txt");
}<|fim▁end|> | OpenOptions::new().read(true).read(false).open("foo.txt");
OpenOptions::new().create(true).create(false).open("foo.txt");
OpenOptions::new().write(true).write(false).open("foo.txt"); |
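
For comparison with the nonsensical combinations this lint fixture flags, here is a sketch (my addition, assuming standard `std::fs::OpenOptions` semantics) of combinations the lint accepts:

```rust
use std::fs::OpenOptions;

fn main() -> std::io::Result<()> {
    // Coherent: create the file if missing, truncate it, then write.
    let _w = OpenOptions::new()
        .write(true)
        .create(true)
        .truncate(true)
        .open("foo.txt")?;
    // Coherent: plain read-only access, no conflicting flags.
    let _r = OpenOptions::new().read(true).open("foo.txt")?;
    Ok(())
}
```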
<|file_name|>DistributedBuildingMgrAI.py<|end_file_name|><|fim▁begin|>from direct.directnotify.DirectNotifyGlobal import *
from otp.ai.AIBaseGlobal import *
from toontown.building import DistributedBuildingAI
from toontown.building import GagshopBuildingAI
from toontown.building import HQBuildingAI
from toontown.building import KartShopBuildingAI
from toontown.building import PetshopBuildingAI
from toontown.hood import ZoneUtil
# from toontown.building import DistributedAnimBuildingAI
class DistributedBuildingMgrAI:
notify = directNotify.newCategory('DistributedBuildingMgrAI')
def __init__(self, air, branchId, dnaStore, trophyMgr):
self.air = air
self.branchId = branchId
self.canonicalBranchId = ZoneUtil.getCanonicalZoneId(self.branchId)
self.dnaStore = dnaStore
self.trophyMgr = trophyMgr<|fim▁hole|> def cleanup(self):
for building in self.__buildings.values():
building.cleanup()
self.__buildings = {}
def isValidBlockNumber(self, blockNumber):
return blockNumber in self.__buildings
def isSuitBlock(self, blockNumber):
if not self.isValidBlockNumber(blockNumber):
return False
return self.__buildings[blockNumber].isSuitBlock()
def getSuitBlocks(self):
blocks = []
for blockNumber, building in self.__buildings.items():
if building.isSuitBlock():
blocks.append(blockNumber)
return blocks
def getEstablishedSuitBlocks(self):
blocks = []
for blockNumber, building in self.__buildings.items():
if building.isEstablishedSuitBlock():
blocks.append(blockNumber)
return blocks
def getToonBlocks(self):
blocks = []
for blockNumber, building in self.__buildings.items():
if isinstance(building, HQBuildingAI.HQBuildingAI):
continue
if isinstance(building, GagshopBuildingAI.GagshopBuildingAI):
continue
if isinstance(building, PetshopBuildingAI.PetshopBuildingAI):
continue
if isinstance(building, KartShopBuildingAI.KartShopBuildingAI):
continue
if not building.isSuitBlock():
blocks.append(blockNumber)
return blocks
def getBuildings(self):
return self.__buildings.values()
def getFrontDoorPoint(self, blockNumber):
if self.isValidBlockNumber(blockNumber):
return self.__buildings[blockNumber].getFrontDoorPoint()
def getBuildingTrack(self, blockNumber):
if self.isValidBlockNumber(blockNumber):
return self.__buildings[blockNumber].track
def getBuilding(self, blockNumber):
if self.isValidBlockNumber(blockNumber):
return self.__buildings[blockNumber]
def setFrontDoorPoint(self, blockNumber, point):
if self.isValidBlockNumber(blockNumber):
return self.__buildings[blockNumber].setFrontDoorPoint(point)
def getDNABlockLists(self):
blocks = []
hqBlocks = []
gagshopBlocks = []
petshopBlocks = []
kartshopBlocks = []
animBldgBlocks = []
for i in xrange(self.dnaStore.getNumBlockNumbers()):
blockNumber = self.dnaStore.getBlockNumberAt(i)
buildingType = self.dnaStore.getBlockBuildingType(blockNumber)
if buildingType == 'hq':
hqBlocks.append(blockNumber)
elif buildingType == 'gagshop':
gagshopBlocks.append(blockNumber)
elif buildingType == 'petshop':
petshopBlocks.append(blockNumber)
elif buildingType == 'kartshop':
kartshopBlocks.append(blockNumber)
elif buildingType == 'animbldg':
animBldgBlocks.append(blockNumber)
else:
blocks.append(blockNumber)
return (blocks, hqBlocks, gagshopBlocks, petshopBlocks, kartshopBlocks,
animBldgBlocks)
def findAllLandmarkBuildings(self):
backups = simbase.backups.load('blockinfo', (self.air.districtId, self.branchId), default={})
(blocks, hqBlocks, gagshopBlocks, petshopBlocks, kartshopBlocks,
animBldgBlocks) = self.getDNABlockLists()
for blockNumber in blocks:
self.newBuilding(blockNumber, backup=backups.get(blockNumber, None))
for blockNumber in animBldgBlocks:
self.newAnimBuilding(blockNumber, backup=backups.get(blockNumber, None))
for blockNumber in hqBlocks:
self.newHQBuilding(blockNumber)
for blockNumber in gagshopBlocks:
self.newGagshopBuilding(blockNumber)
for block in petshopBlocks:
self.newPetshopBuilding(block)
for block in kartshopBlocks:
self.newKartShopBuilding(block)
def newBuilding(self, blockNumber, backup=None):
building = DistributedBuildingAI.DistributedBuildingAI(
self.air, blockNumber, self.branchId, self.trophyMgr)
building.generateWithRequired(self.branchId)
if backup is not None:
state = backup.get('state', 'toon')
if ((state == 'suit') and simbase.air.wantCogbuildings) or (
(state == 'cogdo') and simbase.air.wantCogdominiums):
building.track = backup.get('track', 'c')
building.difficulty = backup.get('difficulty', 1)
building.numFloors = backup.get('numFloors', 1)
building.updateSavedBy(backup.get('savedBy'))
building.becameSuitTime = backup.get('becameSuitTime', time.mktime(time.gmtime()))
if (state == 'suit') and simbase.air.wantCogbuildings:
building.setState('suit')
elif (state == 'cogdo') and simbase.air.wantCogdominiums:
building.setState('cogdo')
else:
building.setState('toon')
else:
building.setState('toon')
else:
building.setState('toon')
self.__buildings[blockNumber] = building
return building
def newAnimBuilding(self, blockNumber, backup=None):
return self.newBuilding(blockNumber, backup=backup)
def newHQBuilding(self, blockNumber):
dnaStore = self.air.dnaStoreMap[self.canonicalBranchId]
exteriorZoneId = dnaStore.getZoneFromBlockNumber(blockNumber)
exteriorZoneId = ZoneUtil.getTrueZoneId(exteriorZoneId, self.branchId)
interiorZoneId = (self.branchId - (self.branchId%100)) + 500 + blockNumber
building = HQBuildingAI.HQBuildingAI(
self.air, exteriorZoneId, interiorZoneId, blockNumber)
self.__buildings[blockNumber] = building
return building
def newGagshopBuilding(self, blockNumber):
dnaStore = self.air.dnaStoreMap[self.canonicalBranchId]
exteriorZoneId = dnaStore.getZoneFromBlockNumber(blockNumber)
exteriorZoneId = ZoneUtil.getTrueZoneId(exteriorZoneId, self.branchId)
interiorZoneId = (self.branchId - (self.branchId%100)) + 500 + blockNumber
building = GagshopBuildingAI.GagshopBuildingAI(
self.air, exteriorZoneId, interiorZoneId, blockNumber)
self.__buildings[blockNumber] = building
return building
def newPetshopBuilding(self, blockNumber):
dnaStore = self.air.dnaStoreMap[self.canonicalBranchId]
exteriorZoneId = dnaStore.getZoneFromBlockNumber(blockNumber)
exteriorZoneId = ZoneUtil.getTrueZoneId(exteriorZoneId, self.branchId)
interiorZoneId = (self.branchId - (self.branchId%100)) + 500 + blockNumber
building = PetshopBuildingAI.PetshopBuildingAI(
self.air, exteriorZoneId, interiorZoneId, blockNumber)
self.__buildings[blockNumber] = building
return building
def newKartShopBuilding(self, blockNumber):
dnaStore = self.air.dnaStoreMap[self.canonicalBranchId]
exteriorZoneId = dnaStore.getZoneFromBlockNumber(blockNumber)
exteriorZoneId = ZoneUtil.getTrueZoneId(exteriorZoneId, self.branchId)
interiorZoneId = (self.branchId - (self.branchId%100)) + 500 + blockNumber
building = KartShopBuildingAI.KartShopBuildingAI(
self.air, exteriorZoneId, interiorZoneId, blockNumber)
self.__buildings[blockNumber] = building
return building
def save(self):
buildings = {}
for blockNumber in self.getSuitBlocks():
building = self.getBuilding(blockNumber)
backup = {
'state': building.fsm.getCurrentState().getName(),
'block': building.block,
'track': building.track,
'difficulty': building.difficulty,
'numFloors': building.numFloors,
'savedBy': building.savedBy,
'becameSuitTime': building.becameSuitTime
}
buildings[blockNumber] = backup
simbase.backups.save('blockinfo', (self.air.districtId, self.branchId), buildings)<|fim▁end|> | self.__buildings = {}
self.findAllLandmarkBuildings()
|
<|file_name|>base64.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Base64 binary-to-text encoding
use std::str;
use std::fmt;
/// Available encoding character sets
pub enum CharacterSet {
/// The standard character set (uses `+` and `/`)
Standard,
/// The URL safe character set (uses `-` and `_`)
UrlSafe
}
/// Contains configuration parameters for `to_base64`.
pub struct Config {
/// Character set to use
pub char_set: CharacterSet,
/// True to pad output with `=` characters
pub pad: bool,
/// `Some(len)` to wrap lines at `len`, `None` to disable line wrapping
pub line_length: Option<uint>
}
<|fim▁hole|>pub static STANDARD: Config =
Config {char_set: Standard, pad: true, line_length: None};
/// Configuration for RFC 4648 base64url encoding
pub static URL_SAFE: Config =
Config {char_set: UrlSafe, pad: false, line_length: None};
/// Configuration for RFC 2045 MIME base64 encoding
pub static MIME: Config =
Config {char_set: Standard, pad: true, line_length: Some(76)};
static STANDARD_CHARS: &'static[u8] = bytes!("ABCDEFGHIJKLMNOPQRSTUVWXYZ",
"abcdefghijklmnopqrstuvwxyz",
"0123456789+/");
static URLSAFE_CHARS: &'static[u8] = bytes!("ABCDEFGHIJKLMNOPQRSTUVWXYZ",
"abcdefghijklmnopqrstuvwxyz",
"0123456789-_");
/// A trait for converting a value to base64 encoding.
pub trait ToBase64 {
/// Converts the value of `self` to a base64 value following the specified
/// format configuration, returning the owned string.
fn to_base64(&self, config: Config) -> String;
}
impl<'a> ToBase64 for &'a [u8] {
/**
* Turn a vector of `u8` bytes into a base64 string.
*
* # Example
*
* ```rust
* extern crate serialize;
* use serialize::base64::{ToBase64, STANDARD};
*
* fn main () {
* let str = [52,32].to_base64(STANDARD);
* println!("base 64 output: {}", str);
* }
* ```
*/
fn to_base64(&self, config: Config) -> String {
let bytes = match config.char_set {
Standard => STANDARD_CHARS,
UrlSafe => URLSAFE_CHARS
};
let mut v = Vec::new();
let mut i = 0;
let mut cur_length = 0;
let len = self.len();
while i < len - (len % 3) {
match config.line_length {
Some(line_length) =>
if cur_length >= line_length {
v.push('\r' as u8);
v.push('\n' as u8);
cur_length = 0;
},
None => ()
}
let n = (self[i] as u32) << 16 |
(self[i + 1] as u32) << 8 |
(self[i + 2] as u32);
// This 24-bit number gets separated into four 6-bit numbers.
v.push(bytes[((n >> 18) & 63) as uint]);
v.push(bytes[((n >> 12) & 63) as uint]);
v.push(bytes[((n >> 6 ) & 63) as uint]);
v.push(bytes[(n & 63) as uint]);
cur_length += 4;
i += 3;
}
if len % 3 != 0 {
match config.line_length {
Some(line_length) =>
if cur_length >= line_length {
v.push('\r' as u8);
v.push('\n' as u8);
},
None => ()
}
}
// Heh, would be cool if we knew this was exhaustive
// (the dream of bounded integer types)
match len % 3 {
0 => (),
1 => {
let n = (self[i] as u32) << 16;
v.push(bytes[((n >> 18) & 63) as uint]);
v.push(bytes[((n >> 12) & 63) as uint]);
if config.pad {
v.push('=' as u8);
v.push('=' as u8);
}
}
2 => {
let n = (self[i] as u32) << 16 |
(self[i + 1u] as u32) << 8;
v.push(bytes[((n >> 18) & 63) as uint]);
v.push(bytes[((n >> 12) & 63) as uint]);
v.push(bytes[((n >> 6 ) & 63) as uint]);
if config.pad {
v.push('=' as u8);
}
}
_ => fail!("Algebra is broken, please alert the math police")
}
unsafe {
str::raw::from_utf8(v.as_slice()).to_string()
}
}
}
/// A trait for converting from base64 encoded values.
pub trait FromBase64 {
/// Converts the value of `self`, interpreted as base64 encoded data, into
/// an owned vector of bytes, returning the vector.
fn from_base64(&self) -> Result<Vec<u8>, FromBase64Error>;
}
/// Errors that can occur when decoding a base64 encoded string
pub enum FromBase64Error {
/// The input contained a character not part of the base64 format
InvalidBase64Character(char, uint),
/// The input had an invalid length
InvalidBase64Length,
}
impl fmt::Show for FromBase64Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
InvalidBase64Character(ch, idx) =>
write!(f, "Invalid character '{}' at position {}", ch, idx),
InvalidBase64Length => write!(f, "Invalid length"),
}
}
}
impl<'a> FromBase64 for &'a str {
/**
* Convert any base64 encoded string (literal, `@`, `&`, or `~`)
* to the byte values it encodes.
*
* You can use the `String::from_utf8` function in `std::string` to turn a
* `Vec<u8>` into a string with characters corresponding to those values.
*
* # Example
*
* This converts a string literal to base64 and back.
*
* ```rust
* extern crate serialize;
* use serialize::base64::{ToBase64, FromBase64, STANDARD};
*
* fn main () {
* let hello_str = bytes!("Hello, World").to_base64(STANDARD);
* println!("base64 output: {}", hello_str);
* let res = hello_str.as_slice().from_base64();
* if res.is_ok() {
* let opt_bytes = String::from_utf8(res.unwrap());
* if opt_bytes.is_ok() {
* println!("decoded from base64: {}", opt_bytes.unwrap());
* }
* }
* }
* ```
*/
fn from_base64(&self) -> Result<Vec<u8>, FromBase64Error> {
let mut r = Vec::new();
let mut buf: u32 = 0;
let mut modulus = 0;
let mut it = self.bytes().enumerate();
for (idx, byte) in it {
let val = byte as u32;
match byte as char {
'A'..'Z' => buf |= val - 0x41,
'a'..'z' => buf |= val - 0x47,
'0'..'9' => buf |= val + 0x04,
'+'|'-' => buf |= 0x3E,
'/'|'_' => buf |= 0x3F,
'\r'|'\n' => continue,
'=' => break,
_ => return Err(InvalidBase64Character(self.char_at(idx), idx)),
}
buf <<= 6;
modulus += 1;
if modulus == 4 {
modulus = 0;
r.push((buf >> 22) as u8);
r.push((buf >> 14) as u8);
r.push((buf >> 6 ) as u8);
}
}
for (idx, byte) in it {
match byte as char {
'='|'\r'|'\n' => continue,
_ => return Err(InvalidBase64Character(self.char_at(idx), idx)),
}
}
match modulus {
2 => {
r.push((buf >> 10) as u8);
}
3 => {
r.push((buf >> 16) as u8);
r.push((buf >> 8 ) as u8);
}
0 => (),
_ => return Err(InvalidBase64Length),
}
Ok(r)
}
}
#[cfg(test)]
mod tests {
extern crate test;
use self::test::Bencher;
use base64::{Config, FromBase64, ToBase64, STANDARD, URL_SAFE};
#[test]
fn test_to_base64_basic() {
assert_eq!("".as_bytes().to_base64(STANDARD), "".to_string());
assert_eq!("f".as_bytes().to_base64(STANDARD), "Zg==".to_string());
assert_eq!("fo".as_bytes().to_base64(STANDARD), "Zm8=".to_string());
assert_eq!("foo".as_bytes().to_base64(STANDARD), "Zm9v".to_string());
assert_eq!("foob".as_bytes().to_base64(STANDARD), "Zm9vYg==".to_string());
assert_eq!("fooba".as_bytes().to_base64(STANDARD), "Zm9vYmE=".to_string());
assert_eq!("foobar".as_bytes().to_base64(STANDARD), "Zm9vYmFy".to_string());
}
#[test]
fn test_to_base64_line_break() {
assert!(![0u8, ..1000].to_base64(Config {line_length: None, ..STANDARD})
.as_slice()
.contains("\r\n"));
assert_eq!("foobar".as_bytes().to_base64(Config {line_length: Some(4),
..STANDARD}),
"Zm9v\r\nYmFy".to_string());
}
#[test]
fn test_to_base64_padding() {
assert_eq!("f".as_bytes().to_base64(Config {pad: false, ..STANDARD}), "Zg".to_string());
assert_eq!("fo".as_bytes().to_base64(Config {pad: false, ..STANDARD}), "Zm8".to_string());
}
#[test]
fn test_to_base64_url_safe() {
assert_eq!([251, 255].to_base64(URL_SAFE), "-_8".to_string());
assert_eq!([251, 255].to_base64(STANDARD), "+/8=".to_string());
}
#[test]
fn test_from_base64_basic() {
assert_eq!("".from_base64().unwrap().as_slice(), "".as_bytes());
assert_eq!("Zg==".from_base64().unwrap().as_slice(), "f".as_bytes());
assert_eq!("Zm8=".from_base64().unwrap().as_slice(), "fo".as_bytes());
assert_eq!("Zm9v".from_base64().unwrap().as_slice(), "foo".as_bytes());
assert_eq!("Zm9vYg==".from_base64().unwrap().as_slice(), "foob".as_bytes());
assert_eq!("Zm9vYmE=".from_base64().unwrap().as_slice(), "fooba".as_bytes());
assert_eq!("Zm9vYmFy".from_base64().unwrap().as_slice(), "foobar".as_bytes());
}
#[test]
fn test_from_base64_newlines() {
assert_eq!("Zm9v\r\nYmFy".from_base64().unwrap().as_slice(),
"foobar".as_bytes());
assert_eq!("Zm9vYg==\r\n".from_base64().unwrap().as_slice(),
"foob".as_bytes());
}
#[test]
fn test_from_base64_urlsafe() {
assert_eq!("-_8".from_base64().unwrap(), "+/8=".from_base64().unwrap());
}
#[test]
fn test_from_base64_invalid_char() {
assert!("Zm$=".from_base64().is_err())
assert!("Zg==$".from_base64().is_err());
}
#[test]
fn test_from_base64_invalid_padding() {
assert!("Z===".from_base64().is_err());
}
#[test]
fn test_base64_random() {
use std::rand::{task_rng, random, Rng};
for _ in range(0, 1000) {
let times = task_rng().gen_range(1u, 100);
let v = Vec::from_fn(times, |_| random::<u8>());
assert_eq!(v.as_slice()
.to_base64(STANDARD)
.as_slice()
.from_base64()
.unwrap()
.as_slice(),
v.as_slice());
}
}
#[bench]
pub fn bench_to_base64(b: &mut Bencher) {
let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \
ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン";
b.iter(|| {
s.as_bytes().to_base64(STANDARD);
});
b.bytes = s.len() as u64;
}
#[bench]
pub fn bench_from_base64(b: &mut Bencher) {
let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \
ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン";
let sb = s.as_bytes().to_base64(STANDARD);
b.iter(|| {
sb.as_slice().from_base64().unwrap();
});
b.bytes = sb.len() as u64;
}
}<|fim▁end|> | /// Configuration for RFC 4648 standard base64 encoding |
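
To make the encoder's "24-bit number gets separated into four 6-bit numbers" comment concrete, a worked example (my addition, in modern Rust rather than the sample's pre-1.0 dialect) encoding the classic "Man" to "TWFu":

```rust
fn main() {
    const CHARS: &[u8] =
        b"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
    // "Man" = 0x4D 0x61 0x6E packs into the 24-bit group 0x4D616E.
    let n: u32 = (0x4D << 16) | (0x61 << 8) | 0x6E;
    // Split into four 6-bit indices: 19, 22, 5, 46 -> "TWFu".
    let out: String = [(n >> 18) & 63, (n >> 12) & 63, (n >> 6) & 63, n & 63]
        .iter()
        .map(|&i| CHARS[i as usize] as char)
        .collect();
    assert_eq!(out, "TWFu");
}
```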
<|file_name|>15.2.3.6-4-542.js<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 Ecma International. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
es5id: 15.2.3.6-4-542<|fim▁hole|>includes: [propertyHelper.js]
---*/
var obj = {};
var getFunc = function() {
return 1001;
};
var verifySetFunc = "data";
var setFunc = function(value) {
verifySetFunc = value;
};
Object.defineProperty(obj, "prop", {
get: getFunc,
set: setFunc,
enumerable: true,
configurable: false
});
assert(obj.hasOwnProperty("prop"));
verifyNotConfigurable(obj, "prop");<|fim▁end|> | description: >
ES5 Attributes - property ([[Get]] is a Function, [[Set]] is a
Function, [[Enumerable]] is true, [[Configurable]] is false) is
undeletable |
<|file_name|>urlpath.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | ../../../../../share/pyshared/twisted/python/urlpath.py |
<|file_name|>mainThreadEditor.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { Emitter, Event } from 'vs/base/common/event';
import { DisposableStore } from 'vs/base/common/lifecycle';
import { ICodeEditor } from 'vs/editor/browser/editorBrowser';
import { RenderLineNumbersType, TextEditorCursorStyle, cursorStyleToString, EditorOption } from 'vs/editor/common/config/editorOptions';
import { IRange, Range } from 'vs/editor/common/core/range';
import { ISelection, Selection } from 'vs/editor/common/core/selection';
import { IDecorationOptions, ScrollType } from 'vs/editor/common/editorCommon';
import { ISingleEditOperation, ITextModel, ITextModelUpdateOptions, IIdentifiedSingleEditOperation } from 'vs/editor/common/model';
import { IModelService } from 'vs/editor/common/services/model';
import { SnippetController2 } from 'vs/editor/contrib/snippet/snippetController2';
import { IApplyEditsOptions, IEditorPropertiesChangeData, IResolvedTextEditorConfiguration, ITextEditorConfigurationUpdate, IUndoStopOptions, TextEditorRevealType } from 'vs/workbench/api/common/extHost.protocol';
import { IEditorPane } from 'vs/workbench/common/editor';
import { withNullAsUndefined } from 'vs/base/common/types';
import { equals } from 'vs/base/common/arrays';
import { CodeEditorStateFlag, EditorState } from 'vs/editor/browser/core/editorState';
import { IClipboardService } from 'vs/platform/clipboard/common/clipboardService';
import { SnippetParser } from 'vs/editor/contrib/snippet/snippetParser';
import { MainThreadDocuments } from 'vs/workbench/api/browser/mainThreadDocuments';
export interface IFocusTracker {
onGainedFocus(): void;
onLostFocus(): void;
}
export class MainThreadTextEditorProperties {
public static readFromEditor(previousProperties: MainThreadTextEditorProperties | null, model: ITextModel, codeEditor: ICodeEditor | null): MainThreadTextEditorProperties {
const selections = MainThreadTextEditorProperties._readSelectionsFromCodeEditor(previousProperties, codeEditor);
const options = MainThreadTextEditorProperties._readOptionsFromCodeEditor(previousProperties, model, codeEditor);
const visibleRanges = MainThreadTextEditorProperties._readVisibleRangesFromCodeEditor(previousProperties, codeEditor);
return new MainThreadTextEditorProperties(selections, options, visibleRanges);
}
private static _readSelectionsFromCodeEditor(previousProperties: MainThreadTextEditorProperties | null, codeEditor: ICodeEditor | null): Selection[] {
let result: Selection[] | null = null;
if (codeEditor) {
result = codeEditor.getSelections();<|fim▁hole|> if (!result && previousProperties) {
result = previousProperties.selections;
}
if (!result) {
result = [new Selection(1, 1, 1, 1)];
}
return result;
}
private static _readOptionsFromCodeEditor(previousProperties: MainThreadTextEditorProperties | null, model: ITextModel, codeEditor: ICodeEditor | null): IResolvedTextEditorConfiguration {
if (model.isDisposed()) {
if (previousProperties) {
// shutdown time
return previousProperties.options;
} else {
throw new Error('No valid properties');
}
}
let cursorStyle: TextEditorCursorStyle;
let lineNumbers: RenderLineNumbersType;
if (codeEditor) {
const options = codeEditor.getOptions();
const lineNumbersOpts = options.get(EditorOption.lineNumbers);
cursorStyle = options.get(EditorOption.cursorStyle);
lineNumbers = lineNumbersOpts.renderType;
} else if (previousProperties) {
cursorStyle = previousProperties.options.cursorStyle;
lineNumbers = previousProperties.options.lineNumbers;
} else {
cursorStyle = TextEditorCursorStyle.Line;
lineNumbers = RenderLineNumbersType.On;
}
const modelOptions = model.getOptions();
return {
insertSpaces: modelOptions.insertSpaces,
tabSize: modelOptions.tabSize,
cursorStyle: cursorStyle,
lineNumbers: lineNumbers
};
}
private static _readVisibleRangesFromCodeEditor(previousProperties: MainThreadTextEditorProperties | null, codeEditor: ICodeEditor | null): Range[] {
if (codeEditor) {
return codeEditor.getVisibleRanges();
}
return [];
}
constructor(
public readonly selections: Selection[],
public readonly options: IResolvedTextEditorConfiguration,
public readonly visibleRanges: Range[]
) {
}
public generateDelta(oldProps: MainThreadTextEditorProperties | null, selectionChangeSource: string | null): IEditorPropertiesChangeData | null {
const delta: IEditorPropertiesChangeData = {
options: null,
selections: null,
visibleRanges: null
};
if (!oldProps || !MainThreadTextEditorProperties._selectionsEqual(oldProps.selections, this.selections)) {
delta.selections = {
selections: this.selections,
source: withNullAsUndefined(selectionChangeSource)
};
}
if (!oldProps || !MainThreadTextEditorProperties._optionsEqual(oldProps.options, this.options)) {
delta.options = this.options;
}
if (!oldProps || !MainThreadTextEditorProperties._rangesEqual(oldProps.visibleRanges, this.visibleRanges)) {
delta.visibleRanges = this.visibleRanges;
}
if (delta.selections || delta.options || delta.visibleRanges) {
// something changed
return delta;
}
// nothing changed
return null;
}
private static _selectionsEqual(a: readonly Selection[], b: readonly Selection[]): boolean {
return equals(a, b, (aValue, bValue) => aValue.equalsSelection(bValue));
}
private static _rangesEqual(a: readonly Range[], b: readonly Range[]): boolean {
return equals(a, b, (aValue, bValue) => aValue.equalsRange(bValue));
}
private static _optionsEqual(a: IResolvedTextEditorConfiguration, b: IResolvedTextEditorConfiguration): boolean {
if (a && !b || !a && b) {
return false;
}
if (!a && !b) {
return true;
}
return (
a.tabSize === b.tabSize
&& a.insertSpaces === b.insertSpaces
&& a.cursorStyle === b.cursorStyle
&& a.lineNumbers === b.lineNumbers
);
}
}
/**
* Text Editor that is permanently bound to the same model.
* It can be bound or not to a CodeEditor.
*/
export class MainThreadTextEditor {
private readonly _id: string;
private readonly _model: ITextModel;
private readonly _mainThreadDocuments: MainThreadDocuments;
private readonly _modelService: IModelService;
private readonly _clipboardService: IClipboardService;
private readonly _modelListeners = new DisposableStore();
private _codeEditor: ICodeEditor | null;
private readonly _focusTracker: IFocusTracker;
private readonly _codeEditorListeners = new DisposableStore();
private _properties: MainThreadTextEditorProperties | null;
private readonly _onPropertiesChanged: Emitter<IEditorPropertiesChangeData>;
constructor(
id: string,
model: ITextModel,
codeEditor: ICodeEditor,
focusTracker: IFocusTracker,
mainThreadDocuments: MainThreadDocuments,
modelService: IModelService,
clipboardService: IClipboardService,
) {
this._id = id;
this._model = model;
this._codeEditor = null;
this._properties = null;
this._focusTracker = focusTracker;
this._mainThreadDocuments = mainThreadDocuments;
this._modelService = modelService;
this._clipboardService = clipboardService;
this._onPropertiesChanged = new Emitter<IEditorPropertiesChangeData>();
this._modelListeners.add(this._model.onDidChangeOptions((e) => {
this._updatePropertiesNow(null);
}));
this.setCodeEditor(codeEditor);
this._updatePropertiesNow(null);
}
public dispose(): void {
this._modelListeners.dispose();
this._codeEditor = null;
this._codeEditorListeners.dispose();
}
private _updatePropertiesNow(selectionChangeSource: string | null): void {
this._setProperties(
MainThreadTextEditorProperties.readFromEditor(this._properties, this._model, this._codeEditor),
selectionChangeSource
);
}
private _setProperties(newProperties: MainThreadTextEditorProperties, selectionChangeSource: string | null): void {
const delta = newProperties.generateDelta(this._properties, selectionChangeSource);
this._properties = newProperties;
if (delta) {
this._onPropertiesChanged.fire(delta);
}
}
public getId(): string {
return this._id;
}
public getModel(): ITextModel {
return this._model;
}
public getCodeEditor(): ICodeEditor | null {
return this._codeEditor;
}
public hasCodeEditor(codeEditor: ICodeEditor | null): boolean {
return (this._codeEditor === codeEditor);
}
public setCodeEditor(codeEditor: ICodeEditor | null): void {
if (this.hasCodeEditor(codeEditor)) {
// Nothing to do...
return;
}
this._codeEditorListeners.clear();
this._codeEditor = codeEditor;
if (this._codeEditor) {
// Catch early the case that this code editor gets a different model set and disassociate from this model
this._codeEditorListeners.add(this._codeEditor.onDidChangeModel(() => {
this.setCodeEditor(null);
}));
this._codeEditorListeners.add(this._codeEditor.onDidFocusEditorWidget(() => {
this._focusTracker.onGainedFocus();
}));
this._codeEditorListeners.add(this._codeEditor.onDidBlurEditorWidget(() => {
this._focusTracker.onLostFocus();
}));
let nextSelectionChangeSource: string | null = null;
this._codeEditorListeners.add(this._mainThreadDocuments.onIsCaughtUpWithContentChanges((uri) => {
if (uri.toString() === this._model.uri.toString()) {
const selectionChangeSource = nextSelectionChangeSource;
nextSelectionChangeSource = null;
this._updatePropertiesNow(selectionChangeSource);
}
}));
const isValidCodeEditor = () => {
// Due to event timings, it is possible that there is a model change event not yet delivered to us.
// > e.g. a model change event is emitted to a listener which then decides to update editor options
// > In this case the editor configuration change event reaches us first.
// So simply check that the model is still attached to this code editor
return (this._codeEditor && this._codeEditor.getModel() === this._model);
};
const updateProperties = (selectionChangeSource: string | null) => {
// Some editor events get delivered faster than model content changes. This is
// problematic, as this leads to editor properties reaching the extension host
// too soon, before the model content change that was the root cause.
//
// If this case is identified, then let's update editor properties on the next model
// content change instead.
if (this._mainThreadDocuments.isCaughtUpWithContentChanges(this._model.uri)) {
nextSelectionChangeSource = null;
this._updatePropertiesNow(selectionChangeSource);
} else {
// update editor properties on the next model content change
nextSelectionChangeSource = selectionChangeSource;
}
};
this._codeEditorListeners.add(this._codeEditor.onDidChangeCursorSelection((e) => {
// selection
if (!isValidCodeEditor()) {
return;
}
updateProperties(e.source);
}));
this._codeEditorListeners.add(this._codeEditor.onDidChangeConfiguration((e) => {
// options
if (!isValidCodeEditor()) {
return;
}
updateProperties(null);
}));
this._codeEditorListeners.add(this._codeEditor.onDidLayoutChange(() => {
// visibleRanges
if (!isValidCodeEditor()) {
return;
}
updateProperties(null);
}));
this._codeEditorListeners.add(this._codeEditor.onDidScrollChange(() => {
// visibleRanges
if (!isValidCodeEditor()) {
return;
}
updateProperties(null);
}));
this._updatePropertiesNow(null);
}
}
public isVisible(): boolean {
return !!this._codeEditor;
}
public getProperties(): MainThreadTextEditorProperties {
return this._properties!;
}
public get onPropertiesChanged(): Event<IEditorPropertiesChangeData> {
return this._onPropertiesChanged.event;
}
public setSelections(selections: ISelection[]): void {
if (this._codeEditor) {
this._codeEditor.setSelections(selections);
return;
}
const newSelections = selections.map(Selection.liftSelection);
this._setProperties(
new MainThreadTextEditorProperties(newSelections, this._properties!.options, this._properties!.visibleRanges),
null
);
}
private _setIndentConfiguration(newConfiguration: ITextEditorConfigurationUpdate): void {
const creationOpts = this._modelService.getCreationOptions(this._model.getLanguageId(), this._model.uri, this._model.isForSimpleWidget);
if (newConfiguration.tabSize === 'auto' || newConfiguration.insertSpaces === 'auto') {
// one of the options was set to 'auto' => detect indentation
let insertSpaces = creationOpts.insertSpaces;
let tabSize = creationOpts.tabSize;
if (newConfiguration.insertSpaces !== 'auto' && typeof newConfiguration.insertSpaces !== 'undefined') {
insertSpaces = newConfiguration.insertSpaces;
}
if (newConfiguration.tabSize !== 'auto' && typeof newConfiguration.tabSize !== 'undefined') {
tabSize = newConfiguration.tabSize;
}
this._model.detectIndentation(insertSpaces, tabSize);
return;
}
const newOpts: ITextModelUpdateOptions = {};
if (typeof newConfiguration.insertSpaces !== 'undefined') {
newOpts.insertSpaces = newConfiguration.insertSpaces;
}
if (typeof newConfiguration.tabSize !== 'undefined') {
newOpts.tabSize = newConfiguration.tabSize;
}
this._model.updateOptions(newOpts);
}
public setConfiguration(newConfiguration: ITextEditorConfigurationUpdate): void {
this._setIndentConfiguration(newConfiguration);
if (!this._codeEditor) {
return;
}
if (newConfiguration.cursorStyle) {
const newCursorStyle = cursorStyleToString(newConfiguration.cursorStyle);
this._codeEditor.updateOptions({
cursorStyle: newCursorStyle
});
}
if (typeof newConfiguration.lineNumbers !== 'undefined') {
let lineNumbers: 'on' | 'off' | 'relative';
switch (newConfiguration.lineNumbers) {
case RenderLineNumbersType.On:
lineNumbers = 'on';
break;
case RenderLineNumbersType.Relative:
lineNumbers = 'relative';
break;
default:
lineNumbers = 'off';
}
this._codeEditor.updateOptions({
lineNumbers: lineNumbers
});
}
}
public setDecorations(key: string, ranges: IDecorationOptions[]): void {
if (!this._codeEditor) {
return;
}
this._codeEditor.setDecorations('exthost-api', key, ranges);
}
public setDecorationsFast(key: string, _ranges: number[]): void {
if (!this._codeEditor) {
return;
}
const ranges: Range[] = [];
for (let i = 0, len = Math.floor(_ranges.length / 4); i < len; i++) {
ranges[i] = new Range(_ranges[4 * i], _ranges[4 * i + 1], _ranges[4 * i + 2], _ranges[4 * i + 3]);
}
this._codeEditor.setDecorationsFast(key, ranges);
}
public revealRange(range: IRange, revealType: TextEditorRevealType): void {
if (!this._codeEditor) {
return;
}
switch (revealType) {
case TextEditorRevealType.Default:
this._codeEditor.revealRange(range, ScrollType.Smooth);
break;
case TextEditorRevealType.InCenter:
this._codeEditor.revealRangeInCenter(range, ScrollType.Smooth);
break;
case TextEditorRevealType.InCenterIfOutsideViewport:
this._codeEditor.revealRangeInCenterIfOutsideViewport(range, ScrollType.Smooth);
break;
case TextEditorRevealType.AtTop:
this._codeEditor.revealRangeAtTop(range, ScrollType.Smooth);
break;
default:
console.warn(`Unknown revealType: ${revealType}`);
break;
}
}
public isFocused(): boolean {
if (this._codeEditor) {
return this._codeEditor.hasTextFocus();
}
return false;
}
public matches(editor: IEditorPane): boolean {
if (!editor) {
return false;
}
return editor.getControl() === this._codeEditor;
}
public applyEdits(versionIdCheck: number, edits: ISingleEditOperation[], opts: IApplyEditsOptions): boolean {
if (this._model.getVersionId() !== versionIdCheck) {
// throw new Error('Model has changed in the meantime!');
// model changed in the meantime
return false;
}
if (!this._codeEditor) {
// console.warn('applyEdits on invisible editor');
return false;
}
if (typeof opts.setEndOfLine !== 'undefined') {
this._model.pushEOL(opts.setEndOfLine);
}
const transformedEdits = edits.map((edit): IIdentifiedSingleEditOperation => {
return {
range: Range.lift(edit.range),
text: edit.text,
forceMoveMarkers: edit.forceMoveMarkers
};
});
if (opts.undoStopBefore) {
this._codeEditor.pushUndoStop();
}
this._codeEditor.executeEdits('MainThreadTextEditor', transformedEdits);
if (opts.undoStopAfter) {
this._codeEditor.pushUndoStop();
}
return true;
}
async insertSnippet(template: string, ranges: readonly IRange[], opts: IUndoStopOptions) {
if (!this._codeEditor || !this._codeEditor.hasModel()) {
return false;
}
// check if clipboard is required and only iff read it (async)
let clipboardText: string | undefined;
const needsTemplate = SnippetParser.guessNeedsClipboard(template);
if (needsTemplate) {
const state = new EditorState(this._codeEditor, CodeEditorStateFlag.Value | CodeEditorStateFlag.Position);
clipboardText = await this._clipboardService.readText();
if (!state.validate(this._codeEditor)) {
return false;
}
}
const snippetController = SnippetController2.get(this._codeEditor);
// cancel previous snippet mode
// snippetController.leaveSnippet();
// set selection, focus editor
const selections = ranges.map(r => new Selection(r.startLineNumber, r.startColumn, r.endLineNumber, r.endColumn));
this._codeEditor.setSelections(selections);
this._codeEditor.focus();
// make modifications
snippetController?.insert(template, {
overwriteBefore: 0, overwriteAfter: 0,
undoStopBefore: opts.undoStopBefore, undoStopAfter: opts.undoStopAfter,
clipboardText
});
return true;
}
}<|fim▁end|> | } |
<|file_name|>vrpose.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use core::nonzero::NonZero;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::VRPoseBinding;
use dom::bindings::codegen::Bindings::VRPoseBinding::VRPoseMethods;
use dom::bindings::conversions::{slice_to_array_buffer_view, update_array_buffer_view};
use dom::bindings::js::Root;
use dom::bindings::reflector::{DomObject, Reflector, reflect_dom_object};
use dom::globalscope::GlobalScope;
use js::jsapi::{Heap, JSContext, JSObject};
use std::ptr;
use webvr_traits::webvr;
#[dom_struct]
pub struct VRPose {
reflector_: Reflector,
position: DOMRefCell<Heap<*mut JSObject>>,
orientation: DOMRefCell<Heap<*mut JSObject>>,
linear_vel: DOMRefCell<Heap<*mut JSObject>>,
angular_vel: DOMRefCell<Heap<*mut JSObject>>,
linear_acc: DOMRefCell<Heap<*mut JSObject>>,
angular_acc: DOMRefCell<Heap<*mut JSObject>>
}
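// Descriptive note (added): this helper lazily creates a typed-array JS object
// the first time `src` is `Some`, updates it in place on later calls, and
// resets the heap slot to null when `src` is `None`, matching the optional
// fields of a WebVR pose.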
#[allow(unsafe_code)]
unsafe fn update_or_create_typed_array(cx: *mut JSContext,
src: Option<&[f32]>,
dst: &DOMRefCell<Heap<*mut JSObject>>) {
let mut dst = dst.borrow_mut();
match src {<|fim▁hole|> update_array_buffer_view(dst.get(), &data);
}
},
None => {
if !dst.get().is_null() {
dst.set(ptr::null_mut());
}
}
}
}
#[inline]
#[allow(unsafe_code)]
fn heap_to_option(heap: &DOMRefCell<Heap<*mut JSObject>>) -> Option<NonZero<*mut JSObject>> {
let js_object = heap.borrow_mut().get();
if js_object.is_null() {
None
} else {
unsafe {
Some(NonZero::new(js_object))
}
}
}
impl VRPose {
fn new_inherited() -> VRPose {
VRPose {
reflector_: Reflector::new(),
position: DOMRefCell::new(Heap::default()),
orientation: DOMRefCell::new(Heap::default()),
linear_vel: DOMRefCell::new(Heap::default()),
angular_vel: DOMRefCell::new(Heap::default()),
linear_acc: DOMRefCell::new(Heap::default()),
angular_acc: DOMRefCell::new(Heap::default())
}
}
pub fn new(global: &GlobalScope, pose: &webvr::VRPose) -> Root<VRPose> {
let root = reflect_dom_object(box VRPose::new_inherited(),
global,
VRPoseBinding::Wrap);
root.update(&pose);
root
}
#[allow(unsafe_code)]
pub fn update(&self, pose: &webvr::VRPose) {
let cx = self.global().get_cx();
unsafe {
update_or_create_typed_array(cx, pose.position.as_ref().map(|v| &v[..]), &self.position);
update_or_create_typed_array(cx, pose.orientation.as_ref().map(|v| &v[..]), &self.orientation);
update_or_create_typed_array(cx, pose.linear_velocity.as_ref().map(|v| &v[..]), &self.linear_vel);
update_or_create_typed_array(cx, pose.angular_velocity.as_ref().map(|v| &v[..]), &self.angular_vel);
update_or_create_typed_array(cx, pose.linear_acceleration.as_ref().map(|v| &v[..]), &self.linear_acc);
update_or_create_typed_array(cx, pose.angular_acceleration.as_ref().map(|v| &v[..]), &self.angular_acc);
}
}
}
impl VRPoseMethods for VRPose {
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrpose-position
unsafe fn GetPosition(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
heap_to_option(&self.position)
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrpose-linearvelocity
unsafe fn GetLinearVelocity(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
heap_to_option(&self.linear_vel)
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrpose-linearacceleration
unsafe fn GetLinearAcceleration(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
heap_to_option(&self.linear_acc)
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrpose-orientation
unsafe fn GetOrientation(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
heap_to_option(&self.orientation)
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrpose-angularvelocity
unsafe fn GetAngularVelocity(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
heap_to_option(&self.angular_vel)
}
#[allow(unsafe_code)]
// https://w3c.github.io/webvr/#dom-vrpose-angularacceleration
unsafe fn GetAngularAcceleration(&self, _cx: *mut JSContext) -> Option<NonZero<*mut JSObject>> {
heap_to_option(&self.angular_acc)
}
}<|fim▁end|> | Some(ref data) => {
if dst.get().is_null() {
dst.set(slice_to_array_buffer_view(cx, &data));
} else { |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>fn main() {
let num = 1000;
for b in 1..num {
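// Derivation (added comment): with a + b + c = 1000 and a^2 + b^2 = c^2,
// substituting c = 1000 - a - b yields a = (500000 - 1000*b) / (1000 - b).
// The expression below is that formula with numerator and denominator both
// scaled by 2; the a*a + b*b == c*c check filters out values where the
// integer division was inexact.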
let a = (1000000 - 2000*b) / (2000-2*b);
let c = 1000 - a - b;
if a*a + b*b == c*c {
println!("The numbers are {}, {}, {}", a, b, c);<|fim▁hole|> }
}<|fim▁end|> | println!("The product abc = {}", a*b*c);
break;
} |
<|file_name|>link.rs<|end_file_name|><|fim▁begin|>use std::{cmp};
use prelude::*;
pub fn read_link(fs: &mut Filesystem, ino: u64) -> Result<Vec<u8>> {
let inode = try!(get_inode(fs, ino));
if inode.mode.file_type == FileType::Symlink {
read_link_data(fs, &inode)
} else {
Err(Error::new(format!("inode is not a symlink")))
}
}
pub fn is_fast_symlink(fs: &Filesystem, inode: &Inode) -> bool {
if inode.mode.file_type != FileType::Symlink {
return false
}<|fim▁hole|>
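// Note (added): an ext2 "fast" symlink keeps its target inside the inode's
// block-pointer area instead of allocating data blocks; the checks below
// detect that case, treating a nonzero file_acl as one extra 512-byte
// sector charged against the inode.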
if inode.file_acl != 0 {
inode.size_512 as u64 == fs.block_size() / 512
} else {
inode.size_512 == 0
}
}
pub fn read_link_data(fs: &mut Filesystem, inode: &Inode) -> Result<Vec<u8>> {
let mut buffer = make_buffer(inode.size + 4);
let length =
if is_fast_symlink(fs, &inode) {
for i in 0..cmp::min(inode.block.len(), inode.size as usize / 4 + 1) {
encode_u32(inode.block[i], &mut buffer[4*i..]);
}
inode.size
} else {
try!(read_inode_data(fs, &inode, 0, &mut buffer[..]))
};
buffer.truncate(length as usize);
Ok(buffer)
}
pub fn write_link_data(fs: &mut Filesystem, inode: &mut Inode, data: &[u8]) -> Result<()> {
try!(truncate_inode_blocks(fs, inode, 0));
if data.len() <= 15 * 4 {
use std::iter;
let data_buf: Vec<u8> = data.iter().cloned().chain(iter::repeat(0))
.take(15 * 4).collect();
for i in 0..15 {
inode.block[i] = decode_u32(&data_buf[i*4..]);
}
inode.size = data.len() as u64;
try!(update_inode(fs, inode));
} else {
try!(write_inode_data(fs, inode, 0, data));
}
Ok(())
}<|fim▁end|> | |
<|file_name|>deactivatemfadevice.py<|end_file_name|><|fim▁begin|><|fim▁hole|>#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from requestbuilder import Arg
from euca2ools.commands.iam import IAMRequest, AS_ACCOUNT, arg_user
class DeactivateMFADevice(IAMRequest):
DESCRIPTION = 'Deactivate an MFA device'
ARGS = [arg_user(
help='user owning the MFA device to deactivate (required)'),
Arg('-s', '--serial-number', dest='SerialNumber', metavar='SERIAL',
required=True, help='''serial number of the MFA device to
deactivate (required)'''),
AS_ACCOUNT]<|fim▁end|> | # Copyright 2009-2015 Eucalyptus Systems, Inc. |
<|file_name|>SceneControllerCallsSay.java<|end_file_name|><|fim▁begin|>package gradingTools.comp301ss21.assignment4.controller;
import gradingTools.basics.sharedTestCase.checkstyle.CheckstyleClassInstantiatedTestCase;
import gradingTools.basics.sharedTestCase.checkstyle.CheckstyleMethodCalledTestCase;
import gradingTools.comp301ss21.assignment2.testcases.inheritance.TaggedLocatable;
import util.annotations.MaxValue;
@MaxValue(2)
public class SceneControllerCallsSay extends CheckstyleMethodCalledTestCase {
// [INFO] D:\dewan_backup\Java\grail13\.\src\greeting\Cls.java:6: Expected signature main:String[]->void in type greeting.Cls:[@Comp301Tags.GREETING_MAIN]. Good! [ExpectedSignatures]
// [WARN] D:\dewan_backup\Java\grail13\.\src\greeting\Cls.java:6: Missing signature main:String[]->void in type greeting.Cls:[@Comp301Tags.GREETING_MAIN]. [ExpectedSignatures]
public SceneControllerCallsSay() {
<|fim▁hole|> super("@Comp301Tags.BRIDGE_SCENE_CONTROLLER", "(.*)!say:String-> void");
// TODO Auto-generated constructor stub
}
protected Class precedingTest() {
return TaggedLocatable.class;
}
}<|fim▁end|> | |
<|file_name|>delete-account-modal.component.ts<|end_file_name|><|fim▁begin|>// Copyright 2021 The Oppia Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS-IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* @fileoverview Component for delete account modal.
*/
import { Component } from '@angular/core';
import { OnInit } from '@angular/core';<|fim▁hole|>@Component({
selector: 'oppia-delete-account-modal',
templateUrl: './delete-account-modal.component.html'
})
export class DeleteAccountModalComponent implements OnInit {
expectedUsername: string;
username: string;
constructor(
private userService: UserService,
private ngbActiveModal: NgbActiveModal,
) {}
ngOnInit(): void {
this.expectedUsername = null;
this.userService.getUserInfoAsync().then((userInfo) => {
this.expectedUsername = userInfo.getUsername();
});
}
isValid(): boolean {
return this.username === this.expectedUsername;
}
confirm(): void {
this.ngbActiveModal.close();
}
cancel(): void {
this.ngbActiveModal.dismiss();
}
}<|fim▁end|> | import { UserService } from 'services/user.service';
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap';
|
<|file_name|>user-access-right-drop-graph-spec.js<|end_file_name|><|fim▁begin|>/* jshint globalstrict:true, strict:true, maxlen: 5000 */
/* global describe, before, after, it, require*/
// //////////////////////////////////////////////////////////////////////////////
// / @brief tests for user access rights
// /
// / @file
// /
// / DISCLAIMER
// /
// / Copyright 2017 ArangoDB GmbH, Cologne, Germany
// /
// / Licensed under the Apache License, Version 2.0 (the "License");
// / you may not use this file except in compliance with the License.
// / You may obtain a copy of the License at
// /
// / http://www.apache.org/licenses/LICENSE-2.0
// /
// / Unless required by applicable law or agreed to in writing, software
// / distributed under the License is distributed on an "AS IS" BASIS,
// / WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// / See the License for the specific language governing permissions and
// / limitations under the License.
// /
// / Copyright holder is ArangoDB GmbH, Cologne, Germany
// /
// / @author Michael Hackstein
// / @author Mark Vollmary
// / @author Copyright 2017, ArangoDB GmbH, Cologne, Germany
// //////////////////////////////////////////////////////////////////////////////
'use strict';
const expect = require('chai').expect;
const users = require('@arangodb/users');
const helper = require('@arangodb/user-helper');
const errors = require('@arangodb').errors;
const graphModule = require('@arangodb/general-graph');
const namePrefix = helper.namePrefix;
const dbName = helper.dbName;
const rightLevels = helper.rightLevels;
const testGraphName = `${namePrefix}GraphNew`;
const testEdgeColName = `${namePrefix}EdgeColNew`;
const testVertexColName = `${namePrefix}VertexColNew`;
const userSet = helper.userSet;
const systemLevel = helper.systemLevel;
const dbLevel = helper.dbLevel;
const colLevel = helper.colLevel;
const arango = require('internal').arango;
const db = require('internal').db;
for (let l of rightLevels) {
systemLevel[l] = new Set();
dbLevel[l] = new Set();
colLevel[l] = new Set();
}
const switchUser = (user, dbname) => {
arango.reconnect(arango.getEndpoint(), dbname, user, '');
};
switchUser('root', '_system');
helper.removeAllUsers();
describe('User Rights Management', () => {
before(helper.generateAllUsers);
after(helper.removeAllUsers);
it('should check if all users are created', () => {
switchUser('root', '_system');
expect(userSet.size).to.equal(helper.userCount);
for (let name of userSet) {
expect(users.document(name), `Could not find user: ${name}`).to.not.be.undefined;
}
});
it('should test rights for', () => {
for (let name of userSet) {
let canUse = false;
try {
switchUser(name, dbName);
canUse = true;
} catch (e) {
canUse = false;
}
if (canUse) {
describe(`user ${name}`, () => {
before(() => {
switchUser(name, dbName);
});
describe('administrate on db level', () => {
const rootTestCollection = (colName, switchBack = true) => {
switchUser('root', dbName);
let col = db._collection(colName);
if (switchBack) {
switchUser(name, dbName);
}
return col !== null;
};
const rootCreateCollection = (colName, edge = false) => {
if (!rootTestCollection(colName, false)) {
if (edge) {
db._createEdgeCollection(colName);
} else {
db._create(colName);
}<|fim▁hole|> users.grantCollection(name, dbName, colName, 'none');
} else if (colLevel['ro'].has(name)) {
users.grantCollection(name, dbName, colName, 'ro');
} else if (colLevel['rw'].has(name)) {
users.grantCollection(name, dbName, colName, 'rw');
}
}
switchUser(name, dbName);
};
const rootTestGraph = (switchBack = true) => {
switchUser('root', dbName);
const graph = graphModule._exists(testGraphName);
if (switchBack) {
switchUser(name, dbName);
}
return graph !== false;
};
const rootDropGraph = () => {
if (rootTestGraph(false)) {
graphModule._drop(testGraphName, true);
}
switchUser(name, dbName);
};
const rootCreateGraph = () => {
if (!rootTestGraph(false)) {
graphModule._create(testGraphName, [{
collection: testEdgeColName,
'from': [ testVertexColName ],
'to': [ testVertexColName ]
}]);
}
switchUser(name, dbName);
};
describe('drop a', () => {
before(() => {
db._useDatabase(dbName);
rootDropGraph();
rootCreateCollection(testEdgeColName, true);
rootCreateCollection(testVertexColName, false);
rootCreateGraph();
});
after(() => {
rootDropGraph();
});
it('graph', () => {
expect(!rootTestGraph()).to.equal(false, 'Precondition failed, the graph does not exists');
if (dbLevel['rw'].has(name) && colLevel['rw'].has(name)) {
graphModule._drop(testGraphName, true);
expect(!rootTestGraph()).to.equal(true, 'Graph drop reported success, but graph was found afterwards.');
expect(!rootTestCollection(testEdgeColName)).to.equal(true, 'Graph drop reported success, but edge collection was found afterwards.');
expect(!rootTestCollection(testVertexColName)).to.equal(true, 'Graph drop reported success, but vertex collection was found afterwards.');
} else {
try {
graphModule._drop(testGraphName, true);
} catch (e) {
expect(e.errorNum).to.equal(errors.ERROR_FORBIDDEN.code);
}
expect(!rootTestGraph()).to.equal(false, `${name} was able to drop a graph with insufficient rights`);
expect(!rootTestCollection(testEdgeColName)).to.equal(false, 'Graph drop reported error, but edge collection was not found afterwards.');
expect(!rootTestCollection(testVertexColName)).to.equal(false, 'Graph drop reported error, but vertex collection was not found afterwards.');
}
});
});
describe('drop a', () => {
before(() => {
db._useDatabase(dbName);
rootDropGraph();
rootCreateCollection(testEdgeColName, true);
rootCreateCollection(testVertexColName, false);
rootCreateGraph();
});
after(() => {
rootDropGraph();
});
it('graph with specified collection access', () => {
expect(rootTestGraph()).to.equal(true, 'Precondition failed, the graph still not exists');
expect(rootTestCollection(testEdgeColName)).to.equal(true, 'Precondition failed, the edge collection still not exists');
expect(rootTestCollection(testVertexColName)).to.equal(true, 'Precondition failed, the vertex collection still not exists');
if (dbLevel['rw'].has(name) && colLevel['rw'].has(name)) {
graphModule._drop(testGraphName, true);
expect(!rootTestGraph()).to.equal(true, 'Graph drop reported success, but graph was found afterwards.');
expect(!rootTestCollection(testEdgeColName)).to.equal(true, 'Graph drop reported success, but edge collection was found afterwards.');
expect(!rootTestCollection(testVertexColName)).to.equal(true, 'Graph drop reported success, but vertex collection was found afterwards.');
} else {
try {
graphModule._drop(testGraphName, true);
} catch (e) {
expect(e.errorNum).to.equal(errors.ERROR_FORBIDDEN.code);
}
expect(!rootTestGraph()).to.equal(false, `${name} was able to drop a graph with insufficient rights`);
}
});
});
});
});
}
}
});
});<|fim▁end|> | if (colLevel['none'].has(name)) { |
<|file_name|>datepicker_tpl.js<|end_file_name|><|fim▁begin|>var tpl = [
'<div id="{uuid}" class="datepicker ui-d-n">',
' <div class="datepicker__mask"></div>',
' <div class="datepicker__main">',
' <div class="datepicker__header">',
' <div class="datepicker__time-toggle"></div>',
' <div class="datepicker__time-selector-list">',
' <div class="datepicker__time-selector-item">',
' <a href="javascript:;" class="datepicker__time-selector-arrow datepicker__time-selector-prev" id="_j_year_prev"><</a>',
' <a href="javascript:;" class="datepicker__time-selector-text" id="_j_year_text">{year}年</a>',
' <a href="javascript:;" class="datepicker__time-selector-arrow datepicker__time-selector-next" id="_j_year_next">></a>',
' </div>',
' <div class="datepicker__time-selector-item">',
' <a href="javascript:;" class="datepicker__time-selector-arrow datepicker__time-selector-prev" id="_j_month_prev"><</a>',
' <a href="javascript:;" class="datepicker__time-selector-text" id="_j_month_text">{month}月</a>',
' <a href="javascript:;" class="datepicker__time-selector-arrow datepicker__time-selector-next" id="_j_month_next" >></a>',
' </div>',
' </div>',
' </div>',
' <div class="datepicker__panel">',
' <ul class="datepicker__week-list">',
' <li class="datepicker__week-item">日</li>',
' <li class="datepicker__week-item">一</li>',
' <li class="datepicker__week-item">二</li>',
' <li class="datepicker__week-item">三</li>',
' <li class="datepicker__week-item">四</li>',
' <li class="datepicker__week-item">五</li>',
' <li class="datepicker__week-item">六</li>',<|fim▁hole|> ' <div class="datepicker__day-wrap">',
' <ul class="datepicker__day-list datepicker__day-list-curr">',
' {all_days}',
' </ul>',
' </div>',
' </div>',
' ',
' <div class="datepicker__footer">',
' <div class="datepicker__btn" id="_j_confirm_btn">确定</div>',
' <div class="datepicker__btn" id="_j_cancel_btn">取消</div>',
' </div>',
' </div>',
'</div>'
].join("");
module.exports = tpl;<|fim▁end|> | ' </ul>', |
<|file_name|>widget.preloader.js<|end_file_name|><|fim▁begin|>var WidgetPreloader = Backbone.View.extend({
tagName: 'span',
className: 'preloader',
initialize: function(){
this.render();
},
render: function(){
$(this.el).html('<div class="spinner"><div class="bar1 bar"></div><div class="bar2 bar"></div><div class="bar3 bar"></div><div class="bar4 bar"></div><div class="bar5 bar"></div><div class="bar6 bar"></div><div class="bar7 bar"></div><div class="bar8 bar"></div><div class="bar9 bar"></div><div class="bar10 bar"></div><div class="bar11 bar"></div><div class="bar12 bar"></div></div>Loading...');
return this;
}<|fim▁hole|><|fim▁end|> | }); |
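// Hedged usage sketch (added; the container selector is illustrative): the view
// renders itself in initialize(), so it can be attached straight away:
//   var preloader = new WidgetPreloader();
//   $('#results').append(preloader.el);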
<|file_name|>loghttp_test.go<|end_file_name|><|fim▁begin|>package logh
import (
"encoding/json"
"net/http"
"net/http/httptest"
"net/url"
"testing"
"time"
"github.com/dilfish/tools/io"
)
func TestRequestToInfo(t *testing.T) {
var req http.Request
tx := time.Now()
req.Method = "POST"
req.URL = &url.URL{}
req.URL.Path = "/test"
req.RemoteAddr = "1.1.1.1"
ri := RequestToInfo(&req, tx)
if ri.Time != tx {
t.Error("requestinfo.t error", ri.Time, t)
}
if ri.ClientIP != "1.1.1.1" {
t.Error("bad clientip", ri.ClientIP, "1.1.1.1")
}
if ri.Path != req.URL.Path {
t.Error("bad path", ri.Path, req.URL.Path)
}
if ri.Method != req.Method {
t.Error("bad method", ri.Method, req.Method)
}
req.RemoteAddr = "1.1.1.1:2222"
ri = RequestToInfo(&req, tx)
if ri.ClientIP != "1.1.1.1" {
t.Error("bad ip:port", ri.ClientIP, "1.1.1.1")
}
}
func TestNewRequestLogger(t *testing.T) {
get := "/get"
post := "/post"
rl := NewRequestLogger(post, get)
if rl.PostUrl != post || rl.GetUrl != get {
t.Error("bad get/post", rl, get, post)
}
var ei ErrInfo
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
bt, err := json.Marshal(ei)
if err != nil {
t.Error("marshal error:", err)
}
w.Write(bt)
}))
defer ts.Close()
rl.PostUrl = ts.URL + "/post"
req, err := http.NewRequest("POST", ts.URL+"/post", nil)
if err != nil {<|fim▁hole|> t.Error("post one error:", err)
}
rl.GetUrl = ts.URL + "/get"
_, err = rl.GetStat(time.Now().Add(-time.Second), time.Now().Add(time.Second))
if err != nil {
t.Error("get stat", err)
}
rl.PostUrl = "/post"
rl.GetUrl = "/get"
err = rl.PostOne(req)
if err == nil {
t.Error("bad post one", err)
}
_, err = rl.GetStat(time.Now().Add(-time.Second), time.Now().Add(time.Second))
if err == nil {
t.Error("bad get one", err)
}
}
func TestOpenReqLogDB(t *testing.T) {
var conf MgoConfig
err := io.ReadConfig("testdata/mongo.conf", &conf)
if err != nil {
t.Error("no such mgo config", err)
}
db := OpenReqLogDB(conf)
if db == nil {
t.Error("open mongo db error")
}
db.Close()
conf.Username = "root"
conf.Password = "ititititititiitiititititii"
db = OpenReqLogDB(conf)
if db != nil {
t.Error("fake db open good:", db)
}
}
func TestRequestLogger(t *testing.T) {
var conf MgoConfig
err := io.ReadConfig("testdata/mongo.conf", &conf)
if err != nil {
t.Error("read config error", err)
}
s := NewServeRequestLogger(conf)
if s == nil {
t.Error("new serve request error", err)
}
now := time.Now()
var r RequestInfo
r.Name = "test"
r.Method = "POST"
r.Path = "/do"
r.ClientIP = "1.1.1.1"
r.Time = now
err = s.OneRequest(&r)
if err != nil {
t.Error("one request error", err)
}
_, err = s.GetStat(now.Add(-time.Second), now.Add(time.Second))
if err != nil {
t.Error("get state error:", err)
}
}<|fim▁end|> | t.Error("new request error:", err)
}
err = rl.PostOne(req)
if err != nil { |
<|file_name|>EacOnItemPickup.java<|end_file_name|><|fim▁begin|>package Eac.event;
import Eac.Eac;
import cpw.mods.fml.common.eventhandler.SubscribeEvent;<|fim▁hole|>public class EacOnItemPickup extends Eac {
@SubscribeEvent
public void EacOnItemPickup(PlayerEvent.ItemPickupEvent e) {
if (e.pickedUp.getEntityItem().isItemEqual(new ItemStack(oreAir))) {
e.player.addStat(airoremined, 1);
}
else if (e.pickedUp.getEntityItem().isItemEqual(new ItemStack(oreShadow))) {
e.player.addStat(shadoworemined, 1);
}
}
}<|fim▁end|> | import cpw.mods.fml.common.gameevent.PlayerEvent;
import net.minecraft.item.ItemStack;
|
<|file_name|>web.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf8 -*-
"""The Tornado web framework.
核心模块, 参考示例使用代码:
- 重要模块:
- tornado.web
- tornado.ioloop # 根据示例,可知入口在此.参看: ioloop.py
- tornado.httpserver
The Tornado web framework looks a bit like web.py (http://webpy.org/) or
Google's webapp (http://code.google.com/appengine/docs/python/tools/webapp/),
but with additional tools and optimizations to take advantage of the
Tornado non-blocking web server and tools.
Here is the canonical "Hello, world" example app:
import tornado.httpserver
import tornado.ioloop
import tornado.web
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write("Hello, world")
if __name__ == "__main__":
application = tornado.web.Application([
(r"/", MainHandler),
])
http_server = tornado.httpserver.HTTPServer(application)
http_server.listen(8888)
tornado.ioloop.IOLoop.instance().start()
See the Tornado walkthrough on GitHub for more details and a good
getting started guide.
"""
import base64
import binascii
import calendar
import Cookie
import cStringIO
import datetime
import email.utils
import escape
import functools
import gzip
import hashlib
import hmac
import httplib
import locale
import logging
import mimetypes
import os.path
import re
import stat
import sys
import template
import time
import types
import urllib
import urlparse
import uuid
"""
# Module notes: core module
What RequestHandler() is responsible for:
- 1. HTTP method support (GET, POST, HEAD, DELETE, PUT); predefined method stubs
- 2. Predefined hooks: defined in pairs [similar to unittest's setUp() and tearDown()]
- prepare() # preparation before the handler runs
- on_connection_close() # cleanup after the handler runs
- use them as needed
- 3. cookie handling:
- set
- get
- clear
- 4. HTTP header handling:
- set_status() # status code
- set_header() # header fields
- 5. redirects:
- redirect()
"""
class RequestHandler(object):
"""Subclass this class and define get() or post() to make a handler.
If you want to support more methods than the standard GET/HEAD/POST, you
should override the class variable SUPPORTED_METHODS in your
RequestHandler class.
"""
SUPPORTED_METHODS = ("GET", "HEAD", "POST", "DELETE", "PUT")
def __init__(self, application, request, transforms=None):
self.application = application
self.request = request
self._headers_written = False
self._finished = False
self._auto_finish = True
self._transforms = transforms or []
self.ui = _O((n, self._ui_method(m)) for n, m in
application.ui_methods.iteritems())
self.ui["modules"] = _O((n, self._ui_module(n, m)) for n, m in
application.ui_modules.iteritems())
self.clear()
# Check since connection is not available in WSGI
if hasattr(self.request, "connection"):
self.request.connection.stream.set_close_callback(
self.on_connection_close) # note when on_connection_close() is invoked
@property
def settings(self):
return self.application.settings
# The following are the default method stubs; if a subclass does not override
# them, they raise an exception immediately.
# In other words: these methods must be overridden before they can be used.
def head(self, *args, **kwargs):
raise HTTPError(405)
def get(self, *args, **kwargs):
raise HTTPError(405)
def post(self, *args, **kwargs):
raise HTTPError(405)
def delete(self, *args, **kwargs):
raise HTTPError(405)
def put(self, *args, **kwargs):
raise HTTPError(405)
# Predefined hook: preparation function, for handlers needing custom setup
# Note when it is called: self._execute()
def prepare(self):
"""Called before the actual handler method.
Useful to override in a handler if you want a common bottleneck for
all of your requests.
"""
pass
# Predefined hook 2: cleanup work after execution (adapt as needed)
# Note where it is wired up: __init__()
def on_connection_close(self):
"""Called in async handlers if the client closed the connection.
You may override this to clean up resources associated with
long-lived connections.
Note that the select()-based implementation of IOLoop does not detect
closed connections and so this method will not be called until
you try (and fail) to produce some output. The epoll- and kqueue-
based implementations should detect closed connections even while
the request is idle.
"""
pass
def clear(self):
"""Resets all headers and content for this response."""
self._headers = {
"Server": "TornadoServer/1.0",
"Content-Type": "text/html; charset=UTF-8",
}
if not self.request.supports_http_1_1():
if self.request.headers.get("Connection") == "Keep-Alive":
self.set_header("Connection", "Keep-Alive")
self._write_buffer = []
self._status_code = 200
# Set the HTTP status code
def set_status(self, status_code):
"""Sets the status code for our response."""
assert status_code in httplib.responses # condition check via assert; raises immediately on a bad code
self._status_code = status_code
# Set an HTTP response header
# The value is converted according to its type
def set_header(self, name, value):
"""Sets the given response header name and value.
If a datetime is given, we automatically format it according to the
HTTP specification. If the value is not a string, we convert it to
a string. All header values are then encoded as UTF-8.
"""
if isinstance(value, datetime.datetime):
t = calendar.timegm(value.utctimetuple())
value = email.utils.formatdate(t, localtime=False, usegmt=True)
elif isinstance(value, int) or isinstance(value, long):
value = str(value)
else:
value = _utf8(value)
# If \n is allowed into the header, it is possible to inject
# additional headers or split the request. Also cap length to
# prevent obviously erroneous values.
safe_value = re.sub(r"[\x00-\x1f]", " ", value)[:4000] # regex filter + cap at 4000 characters
if safe_value != value:
raise ValueError("Unsafe header value %r", value)
self._headers[name] = value
_ARG_DEFAULT = []
def get_argument(self, name, default=_ARG_DEFAULT, strip=True):
"""Returns the value of the argument with the given name.
If default is not provided, the argument is considered to be
required, and we throw an HTTP 404 exception if it is missing.
If the argument appears in the url more than once, we return the
last value.
The returned value is always unicode.
"""
args = self.get_arguments(name, strip=strip)
if not args:
if default is self._ARG_DEFAULT:
raise HTTPError(404, "Missing argument %s" % name)
return default
return args[-1]
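# Hedged example (added): for a request to /page?tag=a&tag=b,
# self.get_argument("tag") returns u"b" (the last value), while
# self.get_arguments("tag") below returns [u"a", u"b"].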
def get_arguments(self, name, strip=True):
"""Returns a list of the arguments with the given name.
If the argument is not present, returns an empty list.
The returned values are always unicode.
"""
values = self.request.arguments.get(name, [])
# Get rid of any weird control chars
values = [re.sub(r"[\x00-\x08\x0e-\x1f]", " ", x) for x in values]
values = [_unicode(x) for x in values]
if strip:
values = [x.strip() for x in values]
return values
@property
def cookies(self):
"""A dictionary of Cookie.Morsel objects."""
# Create the cookies object if it does not exist yet
# If it already exists, return it
if not hasattr(self, "_cookies"):
self._cookies = Cookie.BaseCookie() # create
if "Cookie" in self.request.headers:
try:
self._cookies.load(self.request.headers["Cookie"]) # populate
except:
self.clear_all_cookies() # on a parse error, call the cleanup helper
return self._cookies
def get_cookie(self, name, default=None):
"""Gets the value of the cookie with the given name, else default."""
if name in self.cookies: # note: cookies is a property, so it can be used like this
return self.cookies[name].value
return default
def set_cookie(self, name, value, domain=None, expires=None, path="/",
expires_days=None, **kwargs):
"""Sets the given cookie name/value with the given options.
Additional keyword arguments are set on the Cookie.Morsel
directly.
See http://docs.python.org/library/cookie.html#morsel-objects
for available attributes.
"""
name = _utf8(name)
value = _utf8(value)
if re.search(r"[\x00-\x20]", name + value):
# Don't let us accidentally inject bad stuff
raise ValueError("Invalid cookie %r: %r" % (name, value))
if not hasattr(self, "_new_cookies"):
self._new_cookies = []
new_cookie = Cookie.BaseCookie()
self._new_cookies.append(new_cookie)
new_cookie[name] = value
if domain:
new_cookie[name]["domain"] = domain
if expires_days is not None and not expires:
expires = datetime.datetime.utcnow() + datetime.timedelta(
days=expires_days)
if expires:
timestamp = calendar.timegm(expires.utctimetuple())
new_cookie[name]["expires"] = email.utils.formatdate(
timestamp, localtime=False, usegmt=True)
if path:
new_cookie[name]["path"] = path
for k, v in kwargs.iteritems():
new_cookie[name][k] = v
def clear_cookie(self, name, path="/", domain=None):
"""Deletes the cookie with the given name."""
expires = datetime.datetime.utcnow() - datetime.timedelta(days=365)
# Assign an empty value to clear the cookie; the standard approach in most web frameworks
self.set_cookie(name, value="", path=path, expires=expires,
domain=domain)
def clear_all_cookies(self):
"""Deletes all the cookies the user sent with this request."""
# Note the naming pattern of the two related functions above:
# - single operation: clear_cookie()
# - bulk operation: clear_all_cookies()
for name in self.cookies.iterkeys():
self.clear_cookie(name)
def set_secure_cookie(self, name, value, expires_days=30, **kwargs):
"""Signs and timestamps a cookie so it cannot be forged.
You must specify the 'cookie_secret' setting in your Application
to use this method. It should be a long, random sequence of bytes
to be used as the HMAC secret for the signature.
To read a cookie set with this method, use get_secure_cookie().
"""
# The next few steps build a "secure cookie", adding a timestamp to prevent forgery
timestamp = str(int(time.time()))
value = base64.b64encode(value)
signature = self._cookie_signature(name, value, timestamp) # includes the timestamp
value = "|".join([value, timestamp, signature])
self.set_cookie(name, value, expires_days=expires_days, **kwargs)
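# Hedged round-trip sketch (added; the cookie name is illustrative, and the
# application must define the 'cookie_secret' setting):
#
#     self.set_secure_cookie("session_id", "42")   # stored as value|timestamp|signature
#     sid = self.get_secure_cookie("session_id")   # -> "42", or None if tampered/expired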
def get_secure_cookie(self, name, include_name=True, value=None):
"""Returns the given signed cookie if it validates, or None.
In older versions of Tornado (0.1 and 0.2), we did not include the
name of the cookie in the cookie signature. To read these old-style
cookies, pass include_name=False to this method. Otherwise, all
attempts to read old-style cookies will fail (and you may log all
your users out whose cookies were written with a previous Tornado
version).
"""
if value is None:
value = self.get_cookie(name)
if not value:
return None
parts = value.split("|")
if len(parts) != 3:
return None
if include_name:
signature = self._cookie_signature(name, parts[0], parts[1])
else:
signature = self._cookie_signature(parts[0], parts[1])
if not _time_independent_equals(parts[2], signature):
logging.warning("Invalid cookie signature %r", value)
return None
timestamp = int(parts[1])
if timestamp < time.time() - 31 * 86400:
logging.warning("Expired cookie %r", value)
return None
# Attempt to decode and return
try:
return base64.b64decode(parts[0])
except:
return None
def _cookie_signature(self, *parts):
self.require_setting("cookie_secret", "secure cookies")
hash = hmac.new(self.application.settings["cookie_secret"],
digestmod=hashlib.sha1)
for part in parts:
hash.update(part)
return hash.hexdigest()
# 关键代码: 重定向
#
def redirect(self, url, permanent=False):
"""Sends a redirect to the given (optionally relative) URL."""
if self._headers_written:
raise Exception("Cannot redirect after headers have been written")
self.set_status(301 if permanent else 302)
# Remove whitespace
url = re.sub(r"[\x00-\x20]+", "", _utf8(url))
self.set_header("Location", urlparse.urljoin(self.request.uri, url))
self.finish() # finish the request
# Key code: prepares the data for rendering the page; a commonly used method
# Special notes:
# - write() does not render the page directly; it only stages the data to render
# - the actual HTML rendering happens in finish()
def write(self, chunk):
"""Writes the given chunk to the output buffer.
To write the output to the network, use the flush() method below.
If the given chunk is a dictionary, we write it as JSON and set
the Content-Type of the response to be text/javascript.
"""
assert not self._finished
if isinstance(chunk, dict):
chunk = escape.json_encode(chunk)
self.set_header("Content-Type", "text/javascript; charset=UTF-8")
chunk = _utf8(chunk)
self._write_buffer.append(chunk) # stage the HTML data to be rendered
# Key code: render the page
#
def render(self, template_name, **kwargs):
"""Renders the template with the given arguments as the response."""
html = self.render_string(template_name, **kwargs)
# Insert the additional JS and CSS added by the modules on the page
js_embed = []
js_files = []
css_embed = []
css_files = []
html_heads = []
html_bodies = []
for module in getattr(self, "_active_modules", {}).itervalues():
# JavaScript part
embed_part = module.embedded_javascript()
if embed_part:
js_embed.append(_utf8(embed_part))
file_part = module.javascript_files()
if file_part:
if isinstance(file_part, basestring):
js_files.append(file_part)
else:
js_files.extend(file_part)
# CSS part
embed_part = module.embedded_css()
if embed_part:
css_embed.append(_utf8(embed_part))
file_part = module.css_files()
if file_part:
if isinstance(file_part, basestring):
css_files.append(file_part)
else:
css_files.extend(file_part)
# Header part
head_part = module.html_head()
if head_part:
html_heads.append(_utf8(head_part))
body_part = module.html_body()
if body_part:
html_bodies.append(_utf8(body_part))
# ----------------------------------------------------------
# The chunked assembly section below:
# - its real job: splicing together one long HTML string (containing HTML, CSS, JS)
# ----------------------------------------------------------
if js_files:
# Maintain order of JavaScript files given by modules
paths = []
unique_paths = set()
for path in js_files:
if not path.startswith("/") and not path.startswith("http:"):
path = self.static_url(path)
if path not in unique_paths:
paths.append(path)
unique_paths.add(path)
js = ''.join('<script src="' + escape.xhtml_escape(p) +
'" type="text/javascript"></script>'
for p in paths)
sloc = html.rindex('</body>')
html = html[:sloc] + js + '\n' + html[sloc:]
if js_embed:
js = '<script type="text/javascript">\n//<![CDATA[\n' + \
'\n'.join(js_embed) + '\n//]]>\n</script>'
sloc = html.rindex('</body>')
html = html[:sloc] + js + '\n' + html[sloc:]
if css_files:
paths = set()
for path in css_files:
if not path.startswith("/") and not path.startswith("http:"):
paths.add(self.static_url(path))
else:
paths.add(path)
css = ''.join('<link href="' + escape.xhtml_escape(p) + '" '
'type="text/css" rel="stylesheet"/>'
for p in paths)
hloc = html.index('</head>')
html = html[:hloc] + css + '\n' + html[hloc:]
if css_embed:
css = '<style type="text/css">\n' + '\n'.join(css_embed) + \
'\n</style>'
hloc = html.index('</head>')
html = html[:hloc] + css + '\n' + html[hloc:]
if html_heads:
hloc = html.index('</head>')
html = html[:hloc] + ''.join(html_heads) + '\n' + html[hloc:]
if html_bodies:
hloc = html.index('</body>')
html = html[:hloc] + ''.join(html_bodies) + '\n' + html[hloc:]
# Note
self.finish(html) # the key call
def render_string(self, template_name, **kwargs):
"""Generate the given template with the given arguments.
We return the generated string. To generate and write a template
as a response, use render() above.
"""
# If no template_path is specified, use the path of the calling file
template_path = self.get_template_path()
if not template_path:
frame = sys._getframe(0)
web_file = frame.f_code.co_filename
while frame.f_code.co_filename == web_file:
frame = frame.f_back
template_path = os.path.dirname(frame.f_code.co_filename)
if not getattr(RequestHandler, "_templates", None):
RequestHandler._templates = {}
if template_path not in RequestHandler._templates:
loader = self.application.settings.get("template_loader") or\
template.Loader(template_path)
RequestHandler._templates[template_path] = loader # note: loaders are cached per path
t = RequestHandler._templates[template_path].load(template_name)
args = dict(
handler=self,
request=self.request,
current_user=self.current_user,
locale=self.locale,
_=self.locale.translate,
static_url=self.static_url,
xsrf_form_html=self.xsrf_form_html,
reverse_url=self.application.reverse_url
)
args.update(self.ui)
args.update(kwargs)
return t.generate(**args)
def flush(self, include_footers=False):
"""Flushes the current output buffer to the nextwork."""
if self.application._wsgi:
raise Exception("WSGI applications do not support flush()")
chunk = "".join(self._write_buffer)
self._write_buffer = []
if not self._headers_written:
self._headers_written = True
for transform in self._transforms:
self._headers, chunk = transform.transform_first_chunk(
self._headers, chunk, include_footers)
headers = self._generate_headers()
else:
for transform in self._transforms:
chunk = transform.transform_chunk(chunk, include_footers)
headers = ""
# Ignore the chunk and only write the headers for HEAD requests
if self.request.method == "HEAD":
if headers:
self.request.write(headers) # note the self.request.write() method
return
if headers or chunk:
self.request.write(headers + chunk)
# Critical code: writes out the HTML page
#
def finish(self, chunk=None):
"""Finishes this response, ending the HTTP request."""
assert not self._finished
if chunk is not None:
self.write(chunk) # note: the key call here
# Automatically support ETags and add the Content-Length header if
# we have not flushed any content yet.
if not self._headers_written:
if (self._status_code == 200 and self.request.method == "GET" and
"Etag" not in self._headers):
hasher = hashlib.sha1()
for part in self._write_buffer:
hasher.update(part)
etag = '"%s"' % hasher.hexdigest()
inm = self.request.headers.get("If-None-Match")
if inm and inm.find(etag) != -1:
self._write_buffer = []
self.set_status(304)
else:
self.set_header("Etag", etag)
if "Content-Length" not in self._headers:
content_length = sum(len(part) for part in self._write_buffer)
self.set_header("Content-Length", content_length)
if hasattr(self.request, "connection"):
# Now that the request is finished, clear the callback we
# set on the IOStream (which would otherwise prevent the
# garbage collection of the RequestHandler when there
# are keepalive connections)
self.request.connection.stream.set_close_callback(None)
if not self.application._wsgi:
self.flush(include_footers=True)
self.request.finish() # note this call
self._log()
self._finished = True
# Return an internal error to the browser
def send_error(self, status_code=500, **kwargs):
"""Sends the given HTTP error code to the browser.
We also send the error HTML for the given error code as returned by
get_error_html. Override that method if you want custom error pages
for your application.
"""
if self._headers_written:
logging.error("Cannot send error response after headers written")
if not self._finished:
self.finish()
return
self.clear()
self.set_status(status_code)
message = self.get_error_html(status_code, **kwargs)
self.finish(message) # write out the error page
def get_error_html(self, status_code, **kwargs):
"""Override to implement custom error pages.
If this error was caused by an uncaught exception, the
exception object can be found in kwargs e.g. kwargs['exception']
"""
return "<html><title>%(code)d: %(message)s</title>" \
"<body>%(code)d: %(message)s</body></html>" % {
"code": status_code,
"message": httplib.responses[status_code],
}
# Locale configuration: typically used for internationalization / language settings (browser language)
#
@property
def locale(self):
"""The local for the current session.
Determined by either get_user_locale, which you can override to
set the locale based on, e.g., a user preference stored in a
database, or get_browser_locale, which uses the Accept-Language
header.
"""
if not hasattr(self, "_locale"):
self._locale = self.get_user_locale() # use the user's own setting
if not self._locale:
self._locale = self.get_browser_locale() # fall back to the browser default
assert self._locale
return self._locale
# Predefined hook - per-user locale
# - must be overridden before use
def get_user_locale(self):
"""Override to determine the locale from the authenticated user.
If None is returned, we use the Accept-Language header.
"""
return None
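# Hedged override sketch (added; the user-record key is illustrative):
#
#     def get_user_locale(self):
#         if self.current_user and self.current_user.get("locale"):
#             return locale.get(self.current_user["locale"])
#         return None  # fall back to the Accept-Language header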
# Locale from the browser's default settings
def get_browser_locale(self, default="en_US"):
"""Determines the user's locale from Accept-Language header.
See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.4
"""
if "Accept-Language" in self.request.headers:
languages = self.request.headers["Accept-Language"].split(",")
locales = []
for language in languages:
parts = language.strip().split(";")
if len(parts) > 1 and parts[1].startswith("q="):
try:
score = float(parts[1][2:])
except (ValueError, TypeError):
score = 0.0
else:
score = 1.0
locales.append((parts[0], score))
if locales:
locales.sort(key=lambda (l, s): s, reverse=True)
codes = [l[0] for l in locales]
return locale.get(*codes)
return locale.get(default)
# Get the current user
@property
def current_user(self):
"""The authenticated user for this request.
Determined by either get_current_user, which you can override to
set the user based on, e.g., a cookie. If that method is not
overridden, this method always returns None.
We lazy-load the current user the first time this method is called
and cache the result after that.
"""
if not hasattr(self, "_current_user"):
self._current_user = self.get_current_user()
return self._current_user
# Predefined hook - get the current user
# - must be overridden before use
# - special note: this hook is needed by nearly every app, so it almost always gets overridden
def get_current_user(self):
"""Override to determine the current user from, e.g., a cookie."""
return None
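# Hedged override sketch (added; the cookie name is illustrative): the common
# pattern pairs this with set_secure_cookie("user", ...) at login time.
#
#     def get_current_user(self):
#         return self.get_secure_cookie("user")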
# ----------------------------------------------------
# The next two functions fetch default configuration values:
# - the login URL
# - the template path
# ----------------------------------------------------
def get_login_url(self):
"""Override to customize the login URL based on the request.
By default, we use the 'login_url' application setting.
"""
self.require_setting("login_url", "@tornado.web.authenticated")
return self.application.settings["login_url"]
def get_template_path(self):
"""Override to customize template path for each handler.
By default, we use the 'template_path' application setting.
Return None to load templates relative to the calling file.
"""
return self.application.settings.get("template_path")
# Cross-site request forgery prevention
#
# - first check whether a token has already been cached
# - if cached, return it directly
# - if not, try to fetch it from the cookie
# - if the cookie has one, take it from there and return it
# - if the cookie has none, generate a token and write it to the cookie in the
#   same step (so it never needs to be regenerated)
#
@property
def xsrf_token(self):
"""The XSRF-prevention token for the current user/session.
To prevent cross-site request forgery, we set an '_xsrf' cookie
and include the same '_xsrf' value as an argument with all POST
requests. If the two do not match, we reject the form submission
as a potential forgery.
See http://en.wikipedia.org/wiki/Cross-site_request_forgery
"""
if not hasattr(self, "_xsrf_token"):
token = self.get_cookie("_xsrf") # fetch from the cookie
if not token:
token = binascii.b2a_hex(uuid.uuid4().bytes) # how the token is generated
expires_days = 30 if self.current_user else None # token lifetime
self.set_cookie("_xsrf", token, expires_days=expires_days) # write it back to the cookie
self._xsrf_token = token # cache the token
return self._xsrf_token
def check_xsrf_cookie(self):
"""Verifies that the '_xsrf' cookie matches the '_xsrf' argument.
To prevent cross-site request forgery, we set an '_xsrf' cookie
and include the same '_xsrf' value as an argument with all POST
requests. If the two do not match, we reject the form submission
as a potential forgery.
See http://en.wikipedia.org/wiki/Cross-site_request_forgery
"""
if self.request.headers.get("X-Requested-With") == "XMLHttpRequest":
return
token = self.get_argument("_xsrf", None)
if not token:
raise HTTPError(403, "'_xsrf' argument missing from POST")
if self.xsrf_token != token:
raise HTTPError(403, "XSRF cookie does not match POST argument")
# Form submission - helper for preventing XSRF attacks
def xsrf_form_html(self):
"""An HTML <input/> element to be included with all POST forms.
It defines the _xsrf input value, which we check on all POST
requests to prevent cross-site request forgery.
If you have set the 'xsrf_cookies' application setting, you must include this
HTML within all of your HTML forms.
See check_xsrf_cookie() above for more information.
"""
# Special note: this form-submission HTML string must contain a (name="_xsrf") field
return '<input type="hidden" name="_xsrf" value="' + \
escape.xhtml_escape(self.xsrf_token) + '"/>'
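# Hedged template sketch (added): with the 'xsrf_cookies' application setting
# enabled, every POST form should embed this field, e.g. in a template:
#
#     <form action="/new_message" method="post">
#       {{ xsrf_form_html() }}
#       <input type="text" name="message"/>
#       <input type="submit" value="Post"/>
#     </form>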
# Static resource URL
def static_url(self, path):
"""Returns a static URL for the given relative static file path.
This method requires you set the 'static_path' setting in your
application (which specifies the root directory of your static
files).
We append ?v=<signature> to the returned URL, which makes our
static file handler set an infinite expiration header on the
returned content. The signature is based on the content of the
file.
If this handler has a "include_host" attribute, we include the
full host for every static URL, including the "http://". Set
this attribute for handlers whose output needs non-relative static
path names.
"""
self.require_setting("static_path", "static_url")
if not hasattr(RequestHandler, "_static_hashes"):
RequestHandler._static_hashes = {}
hashes = RequestHandler._static_hashes
if path not in hashes:
try:
f = open(os.path.join(
self.application.settings["static_path"], path))
hashes[path] = hashlib.md5(f.read()).hexdigest()
f.close()
except:
logging.error("Could not open static file %r", path)
hashes[path] = None
base = self.request.protocol + "://" + self.request.host \
if getattr(self, "include_host", False) else ""
static_url_prefix = self.settings.get('static_url_prefix', '/static/')
if hashes.get(path):
return base + static_url_prefix + path + "?v=" + hashes[path][:5]
else:
return base + static_url_prefix + path
# Asynchronous callback
def async_callback(self, callback, *args, **kwargs):
"""Wrap callbacks with this if they are used on asynchronous requests.
Catches exceptions and properly finishes the request.
"""
if callback is None:
return None
if args or kwargs:
callback = functools.partial(callback, *args, **kwargs)
def wrapper(*args, **kwargs):
try:
return callback(*args, **kwargs)
except Exception, e:
if self._headers_written:
logging.error("Exception after headers written",
exc_info=True)
else:
self._handle_request_exception(e)
return wrapper
def require_setting(self, name, feature="this feature"):
"""Raises an exception if the given app setting is not defined."""
if not self.application.settings.get(name):
raise Exception("You must define the '%s' setting in your "
"application to use %s" % (name, feature))
def reverse_url(self, name, *args):
return self.application.reverse_url(name, *args)
    # Key code:
    #
def _execute(self, transforms, *args, **kwargs):
"""Executes this request with the given output transforms."""
self._transforms = transforms
try:
if self.request.method not in self.SUPPORTED_METHODS:
raise HTTPError(405)
# If XSRF cookies are turned on, reject form submissions without
# the proper cookie
if self.request.method == "POST" and \
self.application.settings.get("xsrf_cookies"):
                self.check_xsrf_cookie()  # XSRF check
            self.prepare()  # note when this hook runs
if not self._finished:
getattr(self, self.request.method.lower())(*args, **kwargs)
if self._auto_finish and not self._finished:
                self.finish()  # key call
except Exception, e:
self._handle_request_exception(e)
def _generate_headers(self):
lines = [self.request.version + " " + str(self._status_code) + " " +
httplib.responses[self._status_code]]
lines.extend(["%s: %s" % (n, v) for n, v in self._headers.iteritems()])
for cookie_dict in getattr(self, "_new_cookies", []):
for cookie in cookie_dict.values():
lines.append("Set-Cookie: " + cookie.OutputString(None))
return "\r\n".join(lines) + "\r\n\r\n"
    # Error logging
def _log(self):
if self._status_code < 400:
log_method = logging.info
elif self._status_code < 500:
log_method = logging.warning
else:
log_method = logging.error
request_time = 1000.0 * self.request.request_time()
        # emit the log line
log_method("%d %s %.2fms", self._status_code,
self._request_summary(), request_time)
def _request_summary(self):
return self.request.method + " " + self.request.uri + " (" + \
self.request.remote_ip + ")"
def _handle_request_exception(self, e):
if isinstance(e, HTTPError):
if e.log_message:
format = "%d %s: " + e.log_message
args = [e.status_code, self._request_summary()] + list(e.args)
logging.warning(format, *args)
if e.status_code not in httplib.responses:
logging.error("Bad HTTP status code: %d", e.status_code)
self.send_error(500, exception=e)
else:
self.send_error(e.status_code, exception=e)
else:
logging.error("Uncaught exception %s\n%r", self._request_summary(),
self.request, exc_info=e)
self.send_error(500, exception=e)
def _ui_module(self, name, module):
def render(*args, **kwargs):
if not hasattr(self, "_active_modules"):
self._active_modules = {}
if name not in self._active_modules:
self._active_modules[name] = module(self)
rendered = self._active_modules[name].render(*args, **kwargs)
return rendered
return render
def _ui_method(self, method):
return lambda *args, **kwargs: method(self, *args, **kwargs)
# Decorator: asynchronous handling
def asynchronous(method):
"""Wrap request handler methods with this if they are asynchronous.
If this decorator is given, the response is not finished when the
method returns. It is up to the request handler to call self.finish()
to finish the HTTP request. Without this decorator, the request is
automatically finished when the get() or post() method returns.
class MyRequestHandler(web.RequestHandler):
@web.asynchronous
def get(self):
http = httpclient.AsyncHTTPClient()
http.fetch("http://friendfeed.com/", self._on_download)
def _on_download(self, response):
self.write("Downloaded!")
self.finish()
"""
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
if self.application._wsgi:
raise Exception("@asynchronous is not supported for WSGI apps")
self._auto_finish = False
return method(self, *args, **kwargs)
return wrapper
# Decorator: strip the trailing slash (/)
def removeslash(method):
"""Use this decorator to remove trailing slashes from the request path.
For example, a request to '/foo/' would redirect to '/foo' with this
decorator. Your request handler mapping should use a regular expression
like r'/foo/*' in conjunction with using the decorator.
"""
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
        if self.request.path.endswith("/"):  # path ends with /
            if self.request.method == "GET":
                uri = self.request.path.rstrip("/")  # strip the trailing /
                if self.request.query:
                    uri += "?" + self.request.query
                self.redirect(uri)  # redirect
return
raise HTTPError(404)
return method(self, *args, **kwargs)
return wrapper
# Decorator: append a trailing slash (/)
def addslash(method):
"""Use this decorator to add a missing trailing slash to the request path.
For example, a request to '/foo' would redirect to '/foo/' with this
decorator. Your request handler mapping should use a regular expression
like r'/foo/?' in conjunction with using the decorator.
"""
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
if not self.request.path.endswith("/"):
if self.request.method == "GET":
uri = self.request.path + "/"
if self.request.query:
uri += "?" + self.request.query
                self.redirect(uri)  # redirect
return
raise HTTPError(404)
return method(self, *args, **kwargs)
return wrapper
# ----------------------------------------------------------------
# Entry point:
#
#
# ----------------------------------------------------------------
class Application(object):
"""A collection of request handlers that make up a web application.
Instances of this class are callable and can be passed directly to
HTTPServer to serve the application:
application = web.Application([
(r"/", MainPageHandler),
])
http_server = httpserver.HTTPServer(application)
http_server.listen(8080)
ioloop.IOLoop.instance().start()
The constructor for this class takes in a list of URLSpec objects
or (regexp, request_class) tuples. When we receive requests, we
iterate over the list in order and instantiate an instance of the
first request class whose regexp matches the request path.
Each tuple can contain an optional third element, which should be a
dictionary if it is present. That dictionary is passed as keyword
arguments to the contructor of the handler. This pattern is used
for the StaticFileHandler below:
application = web.Application([
(r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
])
We support virtual hosts with the add_handlers method, which takes in
a host regular expression as the first argument:
application.add_handlers(r"www\.myhost\.com", [
(r"/article/([0-9]+)", ArticleHandler),
])
You can serve static files by sending the static_path setting as a
keyword argument. We will serve those files from the /static/ URI
(this is configurable with the static_url_prefix setting),
and we will serve /favicon.ico and /robots.txt from the same directory.
"""
def __init__(self, handlers=None, default_host="", transforms=None,
wsgi=False, **settings):
"""
:param handlers:
:param default_host:
:param transforms:
:param wsgi:
:param settings:
            - gzip        : response compression
            - static_path : static resource path
            - debug       : debug toggle
:return:
"""
if transforms is None:
self.transforms = []
if settings.get("gzip"): # 配置选项
self.transforms.append(GZipContentEncoding)
self.transforms.append(ChunkedTransferEncoding)
else:
self.transforms = transforms
self.handlers = []
self.named_handlers = {}
self.default_host = default_host
        self.settings = settings  # user-supplied configuration
self.ui_modules = {}
self.ui_methods = {}
self._wsgi = wsgi
self._load_ui_modules(settings.get("ui_modules", {}))
self._load_ui_methods(settings.get("ui_methods", {}))
if self.settings.get("static_path"): # 配置项中含: 静态资源路径
path = self.settings["static_path"]
handlers = list(handlers or [])
static_url_prefix = settings.get("static_url_prefix",
"/static/")
handlers = [
(re.escape(static_url_prefix) + r"(.*)", StaticFileHandler,
dict(path=path)),
(r"/(favicon\.ico)", StaticFileHandler, dict(path=path)),
(r"/(robots\.txt)", StaticFileHandler, dict(path=path)),
] + handlers
if handlers:
self.add_handlers(".*$", handlers) # 关键调用
# Automatically reload modified modules
if self.settings.get("debug") and not wsgi: # 调试模式时, 自动监测,并重启项目
import autoreload # tornado 自定义模块
autoreload.start()
def add_handlers(self, host_pattern, host_handlers):
"""Appends the given handlers to our handler list."""
if not host_pattern.endswith("$"):
host_pattern += "$"
handlers = []
# The handlers with the wildcard host_pattern are a special
# case - they're added in the constructor but should have lower
# precedence than the more-precise handlers added later.
# If a wildcard handler group exists, it should always be last
# in the list, so insert new groups just before it.
if self.handlers and self.handlers[-1][0].pattern == '.*$':
            self.handlers.insert(-1, (re.compile(host_pattern), handlers))  # regex match
        else:
            self.handlers.append((re.compile(host_pattern), handlers))  # regex match
for spec in host_handlers:
            if type(spec) is type(()):  # tuple
assert len(spec) in (2, 3)
pattern = spec[0]
handler = spec[1]
if len(spec) == 3:
kwargs = spec[2]
else:
kwargs = {}
                spec = URLSpec(pattern, handler, kwargs)  # key call
handlers.append(spec)
if spec.name:
if spec.name in self.named_handlers:
logging.warning(
"Multiple handlers named %s; replacing previous value",
spec.name)
self.named_handlers[spec.name] = spec
def add_transform(self, transform_class):
"""Adds the given OutputTransform to our transform list."""
self.transforms.append(transform_class)
def _get_host_handlers(self, request):
host = request.host.lower().split(':')[0]
for pattern, handlers in self.handlers:
if pattern.match(host):
return handlers
# Look for default host if not behind load balancer (for debugging)
if "X-Real-Ip" not in request.headers:
for pattern, handlers in self.handlers:
if pattern.match(self.default_host):
return handlers
return None
def _load_ui_methods(self, methods):
if type(methods) is types.ModuleType:
self._load_ui_methods(dict((n, getattr(methods, n))
for n in dir(methods)))
elif isinstance(methods, list):
for m in methods:
self._load_ui_methods(m)
else:
for name, fn in methods.iteritems():
if not name.startswith("_") and hasattr(fn, "__call__") \
and name[0].lower() == name[0]:
self.ui_methods[name] = fn
def _load_ui_modules(self, modules):
if type(modules) is types.ModuleType:
self._load_ui_modules(dict((n, getattr(modules, n))
for n in dir(modules)))
elif isinstance(modules, list):
for m in modules:
self._load_ui_modules(m)
else:
assert isinstance(modules, dict)
for name, cls in modules.iteritems():
try:
if issubclass(cls, UIModule):
self.ui_modules[name] = cls
except TypeError:
pass
    # Key definition: turns the class instance into a callable object
    #
    # Note the call sites:
    # - wsgi.py
    #   - WSGIApplication()
    #     - the self.__call__() method
    #
def __call__(self, request):
"""Called by HTTPServer to execute the request."""
transforms = [t(request) for t in self.transforms]
handler = None
args = []
kwargs = {}
handlers = self._get_host_handlers(request)
if not handlers:
handler = RedirectHandler(
request, "http://" + self.default_host + "/")
else:
for spec in handlers:
match = spec.regex.match(request.path)
if match:
# None-safe wrapper around urllib.unquote to handle
# unmatched optional groups correctly
def unquote(s):
if s is None: return s
return urllib.unquote(s)
handler = spec.handler_class(self, request, **spec.kwargs)
# Pass matched groups to the handler. Since
# match.groups() includes both named and unnamed groups,
# we want to use either groups or groupdict but not both.
kwargs = dict((k, unquote(v))
for (k, v) in match.groupdict().iteritems())
if kwargs:
args = []
else:
args = [unquote(s) for s in match.groups()]
break
if not handler:
handler = ErrorHandler(self, request, 404)
# In debug mode, re-compile templates and reload static files on every
# request so you don't need to restart to see changes
if self.settings.get("debug"):
if getattr(RequestHandler, "_templates", None):
map(lambda loader: loader.reset(),
RequestHandler._templates.values())
RequestHandler._static_hashes = {}
        # key call site:
handler._execute(transforms, *args, **kwargs)
return handler
def reverse_url(self, name, *args):
"""Returns a URL path for handler named `name`
The handler must be added to the application as a named URLSpec
"""
if name in self.named_handlers:
return self.named_handlers[name].reverse(*args)
raise KeyError("%s not found in named urls" % name)
# ----------------------------------------------------
# Exception base class
# ----------------------------------------------------
class HTTPError(Exception):
"""An exception that will turn into an HTTP error response."""
def __init__(self, status_code, log_message=None, *args):
self.status_code = status_code
self.log_message = log_message
self.args = args
def __str__(self):
message = "HTTP %d: %s" % (
self.status_code, httplib.responses[self.status_code])
if self.log_message:
return message + " (" + (self.log_message % self.args) + ")"
else:
return message
# ----------------------------------------------------
# Subclass: error handling
# ----------------------------------------------------
class ErrorHandler(RequestHandler):
"""Generates an error response with status_code for all requests."""
def __init__(self, application, request, status_code):
RequestHandler.__init__(self, application, request)
self.set_status(status_code)
def prepare(self):
raise HTTPError(self._status_code)
# ----------------------------------------------------
# Subclass: redirect handling
# ----------------------------------------------------
class RedirectHandler(RequestHandler):
"""Redirects the client to the given URL for all GET requests.
You should provide the keyword argument "url" to the handler, e.g.:
application = web.Application([
(r"/oldpath", web.RedirectHandler, {"url": "/newpath"}),
])
"""
def __init__(self, application, request, url, permanent=True):
RequestHandler.__init__(self, application, request)
self._url = url
self._permanent = permanent
    # a GET request becomes a redirect call
def get(self):
self.redirect(self._url, permanent=self._permanent)
# ----------------------------------------------------
# Subclass: static resource handling
# Notes:
# - overrides the get() and head() methods
# ----------------------------------------------------
class StaticFileHandler(RequestHandler):
"""A simple handler that can serve static content from a directory.
To map a path to this handler for a static data directory /var/www,
you would add a line to your application like:
application = web.Application([
(r"/static/(.*)", web.StaticFileHandler, {"path": "/var/www"}),
])
The local root directory of the content should be passed as the "path"
argument to the handler.
To support aggressive browser caching, if the argument "v" is given
with the path, we set an infinite HTTP expiration header. So, if you
want browsers to cache a file indefinitely, send them to, e.g.,
/static/images/myimage.png?v=xxx.
"""
def __init__(self, application, request, path):
RequestHandler.__init__(self, application, request)
self.root = os.path.abspath(path) + os.path.sep
def head(self, path):
self.get(path, include_body=False)
def get(self, path, include_body=True):
abspath = os.path.abspath(os.path.join(self.root, path))
if not abspath.startswith(self.root):
raise HTTPError(403, "%s is not in root static directory", path)
if not os.path.exists(abspath):
raise HTTPError(404)
if not os.path.isfile(abspath):
raise HTTPError(403, "%s is not a file", path)
stat_result = os.stat(abspath)
modified = datetime.datetime.fromtimestamp(stat_result[stat.ST_MTIME])
self.set_header("Last-Modified", modified)
if "v" in self.request.arguments:
self.set_header("Expires", datetime.datetime.utcnow() + \
datetime.timedelta(days=365*10))
self.set_header("Cache-Control", "max-age=" + str(86400*365*10))
else:
self.set_header("Cache-Control", "public")
mime_type, encoding = mimetypes.guess_type(abspath)
if mime_type:
self.set_header("Content-Type", mime_type)
self.set_extra_headers(path)
# Check the If-Modified-Since, and don't send the result if the
# content has not been modified
ims_value = self.request.headers.get("If-Modified-Since")
if ims_value is not None:
date_tuple = email.utils.parsedate(ims_value)
if_since = datetime.datetime.fromtimestamp(time.mktime(date_tuple))
if if_since >= modified:
self.set_status(304)
return
if not include_body:
return
self.set_header("Content-Length", stat_result[stat.ST_SIZE])
        file = open(abspath, "rb")  # read the file
        try:
            self.write(file.read())  # write it out
finally:
file.close()
def set_extra_headers(self, path):
"""For subclass to add extra headers to the response"""
pass
# ----------------------------------------------------
# Subclass: wraps another HTTP server callback
# Notes:
# - overrides the predefined prepare() hook
# ----------------------------------------------------
class FallbackHandler(RequestHandler):
"""A RequestHandler that wraps another HTTP server callback.
The fallback is a callable object that accepts an HTTPRequest,
such as an Application or tornado.wsgi.WSGIContainer. This is most
useful to use both tornado RequestHandlers and WSGI in the same server.
Typical usage:
wsgi_app = tornado.wsgi.WSGIContainer(
django.core.handlers.wsgi.WSGIHandler())
application = tornado.web.Application([
(r"/foo", FooHandler),
(r".*", FallbackHandler, dict(fallback=wsgi_app),
])
"""
def __init__(self, app, request, fallback):
RequestHandler.__init__(self, app, request)
self.fallback = fallback
    # override the hook
def prepare(self):
self.fallback(self.request)
self._finished = True
# ----------------------------------------------------
# Base class: output transforms
# Notes:
# - two subclasses:
#   - GZipContentEncoding()
#   - ChunkedTransferEncoding()
# ----------------------------------------------------
class OutputTransform(object):
"""A transform modifies the result of an HTTP request (e.g., GZip encoding)
A new transform instance is created for every request. See the
ChunkedTransferEncoding example below if you want to implement a
new Transform.
"""
def __init__(self, request):
pass
def transform_first_chunk(self, headers, chunk, finishing):
return headers, chunk
def transform_chunk(self, chunk, finishing):
return chunk
class GZipContentEncoding(OutputTransform):
"""Applies the gzip content encoding to the response.
See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.11
"""
CONTENT_TYPES = set([
"text/plain", "text/html", "text/css", "text/xml",
"application/x-javascript", "application/xml", "application/atom+xml",
"text/javascript", "application/json", "application/xhtml+xml"])
MIN_LENGTH = 5
def __init__(self, request):
self._gzipping = request.supports_http_1_1() and \
"gzip" in request.headers.get("Accept-Encoding", "")
def transform_first_chunk(self, headers, chunk, finishing):
if self._gzipping:
ctype = headers.get("Content-Type", "").split(";")[0]
self._gzipping = (ctype in self.CONTENT_TYPES) and \
(not finishing or len(chunk) >= self.MIN_LENGTH) and \
(finishing or "Content-Length" not in headers) and \
("Content-Encoding" not in headers)
if self._gzipping:
headers["Content-Encoding"] = "gzip"
self._gzip_value = cStringIO.StringIO()
self._gzip_file = gzip.GzipFile(mode="w", fileobj=self._gzip_value)
self._gzip_pos = 0
            chunk = self.transform_chunk(chunk, finishing)  # key call
if "Content-Length" in headers:
headers["Content-Length"] = str(len(chunk))
return headers, chunk
def transform_chunk(self, chunk, finishing):
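        # _gzip_pos tracks how much of the in-memory gzip buffer has already
        # been emitted, so each call returns only the bytes appended since the
        # previous call.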
if self._gzipping:
self._gzip_file.write(chunk)<|fim▁hole|> if finishing:
self._gzip_file.close()
else:
self._gzip_file.flush()
chunk = self._gzip_value.getvalue()
if self._gzip_pos > 0:
chunk = chunk[self._gzip_pos:]
self._gzip_pos += len(chunk)
return chunk
class ChunkedTransferEncoding(OutputTransform):
"""Applies the chunked transfer encoding to the response.
See http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.6.1
"""
def __init__(self, request):
self._chunking = request.supports_http_1_1()
def transform_first_chunk(self, headers, chunk, finishing):
if self._chunking:
# No need to chunk the output if a Content-Length is specified
if "Content-Length" in headers or "Transfer-Encoding" in headers:
self._chunking = False
else:
headers["Transfer-Encoding"] = "chunked"
chunk = self.transform_chunk(chunk, finishing)
return headers, chunk
def transform_chunk(self, block, finishing):
if self._chunking:
# Don't write out empty chunks because that means END-OF-STREAM
# with chunked encoding
if block:
block = ("%x" % len(block)) + "\r\n" + block + "\r\n"
if finishing:
block += "0\r\n\r\n"
return block
# ----------------------------------------------------
# Decorator: authentication
# Logic:
# - if the current user is logged in, call the method normally
# - if the current user is not logged in:
#   - for GET requests:
#     - fetch the login page URL
#     - record a 'next' parameter in that URL, remembering the page that
#       was being visited before login
#     - redirect to the login page
#     - after a successful login, the 'next' parameter sends the user back
#       to the page they came from
#   - any other request raises a 403 error page
# Remarks:
# - a textbook implementation of access control, worth studying
# - the code is compact and not complicated
# ----------------------------------------------------
def authenticated(method):
"""Decorate methods with this to require that the user be logged in."""
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
        if not self.current_user:  # user is not logged in
            if self.request.method == "GET":  # handle GET requests
                url = self.get_login_url()  # URL of the login page
                if "?" not in url:
                    # Key step:
                    # - append a <next> parameter to the URL [format: ?next=/xxxx.html]
                    # - purpose: after a successful login, send the user back
                    #   to the page visited before logging in
                    url += "?" + urllib.urlencode(dict(next=self.request.uri))
                self.redirect(url)  # redirect
                return
            raise HTTPError(403)  # any other request raises 403
        return method(self, *args, **kwargs)  # logged in: call the method normally
return wrapper
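# Usage sketch (hypothetical handler; illustrative only):
#
#     class AccountHandler(RequestHandler):
#         @authenticated
#         def get(self):
#             self.write("hello " + self.current_user["name"])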
# ----------------------------------------------------
# Interface class: UI modules (for CSS and JS)
# Notes:
# - defines hook methods that must be subclassed and overridden before use
# ----------------------------------------------------
class UIModule(object):
"""A UI re-usable, modular unit on a page.
UI modules often execute additional queries, and they can include
additional CSS and JavaScript that will be included in the output
page, which is automatically inserted on page render.
"""
def __init__(self, handler):
self.handler = handler
self.request = handler.request
self.ui = handler.ui
self.current_user = handler.current_user
self.locale = handler.locale
    # predefined hook: must be overridden before use
def render(self, *args, **kwargs):
raise NotImplementedError()
def embedded_javascript(self):
"""Returns a JavaScript string that will be embedded in the page."""
return None
def javascript_files(self):
"""Returns a list of JavaScript files required by this module."""
return None
def embedded_css(self):
"""Returns a CSS string that will be embedded in the page."""
return None
def css_files(self):
"""Returns a list of CSS files required by this module."""
return None
def html_head(self):
"""Returns a CSS string that will be put in the <head/> element"""
return None
def html_body(self):
"""Returns an HTML string that will be put in the <body/> element"""
return None
def render_string(self, path, **kwargs):
return self.handler.render_string(path, **kwargs)
# ----------------------------------------------------
# Interface class: URL matching
# Notes:
# - maps URLs to handlers
# ----------------------------------------------------
class URLSpec(object):
"""Specifies mappings between URLs and handlers."""
def __init__(self, pattern, handler_class, kwargs={}, name=None):
"""Creates a URLSpec.
Parameters:
pattern: Regular expression to be matched. Any groups in the regex
will be passed in to the handler's get/post/etc methods as
arguments.
handler_class: RequestHandler subclass to be invoked.
kwargs (optional): A dictionary of additional arguments to be passed
to the handler's constructor.
name (optional): A name for this handler. Used by
Application.reverse_url.
"""
if not pattern.endswith('$'):
pattern += '$'
        self.regex = re.compile(pattern)  # compiled regex
self.handler_class = handler_class
self.kwargs = kwargs
self.name = name
self._path, self._group_count = self._find_groups()
def _find_groups(self):
"""Returns a tuple (reverse string, group count) for a url.
For example: Given the url pattern /([0-9]{4})/([a-z-]+)/, this method
would return ('/%s/%s/', 2).
"""
pattern = self.regex.pattern
if pattern.startswith('^'):
pattern = pattern[1:]
if pattern.endswith('$'):
pattern = pattern[:-1]
if self.regex.groups != pattern.count('('):
# The pattern is too complicated for our simplistic matching,
# so we can't support reversing it.
return (None, None)
pieces = []
for fragment in pattern.split('('):
if ')' in fragment:
paren_loc = fragment.index(')')
if paren_loc >= 0:
pieces.append('%s' + fragment[paren_loc + 1:])
else:
pieces.append(fragment)
return (''.join(pieces), self.regex.groups)
def reverse(self, *args):
assert self._path is not None, \
"Cannot reverse url regex " + self.regex.pattern
assert len(args) == self._group_count, "required number of arguments "\
"not found"
if not len(args):
return self._path
return self._path % tuple([str(a) for a in args])
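# Example (handler name illustrative): URLSpec(r"/article/([0-9]+)",
# ArticleHandler, name="article").reverse(42) produces "/article/42" from the
# reverse string "/article/%s" computed by _find_groups().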
url = URLSpec
# ----------------------------------------------------
# UTF-8 encoding check
# Logic:
# - if s is a unicode string, encode it as UTF-8 and return it
# - if s is not a string type, raise an assertion error
# - if s is a byte (str) string, return it unchanged
# ----------------------------------------------------
def _utf8(s):
if isinstance(s, unicode):
return s.encode("utf-8")
assert isinstance(s, str)
return s
# ----------------------------------------------------
# Unicode decoding check
# Logic:
# - broadly mirrors the _utf8() function
# ----------------------------------------------------
def _unicode(s):
if isinstance(s, str):
try:
return s.decode("utf-8")
except UnicodeDecodeError:
raise HTTPError(400, "Non-utf8 argument")
assert isinstance(s, unicode)
return s
def _time_independent_equals(a, b):
if len(a) != len(b):
return False
result = 0
for x, y in zip(a, b):
result |= ord(x) ^ ord(y)
return result == 0
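# Constant-time comparison: every byte pair is XORed and OR-ed into the result
# instead of returning at the first mismatch, so response timing cannot be
# used to recover a secret token byte by byte.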
class _O(dict):
"""Makes a dictionary behave like an object."""
def __getattr__(self, name):
try:
return self[name]
except KeyError:
raise AttributeError(name)
def __setattr__(self, name, value):
self[name] = value<|fim▁end|> | |
<|file_name|>regress-208496-001.js<|end_file_name|><|fim▁begin|>/* -*- Mode: javascript; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/*
*
* Date: 05 June 2003
* SUMMARY: Testing |with (f)| inside the definition of |function f()|
*
* See http://bugzilla.mozilla.org/show_bug.cgi?id=208496
*
*/
//-----------------------------------------------------------------------------
var gTestfile = 'regress-208496-001.js';
var UBound = 0;
var BUGNUMBER = 208496;
var summary = 'Testing |with (f)| inside the definition of |function f()|';
var status = '';
var statusitems = [];
var actual = '(TEST FAILURE)';
var actualvalues = [];
var expect = '';
var expectedvalues = [];
/*
* GLOBAL SCOPE
*/
function f(par)
{
var a = par;
with(f)
{
var b = par;
actual = b;
}
}
status = inSection(1);
f('abc'); // this sets |actual|
expect = 'abc';
addThis();
status = inSection(2);
f(111 + 222); // sets |actual|
expect = 333;
addThis();
/*
* EVAL SCOPE
*/
var s = '';
s += 'function F(par)';
s += '{';
s += ' var a = par;';
s += ' with(F)';
s += ' {';
s += ' var b = par;';
s += ' actual = b;';
s += ' }';
s += '}';
s += 'status = inSection(3);';
s += 'F("abc");'; // sets |actual|
s += 'expect = "abc";';
s += 'addThis();';
s += 'status = inSection(4);';
s += 'F(111 + 222);'; // sets |actual|
s += 'expect = 333;';
s += 'addThis();';
eval(s);
/*
* FUNCTION SCOPE
*/
function g(par)
{
// Add outer variables to complicate the scope chain -
var a = '(TEST FAILURE)';<|fim▁hole|> function h(par)
{
var a = par;
with(h)
{
var b = par;
actual = b;
}
}
}
status = inSection(5);
g('abc'); // sets |actual|
expect = 'abc';
addThis();
status = inSection(6);
g(111 + 222); // sets |actual|
expect = 333;
addThis();
//-----------------------------------------------------------------------------
test();
//-----------------------------------------------------------------------------
function addThis()
{
statusitems[UBound] = status;
actualvalues[UBound] = actual;
expectedvalues[UBound] = expect;
UBound++;
}
function test()
{
enterFunc('test');
printBugNumber(BUGNUMBER);
printStatus(summary);
for (var i=0; i<UBound; i++)
{
reportCompare(expectedvalues[i], actualvalues[i], statusitems[i]);
}
exitFunc ('test');
}<|fim▁end|> | var b = '(TEST FAILURE)';
h(par);
|
<|file_name|>test_init.py<|end_file_name|><|fim▁begin|>"""Tests for init functions."""
from datetime import timedelta
from zoneminder.zm import ZoneMinder
from homeassistant import config_entries
from homeassistant.components.zoneminder import const
from homeassistant.components.zoneminder.common import is_client_in_data
from homeassistant.config_entries import (
ENTRY_STATE_LOADED,
ENTRY_STATE_NOT_LOADED,
ENTRY_STATE_SETUP_RETRY,
)
from homeassistant.const import (
ATTR_ID,
ATTR_NAME,
CONF_HOST,
CONF_PASSWORD,
CONF_PATH,
CONF_SOURCE,
CONF_SSL,
CONF_USERNAME,
CONF_VERIFY_SSL,
)
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.async_mock import MagicMock, patch
from tests.common import async_fire_time_changed
async def test_no_yaml_config(hass: HomeAssistant) -> None:
"""Test empty yaml config."""
with patch(
"homeassistant.components.zoneminder.common.ZoneMinder", autospec=ZoneMinder
) as zoneminder_mock:
zm_client: ZoneMinder = MagicMock(spec=ZoneMinder)
zm_client.get_zms_url.return_value = "http://host1/path_zms1"
zm_client.login.return_value = True
zm_client.get_monitors.return_value = []
zoneminder_mock.return_value = zm_client
hass_config = {const.DOMAIN: []}
await async_setup_component(hass, const.DOMAIN, hass_config)
await hass.async_block_till_done()
assert not hass.services.has_service(const.DOMAIN, const.SERVICE_SET_RUN_STATE)
async def test_yaml_config_import(hass: HomeAssistant) -> None:
"""Test yaml config import."""
with patch(
"homeassistant.components.zoneminder.common.ZoneMinder", autospec=ZoneMinder
) as zoneminder_mock:
zm_client: ZoneMinder = MagicMock(spec=ZoneMinder)
zm_client.get_zms_url.return_value = "http://host1/path_zms1"
zm_client.login.return_value = True
zm_client.get_monitors.return_value = []
zoneminder_mock.return_value = zm_client
hass_config = {const.DOMAIN: [{CONF_HOST: "host1"}]}
await async_setup_component(hass, const.DOMAIN, hass_config)
await hass.async_block_till_done()
assert hass.services.has_service(const.DOMAIN, const.SERVICE_SET_RUN_STATE)
async def test_load_call_service_and_unload(hass: HomeAssistant) -> None:
"""Test config entry load/unload and calling of service."""
with patch(
"homeassistant.components.zoneminder.common.ZoneMinder", autospec=ZoneMinder
) as zoneminder_mock:
zm_client: ZoneMinder = MagicMock(spec=ZoneMinder)
zm_client.get_zms_url.return_value = "http://host1/path_zms1"
zm_client.login.side_effect = [True, True, False, True]
zm_client.get_monitors.return_value = []
zm_client.is_available.return_value = True
zoneminder_mock.return_value = zm_client
await hass.config_entries.flow.async_init(
const.DOMAIN,
context={CONF_SOURCE: config_entries.SOURCE_USER},
data={
CONF_HOST: "host1",
CONF_USERNAME: "username1",
CONF_PASSWORD: "password1",
CONF_PATH: "path1",
const.CONF_PATH_ZMS: "path_zms1",
CONF_SSL: False,
CONF_VERIFY_SSL: True,
},
)
await hass.async_block_till_done()
config_entry = next(iter(hass.config_entries.async_entries(const.DOMAIN)), None)
assert config_entry
assert config_entry.state == ENTRY_STATE_SETUP_RETRY
assert not is_client_in_data(hass, "host1")
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=10))
await hass.async_block_till_done()
assert config_entry.state == ENTRY_STATE_LOADED
assert is_client_in_data(hass, "host1")
assert hass.services.has_service(const.DOMAIN, const.SERVICE_SET_RUN_STATE)
await hass.services.async_call(
const.DOMAIN,
const.SERVICE_SET_RUN_STATE,
{ATTR_ID: "host1", ATTR_NAME: "away"},
)
await hass.async_block_till_done()
zm_client.set_active_state.assert_called_with("away")
await config_entry.async_unload(hass)
await hass.async_block_till_done()
assert config_entry.state == ENTRY_STATE_NOT_LOADED
assert not is_client_in_data(hass, "host1")<|fim▁hole|><|fim▁end|> | assert not hass.services.has_service(const.DOMAIN, const.SERVICE_SET_RUN_STATE) |
<|file_name|>apps.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
class PhotosAppConfig(AppConfig):
name = 'livinglots_usercontent.photos'
def ready(self):
try:
from actstream import registry
from . import signals
registry.register(self.get_model('Photo'))
except ImportError:
# django-activity-stream is not installed and that's okay
pass<|fim▁end|> | from django.apps import AppConfig |
<|file_name|>views.py<|end_file_name|><|fim▁begin|># Create your views here.<|fim▁hole|>from django.contrib.auth.models import User
from repository.models import PisiPackage
def show_user (request, name=None):
user = get_object_or_404 (User, username=name)
context = { 'user' : user }
packages = None
try:
packages = PisiPackage.objects.filter(known_user=user).order_by("-date_updated")
count = len(packages)
total_packages = len(PisiPackage.objects.all())
        pct = float(count) / total_packages * 100
packages = packages[:7]
context = { 'user': user, 'package_count': count, 'package_ratio': pct, 'packages': packages}
    except Exception, e:
        print e
    return render(request, "profiles/individ.html", context)
from django.shortcuts import render, get_object_or_404
|
<|file_name|>lasso_regression.py<|end_file_name|><|fim▁begin|>from sklearn.linear_model import Lasso
def get_lasso_prediction(train_data, train_truth, test_data, test_truth, alpha=1.0, iter_id=0):
clf = Lasso(alpha=alpha)
clf.fit(train_data, train_truth)
predicted = clf.predict(test_data)<|fim▁hole|><|fim▁end|> | return predicted.ravel() |
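# Usage sketch (array shapes assumed; illustrative only). test_truth and
# iter_id are accepted for interface symmetry but are unused here.
#   import numpy as np
#   X_tr, y_tr = np.random.rand(100, 5), np.random.rand(100)
#   X_te, y_te = np.random.rand(20, 5), np.random.rand(20)
#   preds = get_lasso_prediction(X_tr, y_tr, X_te, y_te, alpha=0.1)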
<|file_name|>AntiXSS.java<|end_file_name|><|fim▁begin|>/**
 * Balero CMS Project: a 100% Mexican open-source project.
 * Official site: http://www.balerocms.com
 *
 * @author Anibal Gomez <[email protected]>
 * @copyright Copyright (C) 2015 Neblina Software. All rights reserved.
 * @license BSD license; see LICENSE.txt
*/
package com.neblina.balero.util;
import org.owasp.html.Sanitizers;
public class AntiXSS {
/**
     * Sanitizes common formatting elements (b, p, etc.) along with images and links.
* @author Anibal Gomez
* @param input Unsafe Input
* @return Safe Output
*/<|fim▁hole|> public String blind(String input) {
org.owasp.html.PolicyFactory policy = Sanitizers.STYLES
.and(Sanitizers.FORMATTING)
.and(Sanitizers.IMAGES)
.and(Sanitizers.LINKS);
String output = policy.sanitize(input);
return output;
}
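    // Usage sketch (output illustrative): elements outside the allowed
    // policies are stripped while basic formatting survives, e.g.
    //   new AntiXSS().blind("<b>hi</b><script>alert(1)</script>")  // -> "<b>hi</b>"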
}<|fim▁end|> | |
<|file_name|>htmliframeelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::HTMLIFrameElementBinding;
use dom::bindings::utils::{DOMString, ErrorResult};
use dom::document::AbstractDocument;
use dom::element::HTMLIframeElementTypeId;
use dom::htmlelement::HTMLElement;
use dom::node::{AbstractNode, Node};
use dom::windowproxy::WindowProxy;
use extra::url::Url;
use servo_msg::constellation_msg::{PipelineId, SubpageId};
use std::ascii::StrAsciiExt;
enum SandboxAllowance {
AllowNothing = 0x00,
AllowSameOrigin = 0x01,
AllowTopNavigation = 0x02,
AllowForms = 0x04,
AllowScripts = 0x08,
AllowPointerLock = 0x10,
AllowPopups = 0x20
}
pub struct HTMLIFrameElement {
htmlelement: HTMLElement,
frame: Option<Url>,
size: Option<IFrameSize>,
sandbox: Option<u8>
}
pub struct IFrameSize {
pipeline_id: PipelineId,
subpage_id: SubpageId,
}
impl HTMLIFrameElement {
pub fn is_sandboxed(&self) -> bool {
self.sandbox.is_some()
}
}
impl HTMLIFrameElement {
pub fn new_inherited(localName: ~str, document: AbstractDocument) -> HTMLIFrameElement {
HTMLIFrameElement {
htmlelement: HTMLElement::new_inherited(HTMLIframeElementTypeId, localName, document),
frame: None,
size: None,
sandbox: None,
}
}
pub fn new(localName: ~str, document: AbstractDocument) -> AbstractNode {
let element = HTMLIFrameElement::new_inherited(localName, document);
Node::reflect_node(@mut element, document, HTMLIFrameElementBinding::Wrap)
}
}
impl HTMLIFrameElement {
pub fn Src(&self) -> DOMString {
~""
}
pub fn SetSrc(&mut self, _src: DOMString) -> ErrorResult {
Ok(())
}
pub fn Srcdoc(&self) -> DOMString {
~""
}
pub fn SetSrcdoc(&mut self, _srcdoc: DOMString) -> ErrorResult {
Ok(())
}
pub fn Name(&self) -> DOMString {
~""
}
pub fn SetName(&mut self, _name: DOMString) -> ErrorResult {
Ok(())
}
pub fn Sandbox(&self, _abstract_self: AbstractNode) -> DOMString {
match self.htmlelement.element.GetAttribute(~"sandbox") {
Some(s) => s.to_owned(),
None => ~"",
}
}
pub fn SetSandbox(&mut self, abstract_self: AbstractNode, sandbox: DOMString) {
self.htmlelement.element.SetAttribute(abstract_self, ~"sandbox", sandbox);
}
pub fn AfterSetAttr(&mut self, name: DOMString, value: DOMString) {
if "sandbox" == name {
let mut modes = AllowNothing as u8;
for word in value.split_iter(' ') {
// FIXME: Workaround for https://github.com/mozilla/rust/issues/10683
let word_lower = word.to_ascii_lower();
modes |= match word_lower.as_slice() {
"allow-same-origin" => AllowSameOrigin,
"allow-forms" => AllowForms,
"allow-pointer-lock" => AllowPointerLock,
"allow-popups" => AllowPopups,
"allow-scripts" => AllowScripts,
"allow-top-navigation" => AllowTopNavigation,
_ => AllowNothing
} as u8;
}
self.sandbox = Some(modes);
}
}
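    // Example (illustrative): sandbox="allow-scripts allow-forms" stores
    // Some(AllowScripts as u8 | AllowForms as u8), i.e. Some(0x0c), in self.sandbox.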
pub fn AllowFullscreen(&self) -> bool {
false
}
pub fn SetAllowFullscreen(&mut self, _allow: bool) -> ErrorResult {
Ok(())
}
pub fn Width(&self) -> DOMString {
~""
}
pub fn SetWidth(&mut self, _width: DOMString) -> ErrorResult {
Ok(())
}
pub fn Height(&self) -> DOMString {
~""
}
pub fn SetHeight(&mut self, _height: DOMString) -> ErrorResult {
Ok(())
}
pub fn GetContentDocument(&self) -> Option<AbstractDocument> {
None
}
pub fn GetContentWindow(&self) -> Option<@mut WindowProxy> {
None
}
pub fn Align(&self) -> DOMString {
~""
}
pub fn SetAlign(&mut self, _align: DOMString) -> ErrorResult {
Ok(())
}
pub fn Scrolling(&self) -> DOMString {
~""
}
pub fn SetScrolling(&mut self, _scrolling: DOMString) -> ErrorResult {
Ok(())
}
pub fn FrameBorder(&self) -> DOMString {
~""
}
pub fn SetFrameBorder(&mut self, _frameborder: DOMString) -> ErrorResult {
Ok(())
}
pub fn LongDesc(&self) -> DOMString {
~""
}
pub fn SetLongDesc(&mut self, _longdesc: DOMString) -> ErrorResult {
Ok(())
}
pub fn MarginHeight(&self) -> DOMString {
~""
}
pub fn SetMarginHeight(&mut self, _marginheight: DOMString) -> ErrorResult {
Ok(())<|fim▁hole|> }
pub fn SetMarginWidth(&mut self, _marginwidth: DOMString) -> ErrorResult {
Ok(())
}
pub fn GetSVGDocument(&self) -> Option<AbstractDocument> {
None
}
}<|fim▁end|> | }
pub fn MarginWidth(&self) -> DOMString {
~"" |
<|file_name|>8646922c8a04_change_default_pool_slots_to_1.py<|end_file_name|><|fim▁begin|>#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Change default pool_slots to 1
Revision ID: 8646922c8a04
Revises: 449b4072c2da
Create Date: 2021-02-23 23:19:22.409973
"""
import dill
import sqlalchemy as sa
from alembic import op
from sqlalchemy import Column, Float, Integer, PickleType, String
# revision identifiers, used by Alembic.
from sqlalchemy.ext.declarative import declarative_base
from airflow.models.base import COLLATION_ARGS
from airflow.utils.sqlalchemy import UtcDateTime
revision = '8646922c8a04'
down_revision = '449b4072c2da'
branch_labels = None
depends_on = None
Base = declarative_base()
BATCH_SIZE = 5000
ID_LEN = 250
class TaskInstance(Base): # type: ignore
"""Task instance class."""
__tablename__ = "task_instance"
task_id = Column(String(ID_LEN, **COLLATION_ARGS), primary_key=True)
dag_id = Column(String(ID_LEN, **COLLATION_ARGS), primary_key=True)
execution_date = Column(UtcDateTime, primary_key=True)
start_date = Column(UtcDateTime)
end_date = Column(UtcDateTime)
duration = Column(Float)
state = Column(String(20))
_try_number = Column('try_number', Integer, default=0)
max_tries = Column(Integer)
hostname = Column(String(1000))
unixname = Column(String(1000))
job_id = Column(Integer)
pool = Column(String(50), nullable=False)
pool_slots = Column(Integer, default=1)
queue = Column(String(256))
priority_weight = Column(Integer)
operator = Column(String(1000))
queued_dttm = Column(UtcDateTime)
queued_by_job_id = Column(Integer)
pid = Column(Integer)
executor_config = Column(PickleType(pickler=dill))
external_executor_id = Column(String(ID_LEN, **COLLATION_ARGS))<|fim▁hole|>def upgrade():
"""Change default pool_slots to 1 and make pool_slots not nullable"""
connection = op.get_bind()
sessionmaker = sa.orm.sessionmaker()
session = sessionmaker(bind=connection)
session.query(TaskInstance).filter(TaskInstance.pool_slots.is_(None)).update(
{TaskInstance.pool_slots: 1}, synchronize_session=False
)
session.commit()
with op.batch_alter_table("task_instance", schema=None) as batch_op:
batch_op.alter_column("pool_slots", existing_type=sa.Integer, nullable=False)
def downgrade():
"""Unapply Change default pool_slots to 1"""
with op.batch_alter_table("task_instance", schema=None) as batch_op:
batch_op.alter_column("pool_slots", existing_type=sa.Integer, nullable=True)<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>VERSION = "0.12beta4"<|fim▁hole|><|fim▁end|> | VERSION_NAME = "Anderssen" |
<|file_name|>data_source_aws_lambda_layer_version_test.go<|end_file_name|><|fim▁begin|>package aws
import (
"fmt"
"testing"
"github.com/hashicorp/terraform/helper/acctest"
"github.com/hashicorp/terraform/helper/resource"
)
func TestAccDataSourceAWSLambdaLayerVersion_basic(t *testing.T) {
rName := acctest.RandomWithPrefix("tf-acc-test")
dataSourceName := "data.aws_lambda_layer_version.test"
resourceName := "aws_lambda_layer_version.test"
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
Config: testAccDataSourceAWSLambdaLayerVersionConfigBasic(rName),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttrPair(dataSourceName, "layer_name", resourceName, "layer_name"),
resource.TestCheckResourceAttrPair(dataSourceName, "version", resourceName, "version"),
resource.TestCheckResourceAttrPair(dataSourceName, "compatible_runtimes.%", resourceName, "compatible_runtimes.%s"),
resource.TestCheckResourceAttrPair(dataSourceName, "description", resourceName, "description"),
resource.TestCheckResourceAttrPair(dataSourceName, "license_info", resourceName, "license_info"),
resource.TestCheckResourceAttrPair(dataSourceName, "arn", resourceName, "arn"),
resource.TestCheckResourceAttrPair(dataSourceName, "layer_arn", resourceName, "layer_arn"),
resource.TestCheckResourceAttrPair(dataSourceName, "created_date", resourceName, "created_date"),
resource.TestCheckResourceAttrPair(dataSourceName, "source_code_hash", resourceName, "source_code_hash"),
resource.TestCheckResourceAttrPair(dataSourceName, "source_code_size", resourceName, "source_code_size"),
),
},<|fim▁hole|>func TestAccDataSourceAWSLambdaLayerVersion_version(t *testing.T) {
rName := acctest.RandomWithPrefix("tf-acc-test")
dataSourceName := "data.aws_lambda_layer_version.test"
resourceName := "aws_lambda_layer_version.test"
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
Config: testAccDataSourceAWSLambdaLayerVersionConfigVersion(rName),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttrPair(dataSourceName, "layer_name", resourceName, "layer_name"),
resource.TestCheckResourceAttrPair(dataSourceName, "version", resourceName, "version"),
),
},
},
})
}
func TestAccDataSourceAWSLambdaLayerVersion_runtime(t *testing.T) {
rName := acctest.RandomWithPrefix("tf-acc-test")
dataSourceName := "data.aws_lambda_layer_version.test"
resourceName := "aws_lambda_layer_version.test"
resource.ParallelTest(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
Steps: []resource.TestStep{
{
Config: testAccDataSourceAWSLambdaLayerVersionConfigRuntimes(rName),
Check: resource.ComposeAggregateTestCheckFunc(
resource.TestCheckResourceAttrPair(dataSourceName, "layer_name", resourceName, "layer_name"),
resource.TestCheckResourceAttrPair(dataSourceName, "version", resourceName, "version"),
),
},
},
})
}
func testAccDataSourceAWSLambdaLayerVersionConfigBasic(rName string) string {
return fmt.Sprintf(`
resource "aws_lambda_layer_version" "test" {
filename = "test-fixtures/lambdatest.zip"
layer_name = %[1]q
compatible_runtimes = ["nodejs8.10"]
}
data "aws_lambda_layer_version" "test" {
layer_name = "${aws_lambda_layer_version.test.layer_name}"
}
`, rName)
}
func testAccDataSourceAWSLambdaLayerVersionConfigVersion(rName string) string {
return fmt.Sprintf(`
resource "aws_lambda_layer_version" "test" {
filename = "test-fixtures/lambdatest.zip"
layer_name = %[1]q
compatible_runtimes = ["nodejs8.10"]
}
resource "aws_lambda_layer_version" "test_two" {
filename = "test-fixtures/lambdatest_modified.zip"
layer_name = %[1]q
compatible_runtimes = ["nodejs8.10"]
}
data "aws_lambda_layer_version" "test" {
layer_name = "${aws_lambda_layer_version.test_two.layer_name}"
version = "${aws_lambda_layer_version.test.version}"
}
`, rName)
}
func testAccDataSourceAWSLambdaLayerVersionConfigRuntimes(rName string) string {
return fmt.Sprintf(`
resource "aws_lambda_layer_version" "test" {
filename = "test-fixtures/lambdatest.zip"
layer_name = %[1]q
compatible_runtimes = ["go1.x"]
}
resource "aws_lambda_layer_version" "test_two" {
filename = "test-fixtures/lambdatest_modified.zip"
layer_name = "${aws_lambda_layer_version.test.layer_name}"
compatible_runtimes = ["nodejs8.10"]
}
data "aws_lambda_layer_version" "test" {
layer_name = "${aws_lambda_layer_version.test_two.layer_name}"
compatible_runtime = "go1.x"
}
`, rName)
}<|fim▁end|> | },
})
}
|
<|file_name|>pipeline_activity_crud.ts<|end_file_name|><|fim▁begin|>/*
* Copyright 2020 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {ApiRequestBuilder, ApiResult, ApiVersion} from "helpers/api_request_builder";
import {SparkRoutes} from "helpers/spark_routes";
import {PipelineActivity, Stage} from "models/pipeline_activity/pipeline_activity";
import {ResultAwarePage} from "views/pages/page_operations";
export class PipelineActivityService {
private static API_VERSION_HEADER = ApiVersion.v1;
activities(pipelineName: string, start: number, filter: string, page: ResultAwarePage<PipelineActivity>): void {
let params: { [key: string]: string | number } = {pipelineName, start};
if (filter) {
params = {pipelineName, start: 0, perPage: 25, labelFilter: filter};
}
ApiRequestBuilder.GET(SparkRoutes.apiPipelineActivity(params), PipelineActivityService.API_VERSION_HEADER)
.then((result) => this.onResult(result, page));
}
runStage(stage: Stage) {
return ApiRequestBuilder.POST(
SparkRoutes.runStage(stage.pipelineName(), stage.pipelineCounter(), stage.stageName()),
PipelineActivityService.API_VERSION_HEADER);
}<|fim▁hole|> cancelStageInstance(stage: Stage) {
return ApiRequestBuilder.POST(
SparkRoutes.cancelStageInstance(stage.pipelineName(), stage.pipelineCounter(), stage.stageName(), stage.stageCounter()),
PipelineActivityService.API_VERSION_HEADER
);
}
run(pipelineName: string) {
return ApiRequestBuilder.POST(SparkRoutes.pipelineTriggerPath(pipelineName), PipelineActivityService.API_VERSION_HEADER);
}
pausePipeline(pipelineName: string, pauseCause: string) {
return ApiRequestBuilder.POST(SparkRoutes.pipelinePausePath(pipelineName),
PipelineActivityService.API_VERSION_HEADER,
{payload: {pause_cause: pauseCause}});
}
unpausePipeline(pipelineName: string) {
return ApiRequestBuilder.POST(SparkRoutes.pipelineUnpausePath(pipelineName), PipelineActivityService.API_VERSION_HEADER);
}
commentOnPipelineRun(pipelineName: string, labelOrCounter: string | number, comment: string) {
return ApiRequestBuilder.POST(SparkRoutes.commentOnPipelineInstance(pipelineName, labelOrCounter),
PipelineActivityService.API_VERSION_HEADER,
{payload: {comment}});
}
private onResult(result: ApiResult<string>, page: ResultAwarePage<PipelineActivity>) {
return result.do((successResponse) => page.onSuccess(PipelineActivity.fromJSON(JSON.parse(successResponse.body))),
(errorResponse) => page.onFailure(errorResponse.message));
}
}<|fim▁end|> | |
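// Usage sketch (pipeline name and page object hypothetical):
//   const service = new PipelineActivityService();
//   service.activities("build-linux", 0, "", somePage);  // somePage: ResultAwarePage<PipelineActivity>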
<|file_name|>AssFilterAutoLoader.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright(C) 2016-2017 Blitzker
*
* This program is free software : you can redistribute it and / or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program.If not, see <http://www.gnu.org/licenses/>.
*/
#include "stdafx.h"
#include "AssDebug.h"
#include "AssFilter.h"
#include "AssFilterAutoLoader.h"
#include "registry.h"
AFAutoLoaderDummyInputPin::AFAutoLoaderDummyInputPin(AssFilterAutoLoader *pFilter, CCritSec *pLock, HRESULT *pResult, LPCWSTR pName)
: CBaseInputPin(NAME("DummyInputPin"), pFilter, pLock, pResult, pName)
, m_filter(pFilter)
{
}
HRESULT AFAutoLoaderDummyInputPin::CheckMediaType(const CMediaType* mt)
{
return m_filter->CheckInput(mt);
}
AssFilterAutoLoader::AssFilterAutoLoader(LPUNKNOWN pUnk, HRESULT* pResult)
: CBaseFilter(NAME("AssFilterAutoLoader"), pUnk, &m_pLock, __uuidof(AssFilterAutoLoader))
{
#ifdef DEBUG
DbgSetModuleLevel(LOG_ERROR, 10);
DbgSetModuleLevel(LOG_LOCKING, 10);
DbgSetModuleLevel(LOG_TRACE, 10);
DbgSetLogFileDesktop(L"AssFilterDbg.Log");
#endif
m_pin = std::make_unique<AFAutoLoaderDummyInputPin>(this, &m_pLock, pResult, L"");
m_loaded = false;
}
AssFilterAutoLoader::~AssFilterAutoLoader()
{
}
CUnknown* WINAPI AssFilterAutoLoader::CreateInstance(LPUNKNOWN pUnk, HRESULT* pResult)
{
try
{
return new AssFilterAutoLoader(pUnk, pResult);
}
catch (std::bad_alloc&)
{
if (pResult)
*pResult = E_OUTOFMEMORY;
}
return nullptr;
}
CBasePin* AssFilterAutoLoader::GetPin(int n)
{
if (n == 0)
return m_pin.get();
return NULL;
}
int AssFilterAutoLoader::GetPinCount()
{
return 1;
}
STDMETHODIMP AssFilterAutoLoader::JoinFilterGraph(IFilterGraph* pGraph, LPCWSTR pName)
{
m_loaded = false;
if(pGraph)
{
if (pName)
{
DbgLog((LOG_TRACE, 1, L"AssFilterAutoLoader::JoinFilterGraph() -> %s joined the graph!", pName));
}
IEnumFiltersPtr pEnumFilters;
if (SUCCEEDED(pGraph->EnumFilters(&pEnumFilters)))
{
for (IBaseFilterPtr pBaseFilter; pEnumFilters->Next(1, &pBaseFilter, 0) == S_OK; pBaseFilter = NULL)
{
//FILTER_INFO pInfo;
//pBaseFilter->QueryFilterInfo(&pInfo);
//DbgLog((LOG_TRACE, 1, L"AssFilterAutoLoader::JoinFilterGraph() -> Filter name: %s", pInfo.achName));
//if (pInfo.pGraph != NULL)
// pInfo.pGraph->Release();
if (pBaseFilter != (IBaseFilterPtr)this)
{
CLSID clsid;
pBaseFilter->GetClassID(&clsid);
if (clsid == __uuidof(AssFilterAutoLoader))
{
DbgLog((LOG_TRACE, 1, L"AssFilterAutoLoader::JoinFilterGraph() -> AssFilterAutoLoader already in the graph"));
return E_FAIL;
}
if (clsid == __uuidof(AssFilter))
{
DbgLog((LOG_TRACE, 1, L"AssFilterAutoLoader::JoinFilterGraph() -> AssFilter already in the graph"));
return E_FAIL;
}
}
}
}
}
return __super::JoinFilterGraph(pGraph, pName);
}
STDMETHODIMP AssFilterAutoLoader::QueryFilterInfo(FILTER_INFO* pInfo)
{
CheckPointer(pInfo, E_POINTER);
ValidateReadWritePtr(pInfo, sizeof(FILTER_INFO));
HRESULT hr = __super::QueryFilterInfo(pInfo);
if (SUCCEEDED(hr))
{
wcscpy_s(pInfo->achName, _countof(pInfo->achName) - 1, L"AssFilterModAutoLoader");
}
return hr;
}
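// Scans every source filter in the graph for an embedded subtitle output pin
// (ASS/UTF8/VobSub/PGS); returns true only when no such pin exists.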
bool AssFilterAutoLoader::AutoLoad(IFilterGraph* pGraph)
{
// Find subtitle pin (MEDIASUBTYPE_ASS or MEDIASUBTYPE_UTF8) on the graph splitter
bool have_subtitle_pin = false;
IEnumFiltersPtr pEnumFilters;
if (SUCCEEDED(pGraph->EnumFilters(&pEnumFilters)))
{
DbgLog((LOG_TRACE, 1, L"AssFilterAutoLoader::AutoLoad -> Succeeded EnumFilters"));
for (IBaseFilterPtr pBaseFilter; pEnumFilters->Next(1, &pBaseFilter, 0) == S_OK; pBaseFilter = NULL)
{
IFileSourceFilterPtr pFSF;
if (SUCCEEDED(pBaseFilter->QueryInterface(IID_PPV_ARGS(&pFSF))))
{
DbgLog((LOG_TRACE, 1, L"AssFilterAutoLoader::AutoLoad -> Succeeded QueryInterface"));
IEnumPinsPtr pEnumPins;
if (SUCCEEDED(pBaseFilter->EnumPins(&pEnumPins)))
{
DbgLog((LOG_TRACE, 1, L"AssFilterAutoLoader::AutoLoad -> Succeeded EnumPins"));
for (IPinPtr pPin; pEnumPins->Next(1, &pPin, 0) == S_OK; pPin = NULL)
{
IEnumMediaTypesPtr pEnumMediaTypes;
if (SUCCEEDED(pPin->EnumMediaTypes(&pEnumMediaTypes)))
{
DbgLog((LOG_TRACE, 1, L"AssFilterAutoLoader::AutoLoad -> Succeeded EnumMediaTypes"));
AM_MEDIA_TYPE* pMediaType = NULL;
for (; pEnumMediaTypes->Next(1, &pMediaType, NULL) == S_OK; DeleteMediaType(pMediaType), pMediaType = NULL)
{
if (pMediaType->majortype == MEDIATYPE_Subtitle && ((pMediaType->subtype == MEDIASUBTYPE_ASS) ||
(pMediaType->subtype == MEDIASUBTYPE_UTF8) || (pMediaType->subtype == MEDIASUBTYPE_VOBSUB) ||
(pMediaType->subtype == MEDIASUBTYPE_HDMVSUB)))
{
DbgLog((LOG_TRACE, 1, L"AssFilterAutoLoader::AutoLoad -> Found subtitle pin on source filter"));
have_subtitle_pin = true;
break;
}
}
if (pMediaType)
DeleteMediaType(pMediaType);
}
if (have_subtitle_pin)
break;
}
}
}
if (have_subtitle_pin)
break;
}
}
return !have_subtitle_pin;
}
bool AssFilterAutoLoader::DisableAutoLoad()
{
HRESULT hr;
    BOOL bFlag = FALSE;
CRegistry reg = CRegistry(HKEY_CURRENT_USER, ASSFILTER_REGISTRY_KEY, hr, TRUE);
if (SUCCEEDED(hr))
{
bFlag = reg.ReadBOOL(L"DisableAutoLoad", hr);
if (!SUCCEEDED(hr))
return false;
}
return bFlag ? true : false;
}
HRESULT AssFilterAutoLoader::CheckInput(const CMediaType* mt)
{
HRESULT hr = NOERROR;
if (!m_loaded)
{
m_loaded = true;
#ifdef DEBUG
if (mt->majortype==MEDIATYPE_Video)
{
DbgLog((LOG_TRACE, 1, L"AssFilterAutoLoader::CheckInput() -> MEDIATYPE_Video"));
}
else if (mt->majortype==MEDIATYPE_Audio)
{
DbgLog((LOG_TRACE, 1, L"AssFilterAutoLoader::CheckInput() -> MEDIATYPE_Audio"));
}
else if (mt->majortype==MEDIATYPE_Subtitle)
{
DbgLog((LOG_TRACE, 1, L"AssFilterAutoLoader::CheckInput() -> MEDIATYPE_Subtitle"));
}
else
{
DbgLog((LOG_TRACE, 1, L"AssFilterAutoLoader::CheckInput() -> Other MEDIATYPE"));
}
#endif // DEBUG
if (mt->majortype == MEDIATYPE_Audio ||
mt->majortype == MEDIATYPE_Subtitle ||
mt->majortype == MEDIATYPE_Video)
{
if (AutoLoad(m_pGraph) && !DisableAutoLoad())
{
DbgLog((LOG_TRACE, 1, L"AssFilterAutoLoader::CheckInput() -> Autoload"));
IBaseFilterPtr filter;
hr = CoCreateInstance(__uuidof(AssFilter), NULL, CLSCTX_INPROC_SERVER, IID_PPV_ARGS(&filter));
if (FAILED(hr))
{
DbgLog((LOG_TRACE, 1, L"AssFilterAutoLoader::CheckInput -> Failed to create AssFilterMod."));
return E_FAIL;
}
hr = m_pGraph->AddFilter(filter, L"AssFilterMod(AutoLoad)");
if (FAILED(hr))
{
DbgLog((LOG_TRACE, 1, L"AssFilterAutoLoader::CheckInput -> Failed to AddFilter."));
return E_FAIL;
}
if (mt->majortype == MEDIATYPE_Subtitle)
{
IGraphConfigPtr graph;
if (SUCCEEDED(filter->QueryInterface(IID_PPV_ARGS(&graph))))
{
hr = graph->AddFilterToCache(filter);
if (FAILED(hr))<|fim▁hole|>
return E_FAIL;
}
}
}
}
}
}
return E_FAIL;
}<|fim▁end|> | {
DbgLog((LOG_TRACE, 1, L"AssFilterAutoLoader::CheckInput -> Failed to add filter to cache.")); |
<|file_name|>convert_to_q.py<|end_file_name|><|fim▁begin|># Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
# pylint: disable=too-few-public-methods<|fim▁hole|>
"""State describing the conversion to momentum transfer"""
from __future__ import (absolute_import, division, print_function)
import json
import copy
from sans.state.state_base import (StateBase, rename_descriptor_names, BoolParameter, PositiveFloatParameter,
ClassTypeParameter, StringParameter)
from sans.common.enums import (ReductionDimensionality, RangeStepType, SANSFacility)
from sans.state.state_functions import (is_pure_none_or_not_none, is_not_none_and_first_larger_than_second,
validation_message)
from sans.state.automatic_setters import (automatic_setters)
# ----------------------------------------------------------------------------------------------------------------------
# State
# ----------------------------------------------------------------------------------------------------------------------
@rename_descriptor_names
class StateConvertToQ(StateBase):
reduction_dimensionality = ClassTypeParameter(ReductionDimensionality)
use_gravity = BoolParameter()
gravity_extra_length = PositiveFloatParameter()
radius_cutoff = PositiveFloatParameter()
wavelength_cutoff = PositiveFloatParameter()
# 1D settings
q_min = PositiveFloatParameter()
q_max = PositiveFloatParameter()
q_1d_rebin_string = StringParameter()
# 2D settings
q_xy_max = PositiveFloatParameter()
q_xy_step = PositiveFloatParameter()
q_xy_step_type = ClassTypeParameter(RangeStepType)
# -----------------------
# Q Resolution specific
# ---------------------
use_q_resolution = BoolParameter()
q_resolution_collimation_length = PositiveFloatParameter()
q_resolution_delta_r = PositiveFloatParameter()
moderator_file = StringParameter()
# Circular aperture settings
q_resolution_a1 = PositiveFloatParameter()
q_resolution_a2 = PositiveFloatParameter()
# Rectangular aperture settings
q_resolution_h1 = PositiveFloatParameter()
q_resolution_h2 = PositiveFloatParameter()
q_resolution_w1 = PositiveFloatParameter()
q_resolution_w2 = PositiveFloatParameter()
def __init__(self):
super(StateConvertToQ, self).__init__()
self.reduction_dimensionality = ReductionDimensionality.OneDim
self.use_gravity = False
self.gravity_extra_length = 0.0
self.use_q_resolution = False
self.radius_cutoff = 0.0
self.wavelength_cutoff = 0.0
def validate(self):
is_invalid = {}
# 1D Q settings
if not is_pure_none_or_not_none([self.q_min, self.q_max]):
entry = validation_message("The q boundaries for the 1D reduction are inconsistent.",
"Make sure that both q boundaries are set (or none).",
{"q_min": self.q_min,
"q_max": self.q_max})
is_invalid.update(entry)
if is_not_none_and_first_larger_than_second([self.q_min, self.q_max]):
entry = validation_message("Incorrect q bounds for 1D reduction.",
"Make sure that the lower q bound is smaller than the upper q bound.",
{"q_min": self.q_min,
"q_max": self.q_max})
is_invalid.update(entry)
if self.reduction_dimensionality is ReductionDimensionality.OneDim:
if self.q_min is None or self.q_max is None:
entry = validation_message("Q bounds not set for 1D reduction.",
"Make sure to set the q boundaries when using a 1D reduction.",
{"q_min": self.q_min,
"q_max": self.q_max})
is_invalid.update(entry)
if self.q_1d_rebin_string is not None:
if self.q_1d_rebin_string == "":
entry = validation_message("Q rebin string does not seem to be valid.",
"Make sure to provide a valid rebin string",
{"q_1d_rebin_string": self.q_1d_rebin_string})
is_invalid.update(entry)
elif not is_valid_rebin_string(self.q_1d_rebin_string):
entry = validation_message("Q rebin string does not seem to be valid.",
"Make sure to provide a valid rebin string",
{"q_1d_rebin_string": self.q_1d_rebin_string})
is_invalid.update(entry)
# QXY settings
if self.reduction_dimensionality is ReductionDimensionality.TwoDim:
if self.q_xy_max is None or self.q_xy_step is None:
entry = validation_message("Q bounds not set for 2D reduction.",
"Make sure that the q_max value bound and the step for the 2D reduction.",
{"q_xy_max": self.q_xy_max,
"q_xy_step": self.q_xy_step})
is_invalid.update(entry)
# Q Resolution settings
if self.use_q_resolution:
if not is_pure_none_or_not_none([self.q_resolution_a1, self.q_resolution_a2]):
entry = validation_message("Inconsistent circular geometry.",
"Make sure that both diameters for the circular apertures are set.",
{"q_resolution_a1": self.q_resolution_a1,
"q_resolution_a2": self.q_resolution_a2})
is_invalid.update(entry)
if not is_pure_none_or_not_none([self.q_resolution_h1, self.q_resolution_h2, self.q_resolution_w1,
self.q_resolution_w2]):
entry = validation_message("Inconsistent rectangular geometry.",
"Make sure that both diameters for the circular apertures are set.",
{"q_resolution_h1": self.q_resolution_h1,
"q_resolution_h2": self.q_resolution_h2,
"q_resolution_w1": self.q_resolution_w1,
"q_resolution_w2": self.q_resolution_w2})
is_invalid.update(entry)
if all(element is None for element in [self.q_resolution_a1, self.q_resolution_a2, self.q_resolution_w1,
self.q_resolution_w2, self.q_resolution_h1, self.q_resolution_h2]):
entry = validation_message("Aperture is undefined.",
"Make sure that you set the geometry for a circular or a "
"rectangular aperture.",
{"q_resolution_a1": self.q_resolution_a1,
"q_resolution_a2": self.q_resolution_a2,
"q_resolution_h1": self.q_resolution_h1,
"q_resolution_h2": self.q_resolution_h2,
"q_resolution_w1": self.q_resolution_w1,
"q_resolution_w2": self.q_resolution_w2})
is_invalid.update(entry)
if self.moderator_file is None:
entry = validation_message("Missing moderator file.",
"Make sure to specify a moderator file when using q resolution.",
{"moderator_file": self.moderator_file})
is_invalid.update(entry)
is_invalid.update({"moderator_file": "A moderator file is required for the q resolution calculation."})
if is_invalid:
raise ValueError("StateMoveDetectorISIS: The provided inputs are illegal. "
"Please see: {0}".format(json.dumps(is_invalid)))
# ----------------------------------------------------------------------------------------------------------------------
# Builder
# ----------------------------------------------------------------------------------------------------------------------
class StateConvertToQBuilder(object):
@automatic_setters(StateConvertToQ)
def __init__(self):
super(StateConvertToQBuilder, self).__init__()
self.state = StateConvertToQ()
def build(self):
self.state.validate()
return copy.copy(self.state)
# ------------------------------------------
# Factory method for StateConvertToQBuilder
# ------------------------------------------
def get_convert_to_q_builder(data_info):
# The data state has most of the information that we require to define the q conversion.
# For the factory method, only the facility/instrument is of relevance.
facility = data_info.facility
if facility is SANSFacility.ISIS:
return StateConvertToQBuilder()
else:
raise NotImplementedError("StateConvertToQBuilder: Could not find any valid save builder for the "
"specified StateData object {0}".format(str(data_info)))
# -------------------------------------------
# Free functions
# -------------------------------------------
def is_valid_rebin_string(rebin_string):
is_valid = True
try:
values = [float(el) for el in rebin_string.split(",")]
if len(values) < 2:
is_valid = False
elif len(values) == 2:
if values[0] > values[1]:
is_valid = False
elif len(values) % 2 == 1: # odd number of entries
step_points = values[::2]
if not is_increasing(step_points):
is_valid = False
else:
is_valid = False
except: # noqa
is_valid = False
return is_valid
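# Illustrative checks (comments only; the "start, step, stop[, step, stop...]"
# reading of the rebin string is an assumption based on the validation logic
# above, mirroring Mantid's Rebin parameter convention):
#   is_valid_rebin_string("1.0, 2.0")            # True  -- simple min/max pair
#   is_valid_rebin_string("0.01, 0.001, 0.3")    # True  -- start, step, stop
#   is_valid_rebin_string("0.3, 0.001, 0.01")    # False -- boundaries decrease
#   is_valid_rebin_string("1.0, 0.1, 0.5, 2.0")  # False -- even count above two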
def is_increasing(step_points):
return all(el1 <= el2 for el1, el2 in zip(step_points, step_points[1:]))<|fim▁end|> | |
<|file_name|>positioned.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::cell::ArcRefCell;
use crate::context::LayoutContext;
use crate::dom_traversal::{Contents, NodeAndStyleInfo, NodeExt};
use crate::formatting_contexts::IndependentFormattingContext;
use crate::fragments::{BoxFragment, CollapsedBlockMargins, Fragment};
use crate::geom::flow_relative::{Rect, Sides, Vec2};
use crate::geom::{LengthOrAuto, LengthPercentageOrAuto};
use crate::sizing::ContentSizesRequest;
use crate::style_ext::{ComputedValuesExt, DisplayInside};
use crate::{ContainingBlock, DefiniteContainingBlock};
use rayon::iter::{IntoParallelRefIterator, ParallelExtend};
use rayon_croissant::ParallelIteratorExt;
use servo_arc::Arc;
use style::computed_values::position::T as Position;
use style::properties::ComputedValues;
use style::values::computed::{Length, LengthPercentage};
use style::values::specified::text::TextDecorationLine;
use style::Zero;
#[derive(Debug, Serialize)]
pub(crate) struct AbsolutelyPositionedBox {
pub contents: IndependentFormattingContext,
}
pub(crate) struct PositioningContext {
for_nearest_positioned_ancestor: Option<Vec<HoistedAbsolutelyPositionedBox>>,
// For nearest `containing block for all descendants` as defined by the CSS transforms
// spec.
// https://www.w3.org/TR/css-transforms-1/#containing-block-for-all-descendants
for_nearest_containing_block_for_all_descendants: Vec<HoistedAbsolutelyPositionedBox>,
}
pub(crate) struct HoistedAbsolutelyPositionedBox {
absolutely_positioned_box: Arc<AbsolutelyPositionedBox>,
/// The rank of the child from which this absolutely positioned fragment
/// came from, when doing the layout of a block container. Used to compute
/// static positions when going up the tree.
pub(crate) tree_rank: usize,
box_offsets: Vec2<AbsoluteBoxOffsets>,
/// A reference to a Fragment which is shared between this `HoistedAbsolutelyPositionedBox`
/// and its placeholder `AbsoluteOrFixedPositionedFragment` in the original tree position.
/// This will be used later in order to paint this hoisted box in tree order.
pub fragment: ArcRefCell<Option<ArcRefCell<Fragment>>>,
}
#[derive(Clone, Debug)]
pub(crate) enum AbsoluteBoxOffsets {
StaticStart {
start: Length,
},
Start {
start: LengthPercentage,
},
End {
end: LengthPercentage,
},
Both {
start: LengthPercentage,
end: LengthPercentage,
},
}
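// Illustrative mapping from CSS inline-axis offsets to these variants
// (horizontal-tb, LTR assumed), mirroring `absolute_box_offsets` below:
//   left: auto; right: auto -> StaticStart { start: <static position> }
//   left: 1px; right: auto -> Start { start: 1px }
//   left: auto; right: 1px -> End { end: 1px }
//   left: 1px; right: 2px -> Both { start: 1px, end: 2px }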
impl AbsolutelyPositionedBox {
pub fn construct<'dom>(
context: &LayoutContext,
node_info: &NodeAndStyleInfo<impl NodeExt<'dom>>,
display_inside: DisplayInside,
contents: Contents,
) -> Self {
// "Shrink-to-fit" in https://drafts.csswg.org/css2/visudet.html#abs-non-replaced-width
let content_sizes = ContentSizesRequest::inline_if(
// If inline-size is non-auto, that value is used without shrink-to-fit
!node_info.style.inline_size_is_length() &&
// If it is, then the only case where shrink-to-fit is *not* used is
// if both offsets are non-auto, leaving inline-size as the only variable
// in the constraint equation.
!node_info.style.inline_box_offsets_are_both_non_auto(),
);
Self {
contents: IndependentFormattingContext::construct(
context,
node_info,
display_inside,
contents,
content_sizes,
// Text decorations are not propagated to any out-of-flow descendants.
TextDecorationLine::NONE,
),
}
}
pub(crate) fn to_hoisted(
self_: Arc<Self>,
initial_start_corner: Vec2<Length>,
tree_rank: usize,
) -> HoistedAbsolutelyPositionedBox {
fn absolute_box_offsets(
initial_static_start: Length,
start: LengthPercentageOrAuto,
end: LengthPercentageOrAuto,
) -> AbsoluteBoxOffsets {
match (start.non_auto(), end.non_auto()) {
(None, None) => AbsoluteBoxOffsets::StaticStart {
start: initial_static_start,
},
(Some(start), Some(end)) => AbsoluteBoxOffsets::Both {
start: start.clone(),
end: end.clone(),
},
(None, Some(end)) => AbsoluteBoxOffsets::End { end: end.clone() },
(Some(start), None) => AbsoluteBoxOffsets::Start {
start: start.clone(),
},
}
}
let box_offsets = self_.contents.style.box_offsets();
HoistedAbsolutelyPositionedBox {
tree_rank,
box_offsets: Vec2 {
inline: absolute_box_offsets(
initial_start_corner.inline,
box_offsets.inline_start,
box_offsets.inline_end,
),
block: absolute_box_offsets(
initial_start_corner.block,
box_offsets.block_start,
box_offsets.block_end,
),
},
fragment: ArcRefCell::new(None),
absolutely_positioned_box: self_,
}
}
}
impl PositioningContext {
pub(crate) fn new_for_containing_block_for_all_descendants() -> Self {
Self {
for_nearest_positioned_ancestor: None,
for_nearest_containing_block_for_all_descendants: Vec::new(),
}
}
pub(crate) fn new_for_rayon(collects_for_nearest_positioned_ancestor: bool) -> Self {
Self {
for_nearest_positioned_ancestor: if collects_for_nearest_positioned_ancestor {
Some(Vec::new())
} else {
None
},
for_nearest_containing_block_for_all_descendants: Vec::new(),
}
}
pub(crate) fn collects_for_nearest_positioned_ancestor(&self) -> bool {
self.for_nearest_positioned_ancestor.is_some()
}
pub(crate) fn new_for_style(style: &ComputedValues) -> Option<Self> {
if style.establishes_containing_block_for_all_descendants() {
Some(Self::new_for_containing_block_for_all_descendants())
} else if style.establishes_containing_block() {
Some(Self {
for_nearest_positioned_ancestor: Some(Vec::new()),
for_nearest_containing_block_for_all_descendants: Vec::new(),
})
} else {
None
}
}
/// Given `fragment_layout_fn`, a closure which lays out a fragment in a provided
/// `PositioningContext`, create a new positioning context if necessary for the fragment and
/// lay out the fragment and all its children. Returns the newly created `BoxFragment`.
pub(crate) fn layout_maybe_position_relative_fragment(
&mut self,
layout_context: &LayoutContext,
containing_block: &ContainingBlock,
style: &ComputedValues,
fragment_layout_fn: impl FnOnce(&mut Self) -> BoxFragment,
) -> BoxFragment {
// Try to create a context, but if one isn't necessary, simply create the fragment
// using the given closure and the current `PositioningContext`.
let mut new_context = match Self::new_for_style(style) {
Some(new_context) => new_context,
None => return fragment_layout_fn(self),
};
let mut new_fragment = fragment_layout_fn(&mut new_context);
new_context.layout_collected_children(layout_context, &mut new_fragment);
// If the new context has any hoisted boxes for the nearest containing block for
        // all descendants, then collect them and pass them up the tree.
vec_append_owned(
&mut self.for_nearest_containing_block_for_all_descendants,
new_context.for_nearest_containing_block_for_all_descendants,
);
if style.clone_position() == Position::Relative {
new_fragment.content_rect.start_corner +=
&relative_adjustement(style, containing_block);
}
new_fragment
}
/// Given `fragment_layout_fn`, a closure which lays out a fragment in a provided
/// `PositioningContext`, create a positioning context for a positioned fragment and lay out
/// the fragment and all its children. Returns the resulting `BoxFragment`.
fn create_and_layout_positioned(
layout_context: &LayoutContext,
style: &ComputedValues,
for_nearest_containing_block_for_all_descendants: &mut Vec<HoistedAbsolutelyPositionedBox>,
fragment_layout_fn: impl FnOnce(&mut Self) -> BoxFragment,
) -> BoxFragment {
let mut new_context = match Self::new_for_style(style) {
Some(new_context) => new_context,
None => unreachable!(),
};
let mut new_fragment = fragment_layout_fn(&mut new_context);
new_context.layout_collected_children(layout_context, &mut new_fragment);
for_nearest_containing_block_for_all_descendants
.extend(new_context.for_nearest_containing_block_for_all_descendants);
new_fragment
}
// Lay out the hoisted boxes collected into this `PositioningContext` and add them
// to the given `BoxFragment`.
pub fn layout_collected_children(
&mut self,
layout_context: &LayoutContext,
new_fragment: &mut BoxFragment,
) {
let padding_rect = Rect {
size: new_fragment.content_rect.size.clone(),
// Ignore the content rect’s position in its own containing block:
start_corner: Vec2::zero(),
}
.inflate(&new_fragment.padding);<|fim▁hole|> size: padding_rect.size.clone(),
style: &new_fragment.style,
};
let take_hoisted_boxes_pending_layout = |context: &mut Self| match context
.for_nearest_positioned_ancestor
.as_mut()
{
Some(fragments) => std::mem::take(fragments),
None => std::mem::take(&mut context.for_nearest_containing_block_for_all_descendants),
};
// Loop because it’s possible that we discover (the static position of)
// more absolutely-positioned boxes while doing layout for others.
let mut hoisted_boxes = take_hoisted_boxes_pending_layout(self);
let mut laid_out_child_fragments = Vec::new();
while !hoisted_boxes.is_empty() {
HoistedAbsolutelyPositionedBox::layout_many(
layout_context,
&hoisted_boxes,
&mut laid_out_child_fragments,
&mut self.for_nearest_containing_block_for_all_descendants,
&containing_block,
);
hoisted_boxes = take_hoisted_boxes_pending_layout(self);
}
new_fragment.children.extend(laid_out_child_fragments);
}
pub(crate) fn push(&mut self, box_: HoistedAbsolutelyPositionedBox) {
if let Some(nearest) = &mut self.for_nearest_positioned_ancestor {
match box_
.absolutely_positioned_box
.contents
.style
.clone_position()
{
Position::Fixed => {}, // fall through
Position::Absolute => return nearest.push(box_),
Position::Static | Position::Relative => unreachable!(),
}
}
self.for_nearest_containing_block_for_all_descendants
.push(box_)
}
pub(crate) fn append(&mut self, other: Self) {
vec_append_owned(
&mut self.for_nearest_containing_block_for_all_descendants,
other.for_nearest_containing_block_for_all_descendants,
);
match (
self.for_nearest_positioned_ancestor.as_mut(),
other.for_nearest_positioned_ancestor,
) {
(Some(a), Some(b)) => vec_append_owned(a, b),
(None, None) => {},
_ => unreachable!(),
}
}
pub(crate) fn adjust_static_positions(
&mut self,
tree_rank_in_parent: usize,
f: impl FnOnce(&mut Self) -> Vec<Fragment>,
) -> Vec<Fragment> {
let for_containing_block_for_all_descendants =
self.for_nearest_containing_block_for_all_descendants.len();
let for_nearest_so_far = self
.for_nearest_positioned_ancestor
.as_ref()
.map(|v| v.len());
let fragments = f(self);
adjust_static_positions(
&mut self.for_nearest_containing_block_for_all_descendants
[for_containing_block_for_all_descendants..],
&fragments,
tree_rank_in_parent,
);
if let Some(nearest) = &mut self.for_nearest_positioned_ancestor {
adjust_static_positions(
&mut nearest[for_nearest_so_far.unwrap()..],
&fragments,
tree_rank_in_parent,
);
}
fragments
}
pub(crate) fn layout_initial_containing_block_children(
&mut self,
layout_context: &LayoutContext,
initial_containing_block: &DefiniteContainingBlock,
fragments: &mut Vec<ArcRefCell<Fragment>>,
) {
debug_assert!(self.for_nearest_positioned_ancestor.is_none());
// Loop because it’s possible that we discover (the static position of)
// more absolutely-positioned boxes while doing layout for others.
while !self
.for_nearest_containing_block_for_all_descendants
.is_empty()
{
HoistedAbsolutelyPositionedBox::layout_many(
layout_context,
&std::mem::take(&mut self.for_nearest_containing_block_for_all_descendants),
fragments,
&mut self.for_nearest_containing_block_for_all_descendants,
initial_containing_block,
)
}
}
}
impl HoistedAbsolutelyPositionedBox {
pub(crate) fn layout_many(
layout_context: &LayoutContext,
boxes: &[Self],
fragments: &mut Vec<ArcRefCell<Fragment>>,
for_nearest_containing_block_for_all_descendants: &mut Vec<HoistedAbsolutelyPositionedBox>,
containing_block: &DefiniteContainingBlock,
) {
if layout_context.use_rayon {
fragments.par_extend(boxes.par_iter().mapfold_reduce_into(
for_nearest_containing_block_for_all_descendants,
|for_nearest_containing_block_for_all_descendants, box_| {
let new_fragment = ArcRefCell::new(Fragment::Box(box_.layout(
layout_context,
for_nearest_containing_block_for_all_descendants,
containing_block,
)));
*box_.fragment.borrow_mut() = Some(new_fragment.clone());
new_fragment
},
Vec::new,
vec_append_owned,
))
} else {
fragments.extend(boxes.iter().map(|box_| {
let new_fragment = ArcRefCell::new(Fragment::Box(box_.layout(
layout_context,
for_nearest_containing_block_for_all_descendants,
containing_block,
)));
*box_.fragment.borrow_mut() = Some(new_fragment.clone());
new_fragment
}))
}
}
pub(crate) fn layout(
&self,
layout_context: &LayoutContext,
for_nearest_containing_block_for_all_descendants: &mut Vec<HoistedAbsolutelyPositionedBox>,
containing_block: &DefiniteContainingBlock,
) -> BoxFragment {
let cbis = containing_block.size.inline;
let cbbs = containing_block.size.block;
let style = &self.absolutely_positioned_box.contents.style;
let pbm = style.padding_border_margin(&containing_block.into());
let size;
let replaced_used_size;
match self.absolutely_positioned_box.contents.as_replaced() {
Ok(replaced) => {
// https://drafts.csswg.org/css2/visudet.html#abs-replaced-width
// https://drafts.csswg.org/css2/visudet.html#abs-replaced-height
let used_size =
replaced.used_size_as_if_inline_element(&containing_block.into(), style, &pbm);
size = Vec2 {
inline: LengthOrAuto::LengthPercentage(used_size.inline),
block: LengthOrAuto::LengthPercentage(used_size.block),
};
replaced_used_size = Some(used_size);
},
Err(_non_replaced) => {
size = style.content_box_size(&containing_block.into(), &pbm);
replaced_used_size = None;
},
}
let inline_axis = solve_axis(
cbis,
pbm.padding_border_sums.inline,
pbm.margin.inline_start,
pbm.margin.inline_end,
/* avoid_negative_margin_start */ true,
&self.box_offsets.inline,
size.inline,
);
let block_axis = solve_axis(
            cbbs,
pbm.padding_border_sums.block,
pbm.margin.block_start,
pbm.margin.block_end,
/* avoid_negative_margin_start */ false,
&self.box_offsets.block,
size.block,
);
let margin = Sides {
inline_start: inline_axis.margin_start,
inline_end: inline_axis.margin_end,
block_start: block_axis.margin_start,
block_end: block_axis.margin_end,
};
PositioningContext::create_and_layout_positioned(
layout_context,
style,
for_nearest_containing_block_for_all_descendants,
|positioning_context| {
let size;
let fragments;
match self.absolutely_positioned_box.contents.as_replaced() {
Ok(replaced) => {
// https://drafts.csswg.org/css2/visudet.html#abs-replaced-width
// https://drafts.csswg.org/css2/visudet.html#abs-replaced-height
let style = &self.absolutely_positioned_box.contents.style;
size = replaced_used_size.unwrap();
fragments = replaced.make_fragments(style, size.clone());
},
Err(non_replaced) => {
// https://drafts.csswg.org/css2/visudet.html#abs-non-replaced-width
// https://drafts.csswg.org/css2/visudet.html#abs-non-replaced-height
let inline_size = inline_axis.size.auto_is(|| {
let anchor = match inline_axis.anchor {
Anchor::Start(start) => start,
Anchor::End(end) => end,
};
let available_size = cbis -
anchor -
pbm.padding_border_sums.inline -
margin.inline_sum();
self.absolutely_positioned_box
.contents
.content_sizes
.shrink_to_fit(available_size)
});
let containing_block_for_children = ContainingBlock {
inline_size,
block_size: block_axis.size,
style,
};
// https://drafts.csswg.org/css-writing-modes/#orthogonal-flows
assert_eq!(
containing_block.style.writing_mode,
containing_block_for_children.style.writing_mode,
"Mixed writing modes are not supported yet"
);
let dummy_tree_rank = 0;
let independent_layout = non_replaced.layout(
layout_context,
positioning_context,
&containing_block_for_children,
dummy_tree_rank,
);
size = Vec2 {
inline: inline_size,
block: block_axis
.size
.auto_is(|| independent_layout.content_block_size),
};
                        fragments = independent_layout.fragments;
},
};
let pb = &pbm.padding + &pbm.border;
let inline_start = match inline_axis.anchor {
Anchor::Start(start) => start + pb.inline_start + margin.inline_start,
Anchor::End(end) => {
cbis - end - pb.inline_end - margin.inline_end - size.inline
},
};
let block_start = match block_axis.anchor {
Anchor::Start(start) => start + pb.block_start + margin.block_start,
Anchor::End(end) => cbbs - end - pb.block_end - margin.block_end - size.block,
};
let content_rect = Rect {
start_corner: Vec2 {
inline: inline_start,
block: block_start,
},
size,
};
BoxFragment::new(
self.absolutely_positioned_box.contents.tag,
style.clone(),
fragments,
content_rect,
pbm.padding,
pbm.border,
margin,
CollapsedBlockMargins::zero(),
)
},
)
}
}
enum Anchor {
Start(Length),
End(Length),
}
struct AxisResult {
anchor: Anchor,
size: LengthOrAuto,
margin_start: Length,
margin_end: Length,
}
/// This unifies some of the parts in common in:
///
/// * https://drafts.csswg.org/css2/visudet.html#abs-non-replaced-width
/// * https://drafts.csswg.org/css2/visudet.html#abs-non-replaced-height
///
/// … and:
///
/// * https://drafts.csswg.org/css2/visudet.html#abs-replaced-width
/// * https://drafts.csswg.org/css2/visudet.html#abs-replaced-height
///
/// In the replaced case, `size` is never `Auto`.
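///
/// Worked example (illustrative numbers): with `containing_size` = 100px,
/// both offsets 10px, `padding_border_sum` = 0, a definite `size` of 60px and
/// both margins `auto`, the leftover is 100 - 10 - 10 - 0 - 60 = 20px, so each
/// margin resolves to 10px via the balancing in the `Both` arm below.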
fn solve_axis(
containing_size: Length,
padding_border_sum: Length,
computed_margin_start: LengthOrAuto,
computed_margin_end: LengthOrAuto,
avoid_negative_margin_start: bool,
box_offsets: &AbsoluteBoxOffsets,
size: LengthOrAuto,
) -> AxisResult {
match box_offsets {
AbsoluteBoxOffsets::StaticStart { start } => AxisResult {
anchor: Anchor::Start(*start),
size,
margin_start: computed_margin_start.auto_is(Length::zero),
margin_end: computed_margin_end.auto_is(Length::zero),
},
AbsoluteBoxOffsets::Start { start } => AxisResult {
anchor: Anchor::Start(start.percentage_relative_to(containing_size)),
size,
margin_start: computed_margin_start.auto_is(Length::zero),
margin_end: computed_margin_end.auto_is(Length::zero),
},
AbsoluteBoxOffsets::End { end } => AxisResult {
anchor: Anchor::End(end.percentage_relative_to(containing_size)),
size,
margin_start: computed_margin_start.auto_is(Length::zero),
margin_end: computed_margin_end.auto_is(Length::zero),
},
AbsoluteBoxOffsets::Both { start, end } => {
let start = start.percentage_relative_to(containing_size);
let end = end.percentage_relative_to(containing_size);
let margin_start;
let margin_end;
let used_size;
if let LengthOrAuto::LengthPercentage(s) = size {
used_size = s;
let margins = containing_size - start - end - padding_border_sum - s;
match (computed_margin_start, computed_margin_end) {
(LengthOrAuto::Auto, LengthOrAuto::Auto) => {
if avoid_negative_margin_start && margins < Length::zero() {
margin_start = Length::zero();
margin_end = margins;
} else {
margin_start = margins / 2.;
margin_end = margins / 2.;
}
},
(LengthOrAuto::Auto, LengthOrAuto::LengthPercentage(end)) => {
margin_start = margins - end;
margin_end = end;
},
(LengthOrAuto::LengthPercentage(start), LengthOrAuto::Auto) => {
margin_start = start;
margin_end = margins - start;
},
(
LengthOrAuto::LengthPercentage(start),
LengthOrAuto::LengthPercentage(end),
) => {
margin_start = start;
margin_end = end;
},
}
} else {
margin_start = computed_margin_start.auto_is(Length::zero);
margin_end = computed_margin_end.auto_is(Length::zero);
// FIXME(nox): What happens if that is negative?
used_size =
containing_size - start - end - padding_border_sum - margin_start - margin_end
};
AxisResult {
anchor: Anchor::Start(start),
size: LengthOrAuto::LengthPercentage(used_size),
margin_start,
margin_end,
}
},
}
}
fn adjust_static_positions(
absolutely_positioned_fragments: &mut [HoistedAbsolutelyPositionedBox],
child_fragments: &[Fragment],
tree_rank_in_parent: usize,
) {
for abspos_fragment in absolutely_positioned_fragments {
let original_tree_rank = abspos_fragment.tree_rank;
abspos_fragment.tree_rank = tree_rank_in_parent;
let child_fragment_rect = match &child_fragments[original_tree_rank] {
Fragment::Box(b) => &b.content_rect,
Fragment::AbsoluteOrFixedPositioned(_) => continue,
Fragment::Anonymous(a) => &a.rect,
_ => unreachable!(),
};
if let AbsoluteBoxOffsets::StaticStart { start } = &mut abspos_fragment.box_offsets.inline {
*start += child_fragment_rect.start_corner.inline;
}
if let AbsoluteBoxOffsets::StaticStart { start } = &mut abspos_fragment.box_offsets.block {
*start += child_fragment_rect.start_corner.block;
}
}
}
fn vec_append_owned<T>(a: &mut Vec<T>, mut b: Vec<T>) {
if a.is_empty() {
*a = b
} else {
a.append(&mut b)
}
}
/// https://drafts.csswg.org/css2/visuren.html#relative-positioning
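///
/// Illustrative: `top: 10px; bottom: auto` gives a block adjustment of 10px,
/// `top: auto; bottom: 10px` gives -10px, and both `auto` gives zero
/// (see `adjust` below).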
pub(crate) fn relative_adjustement(
style: &ComputedValues,
containing_block: &ContainingBlock,
) -> Vec2<Length> {
let cbis = containing_block.inline_size;
let cbbs = containing_block.block_size.auto_is(Length::zero);
let box_offsets = style.box_offsets().map_inline_and_block_axes(
|v| v.percentage_relative_to(cbis),
|v| v.percentage_relative_to(cbbs),
);
fn adjust(start: LengthOrAuto, end: LengthOrAuto) -> Length {
match (start, end) {
(LengthOrAuto::Auto, LengthOrAuto::Auto) => Length::zero(),
(LengthOrAuto::Auto, LengthOrAuto::LengthPercentage(end)) => -end,
(LengthOrAuto::LengthPercentage(start), _) => start,
}
}
Vec2 {
inline: adjust(box_offsets.inline_start, box_offsets.inline_end),
block: adjust(box_offsets.block_start, box_offsets.block_end),
}
}<|fim▁end|> | let containing_block = DefiniteContainingBlock { |
<|file_name|>predefined.html.py<|end_file_name|><|fim▁begin|>XXXXXXXXX XXXXX
XXXXXX
XXXXXX
XXXXX XXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX XXXXXXX X XXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXX X
XXXXXXXXXXXXXXXXXXXXXXXX X
XXXX XXXXXXXXXX
XXXXXXXXXXX X
XXXXXX X XX XX XX XX XX X X
X
X XX
X XX
XXXXXXXXX
XXXXXXX
XXXXX XXXXXXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXX
XXXXXXXXX
XXXXXXXXXXXXXX XXXXXXX XXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXXX
XXXXXXXXXXXXX XXXXXXXX XXX XXXXXXX XX XXXXXXX X XXXXXX XXXXXXXX XXXXX XX XXX XXXX XX XXX XXXX XXXXXX XXXXXXXXX XXX XXXX XXXXX XXXXXXX XXX XXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXX XX XX XXXXX XX XXXXXXXX XXXX XXX XXXXXX XXXXXXXX XXX XXXXXXXXX
XXXXXX XXXX XXXXXXXXXXX XXXXX XXX XXXXXXXXXX XXXXXXXX XXXXXX XXXXX XX XXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX
XXXXXXXXXXXXXXXXXXXXXX
XXXXXX
XXXXXX XXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXX
XXXX
XXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXXX
XXXXXXXXX XXXXXXXXX
XXXXXXXXXXXXXXX
XXXXX
XXXXXXXX
XXXXXXX
XXXX
XXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXXX
XXXXXXXXX XXXXXXXXX
XXXXXXXXXXXXXXX
XXXXX
XXXXXXXX
XXXXXXX
XXXX
XXXXXXXXX XXXXXXXXXX
XXXXXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXX
XXXXXXXXXX XXXXXXXXX XXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXXXX
XXXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXXXXXXX
XXXXXXXXXXXXXXX XXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXX XXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXX XXXXXXXXXXXXXXX
XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXXXX XXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXXXX
XXXXXXXXXX XXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXX
XXXXXXXXXXX XXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXXXX
XXXXXXXXXX XXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXX
XXXXXXXXX XXXXXXXXX XXXXXXX XXXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXXXXX
XXXXXXXXXXX XXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXXXX XXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXX XXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXXX
XXXXXXXXX XXXXXXXXX XXXXXXX XXXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXXX XXXXXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXXX
XXXXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXXX XXXXXXXXXX
XXXXXXXXX XXXXXXXXX XXXXXXX XXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXX
XXXXXXXXX XXXXXXXXX XXXXXXX XXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX<|fim▁hole|> XXXX
XXXXXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXXXXX XXXXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXX XXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXXX
XXXXXXXX XXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXXX
XXXXXXXXXXXXXX XXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXXX
XXXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXX XXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXXXXXX
XXXXXXXXXXXXX XXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXXX
XXXXXXXXXX XXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXXXX
XXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXXXXX
XXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXXXX
XXXXXXXX XXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXX XXXXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXXX XXXXXXXXXXX
XXXXXXXXXX XXXXXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXXXXX
XXXXXXXXX XXXXXXXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXXX XXXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXXX
XXXXXXXXXXX XXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXX XXXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXXXX XXXXXXXXXX
XXXXXXXXXXXXXX XXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXX
XXXXXXXXX XXXXXXXXXXX
XXXXXXXXXXXX XXXXXXXXXXXX
XXXXXXX XXXXXXXXX
XXXXXXXXXXX
XXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXX
XXXXX
XXXXXXXX
XXXXXXXX
XXX XXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXX
XXXXXXXXXXXX
XXXXXXXXXXXXX
XXXXXXXXXXXXXXX XXXXXXXXXXX
XXXXX
XXXX XXXXXXXXXXXXX
XXXX XXXXXXXXXXX
XXXXXX XXXXXXXXXX XXXXX XXXXX XX XXXX XX XXXXXXXXXX XXX XXXXX XXXXX XX XXXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX X
XXXXXXXXXXXXXXXXXXXXXXXX X
XXXX XXXXXXXXXX
XXXXXXXXXXX X
XXXXXX X XX XX XX XX XX X X
X
X XX
X XXXXXXXXX
XXXXX XXXXXXXX XX XXX XXXXX XXXXX XXX XXXXXXXXX XXXXXXXXXX XXXXXXX XXXXX XXX XXXXXX XXX XXX XX XXXX XXXXXXXXXXXX
XXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX
XXXXXX
XXXX XXXXXXXXXXXXXX
XXXXXX XXXX XXXXX XXXXX XX XXX XXX XXXX XXXXX XXXXXXXX XXXXXX XX XXX XXXX XXXXXXXX XX XXXXXXXXXXXXXXX
XXXXXX
XXXX XXXXXXXXXXXX
XXXXX
XXXXXXX XXXXXXX XXXX X XXXXXX XXX XX XXXXXXXXXX XXX XXXXXX XXXX XX XXXXXX XXXX XXX XXXXXXX XXXXX XXXXXXXX XX XXXXX XX XXXXXXXXX XXXXXXX XXX XXXXXX XXX
XXXXXXXXXX XXX XXXX XX XXXXX XXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXX
XXXXXX
XXXXXX XXXXXXXXX XXX XXXXXXX XXXXX XXX XXXXXX XXX XXX XX XXXX XXXXXXX XX XXXXXXX XXX XXXXXXX XX XXX XXXXXXXXXX
XXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX
XXXXXX
XXXX XXXXXXXXXXXXX
XXXXXXX XXXXX XXXXX XXXX XX XXXXX XXX XXXXXX XXXX XXXX XXX XXXX XXXXXX XX XXXXX XXXXXX XXXX XXXX XXXX XXXXXX XXXXXXXXXXXXX XX XXX XXXXXXXXXX XXXX XX
XXXXXXXXXXX
XXXXXX
XXXX XXXXXXXXXXXX
XXXXXX XXXXXX XXXX XX XXXXXXX XXX XXXXXXXXXXX XXXXXXXXXX XXX XXXX XXXXX XX XXXXX XXXXXX XXXXXX XXXX XXXX XXXX XX XXXX XX XXXXXXX XXXXXX XXXXX XXXX XXXXXXXXXXX
XXXXXXXXXX XXXXXXX XXX XX XXXXXXX XX XXX XXXXXXXXX XXXXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXX XXXXXXXXX XX XXX XXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXX
XXXXXX
XXXXXX
XXXXXXXXXX
XXXXXX
XXXXXXXXX
XXXX XXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXXXXXXX
XXXX XXXXXXXXXXXXXX
XXXXXXXXX XXXXXXXXXXXXX
XXXX XXXXXXXXXXXX
XXXX XXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXX XXXXXXXXXX XXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXX XXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXX
XXX XXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXX XXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXX XX XXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXX XXXXXXXXXXXXXXXXXX
XXXXXX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXXXX
XXXXX
XXXXXX
XXXXXX
XXXX XXXXXXXXXXXXXXXXX
XXXXXXXXX XXXXX XX XXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXXX XXX XXXX XXXXXXXXXXX XXXXX XXX XXX XXXXXXXXXX XXX XXXXXXXXXXXX
XXXXXXXXXXXXX XXXXX XXX X XXXX XXXXX XX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXX XXXXXX XXX XXXXXXXXXXXX XX XXXXXXXXXXXXXXX
XX XXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXX XXX XXXXXXX XX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXX XXXXXX XXXXXXXXXXXXX
XXXXXXXXXX XX XXXXXXXX XXXXX XXX XX XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX XXXXXXXXXXXXXXXX
XXXXXX
XXXXXX
XXXXXX
XXXXXXXXXX
XXXXXXX
XXXXXXX<|fim▁end|> | |
<|file_name|>MulitAuctionServiceImpl.java<|end_file_name|><|fim▁begin|>/* */ package com.hundsun.network.gates.wulin.biz.service.pojo.auction;
/* */
/* */ import com.hundsun.network.gates.luosi.biz.security.ServiceException;
/* */ import com.hundsun.network.gates.luosi.common.enums.EnumActiveStatus;
/* */ import com.hundsun.network.gates.luosi.common.enums.EnumBidCheckStatus;
/* */ import com.hundsun.network.gates.luosi.common.enums.EnumBidPriceStatus;
/* */ import com.hundsun.network.gates.luosi.common.enums.EnumOperatorType;
/* */ import com.hundsun.network.gates.luosi.common.remote.ServiceResult;
/* */ import com.hundsun.network.gates.luosi.wulin.reomte.enums.EnumAuctionErrors;
/* */ import com.hundsun.network.gates.luosi.wulin.reomte.request.AuctionMulitBidRequest;
/* */ import com.hundsun.network.gates.luosi.wulin.reomte.request.SystemMessageRequest;
/* */ import com.hundsun.network.gates.wulin.biz.dao.auction.AuctionBidderDAO;
/* */ import com.hundsun.network.gates.wulin.biz.dao.auction.AuctionFreeBidDAO;
/* */ import com.hundsun.network.gates.wulin.biz.dao.auction.AuctionHallDAO;
/* */ import com.hundsun.network.gates.wulin.biz.dao.auction.AuctionLogDAO;
/* */ import com.hundsun.network.gates.wulin.biz.domain.auction.AuctionBidder;
/* */ import com.hundsun.network.gates.wulin.biz.domain.auction.AuctionFreeBid;
/* */ import com.hundsun.network.gates.wulin.biz.domain.auction.AuctionLog;
/* */ import com.hundsun.network.gates.wulin.biz.domain.auction.AuctionMulitBidProject;
/* */ import com.hundsun.network.gates.wulin.biz.domain.query.AuctionMulitBidProjectQuery;
/* */ import com.hundsun.network.gates.wulin.biz.domain.query.MulitAuctionReviewQuery;
/* */ import com.hundsun.network.gates.wulin.biz.service.BaseService;
/* */ import com.hundsun.network.gates.wulin.biz.service.auction.MulitAuctionService;
/* */ import com.hundsun.network.gates.wulin.biz.service.message.SystemMessageService;
/* */ import com.hundsun.network.gates.wulin.biz.service.project.ProjectListingService;
/* */ import com.hundsun.network.melody.common.util.StringUtil;
/* */ import java.io.IOException;
/* */ import java.util.ArrayList;
/* */ import java.util.HashMap;
/* */ import java.util.List;
/* */ import java.util.Locale;
/* */ import org.apache.commons.logging.Log;
/* */ import org.codehaus.jackson.map.ObjectMapper;
/* */ import org.springframework.beans.factory.annotation.Autowired;
/* */ import org.springframework.context.MessageSource;
/* */ import org.springframework.stereotype.Service;
/* */ import org.springframework.transaction.TransactionStatus;
/* */ import org.springframework.transaction.support.TransactionCallback;
/* */ import org.springframework.transaction.support.TransactionTemplate;
/* */
/* */ @Service("mulitAuctionService")
/* */ public class MulitAuctionServiceImpl extends BaseService
/* */ implements MulitAuctionService
/* */ {
/* */
/* */ @Autowired
/* */ private ProjectListingService projectListingService;
/* */
/* */ @Autowired
/* */ private AuctionFreeBidDAO auctionFreeBidDAO;
/* */
/* */ @Autowired
/* */ private AuctionBidderDAO auctionBidderDAO;
/* */
/* */ @Autowired
/* */ private AuctionLogDAO auctionLogDAO;
/* */
/* */ @Autowired
/* */ private MessageSource messageSource;
/* */
/* */ @Autowired
/* */ private AuctionHallDAO auctionHallDAO;
/* */
/* */ @Autowired
/* */ private SystemMessageService systemMessageService;
/* */
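/* */   // Descriptive note (added, not decompiled output): review() rejects a
/* */   // bidder's free bid. It validates the request, loads the pending bid and
/* */   // bidder, then in one transaction records the rejected bid, removes the
/* */   // bidder, decrements the hall's priority count when applicable, sends a
/* */   // system message, and writes an audit log entry.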
/* */ public ServiceResult review(final AuctionMulitBidRequest request)
/* */ {
/* 70 */ ServiceResult serviceResult = new ServiceResult();
/* */
/* 72 */ if ((null == request) || (StringUtil.isEmpty(request.getBidderAccount())) || (StringUtil.isEmpty(request.getReviewer())) || (StringUtil.isEmpty(request.getProjectCode())) || (StringUtil.isEmpty(request.getRemark())))
/* */ {
/* 76 */ serviceResult.setErrorNOInfo(Integer.valueOf(EnumAuctionErrors.PARAMETER_ERROR.getValue()), EnumAuctionErrors.PARAMETER_ERROR.getInfo());
/* */
/* 78 */ return serviceResult;
/* */ }
/* 80 */ AuctionMulitBidProjectQuery query = new AuctionMulitBidProjectQuery();
/* 81 */ query.setReviewer(request.getReviewer());
/* 82 */ query.setProjectCode(request.getProjectCode());
/* 83 */ List projectList = this.projectListingService.queryAuctionMulitBidProjectUncheckedByProjectCode(query);
/* */
/* 86 */ if ((null == projectList) || (projectList.size() <= 0)) {
/* 87 */ serviceResult.setErrorNOInfo(Integer.valueOf(EnumAuctionErrors.CHECK_PROJECT_LISTING_NULL.getValue()), EnumAuctionErrors.CHECK_PROJECT_LISTING_NULL.getInfo());
/* */
/* 89 */ return serviceResult;
/* */ }
/* */
/* 92 */ AuctionFreeBid auctionFreeBid = queryTopUncheckFreeBid(request.getProjectCode(), request.getBidderAccount());
/* */
/* 94 */ if (null == auctionFreeBid) {
/* 95 */ serviceResult.setErrorNOInfo(Integer.valueOf(EnumAuctionErrors.PARAMETER_ERROR.getValue()), EnumAuctionErrors.PARAMETER_ERROR.getInfo());
/* */
/* 97 */ return serviceResult;
/* */ }
/* */
/* 100 */ AuctionBidder auctionBidder = this.auctionBidderDAO.selectNormalByBidderAccount(request.getProjectCode(), request.getBidderAccount());
/* */
/* 102 */ if (null == auctionBidder) {
/* 103 */ serviceResult.setErrorNOInfo(Integer.valueOf(EnumAuctionErrors.CHECK_BIDDER_NULL.getValue()), EnumAuctionErrors.CHECK_BIDDER_NULL.getInfo());
/* */
/* 105 */ return serviceResult;
/* */ }
/* 107 */ ObjectMapper mapper = new ObjectMapper();
/* 108 */ String auctionBidderJson = "";
/* */ try {
/* 110 */ auctionBidderJson = mapper.writeValueAsString(auctionBidder);
/* */ } catch (IOException e) {
/* 112 */ if (this.log.isErrorEnabled()) {
/* 113 */ this.log.error("convert auctionBidder to json format fail,", e);
/* */ }
/* */ }
/* 116 */ final String fAuctionBidderJson = auctionBidderJson;
/* 117 */ final AuctionFreeBid fAuctionFreeBid = auctionFreeBid;
/* 118 */ final String logRemark = getMessage("project.auction.mulitbid.review.log.remark", new String[] { request.getReviewer(), auctionBidder.getBidderAccount() });
/* */
/* 120 */ final AuctionBidder fAuctionBidder = auctionBidder;
/* 121 */ final AuctionMulitBidProject fAuctionMulitBidProject = (AuctionMulitBidProject)projectList.get(0);
/* */
/* 123 */ serviceResult = (ServiceResult)this.transactionTemplate.execute(new TransactionCallback() {
/* */ public ServiceResult doInTransaction(TransactionStatus status) {
/* 125 */ ServiceResult result = new ServiceResult();
/* 126 */ Object savePoint = status.createSavepoint();
/* */ try
/* */ {
/* 129 */ AuctionFreeBid auctionFreeBid = new AuctionFreeBid();
/* 130 */ auctionFreeBid.setBidderAccount(fAuctionFreeBid.getBidderAccount());
/* 131 */ auctionFreeBid.setBidderTrademark(fAuctionFreeBid.getBidderTrademark());
/* 132 */ auctionFreeBid.setBidOperatorAccount(fAuctionFreeBid.getBidOperatorAccount());
/* 133 */ auctionFreeBid.setCheckRemark(request.getRemark());
/* 134 */ auctionFreeBid.setCheckStatus(EnumBidCheckStatus.Fail.getValue());
/* 135 */ auctionFreeBid.setIp(fAuctionFreeBid.getIp());
/* 136 */ auctionFreeBid.setOperator(request.getOperator());
/* 137 */ auctionFreeBid.setPrice(fAuctionFreeBid.getPrice());
/* 138 */ auctionFreeBid.setProjectCode(request.getProjectCode());
/* 139 */ auctionFreeBid.setStatus(fAuctionFreeBid.getStatus());
/* 140 */ MulitAuctionServiceImpl.this.auctionFreeBidDAO.insert(auctionFreeBid);
/* */
/* 143 */ if (MulitAuctionServiceImpl.this.auctionBidderDAO.deleteByBidderAccount(request.getProjectCode(), request.getBidderAccount()) <= 0)
/* */ {
/* 145 */ throw new ServiceException(EnumAuctionErrors.REVIEW_DELETE_BIDDER_FAIL.getInfo(), Integer.valueOf(EnumAuctionErrors.REVIEW_DELETE_BIDDER_FAIL.getValue()));
/* */ }
/* */
/* 150 */ if (EnumActiveStatus.Yes.getValue().equals(fAuctionBidder.getIsPriority())) {
/* 151 */ HashMap actionHallMap = new HashMap();
/* 152 */ actionHallMap.put("priorityNumSub", Integer.valueOf(1));
/* 153 */ actionHallMap.put("whereProjectCode", request.getProjectCode());
/* 154 */ if (MulitAuctionServiceImpl.this.auctionHallDAO.updateByMap(actionHallMap) <= 0) {
/* 155 */ throw new ServiceException(EnumAuctionErrors.REVIEW_UPDATE_HALL_FALL.getInfo(), Integer.valueOf(EnumAuctionErrors.REVIEW_UPDATE_HALL_FALL.getValue()));
/* */ }
/* */
/* */ }
/* */
/* 172 */ SystemMessageRequest systemMessageRequest = new SystemMessageRequest();
/* 173 */ systemMessageRequest.setSendAccount(EnumOperatorType.SYSTEM.getValue());
/* 174 */ systemMessageRequest.setContent(MulitAuctionServiceImpl.this.getMessage("project.auction.mulitbid.review.message.content", new String[] { fAuctionMulitBidProject.getProjectTitle(), request.getRemark() }));
/* */
/* 177 */ systemMessageRequest.setTitle(MulitAuctionServiceImpl.this.getMessage("project.auction.mulitbid.review.message.title", new String[0]));
/* */
/* 179 */ List userAccountList = new ArrayList();
/* 180 */ userAccountList.add(fAuctionBidder.getBidderAccount());
/* 181 */ systemMessageRequest.setUserAccountList(userAccountList);
/* 182 */ MulitAuctionServiceImpl.this.systemMessageService.sendSystemMessage(systemMessageRequest);
/* */
/* 185 */ AuctionLog auctionLog = new AuctionLog();
/* 186 */ auctionLog.setDataJson(fAuctionBidderJson);
/* 187 */ auctionLog.setProjectCode(request.getProjectCode());
/* 188 */ auctionLog.setRemark(logRemark);
/* 189 */ auctionLog.setOperatorType(EnumOperatorType.REVIEWER.getValue());
/* 190 */ auctionLog.setOperator(request.getReviewer());
/* 191 */ MulitAuctionServiceImpl.this.auctionLogDAO.insert(auctionLog);
/* */ }
/* */ catch (ServiceException e) {
/* 194 */ status.rollbackToSavepoint(savePoint);
/* 195 */ MulitAuctionServiceImpl.this.log.error("MulitAuctionServiceImpl review fail", e);
/* 196 */ result.setErrorNO(e.getErrorNO());
/* 197 */ result.setErrorInfo(e.getErrorInfo());
/* */ } catch (Exception e) {
/* 199 */ status.rollbackToSavepoint(savePoint);
/* 200 */ MulitAuctionServiceImpl.this.log.error("MulitAuctionServiceImpl review error", e);
/* 201 */ result.setErrorNO(Integer.valueOf(EnumAuctionErrors.INTERNAL_ERROR.getValue()));
/* 202 */ result.setErrorInfo(EnumAuctionErrors.INTERNAL_ERROR.getInfo());
/* */ }
/* 204 */ return result;
/* */ }
/* */ });
/* 208 */ return serviceResult;
/* */ }
/* */
/* */ public AuctionFreeBid queryTopUncheckFreeBid(String projectCode, String bidderAccount)
/* */ {
/* 213 */ MulitAuctionReviewQuery query = new MulitAuctionReviewQuery();
/* 214 */ query.setBidderAccount(bidderAccount);
/* 215 */ query.setCheckStatus(EnumBidCheckStatus.Pass);
/* 216 */ query.setProjectCode(projectCode);
/* 217 */ query.setStatus(EnumBidPriceStatus.EFFECTIVE);
/* 218 */ return this.auctionFreeBidDAO.selectTopByMulitAuctionReviewQuery(query);
/* */ }
/* */
/* */ protected String getMessage(String code, String[] args) {
/* 222 */ return this.messageSource.getMessage(code, args, Locale.CHINA);
/* */ }
/* */ }
<|fim▁hole|>/* Location: E:\__安装归档\linquan-20161112\deploy16\wulin\webroot\WEB-INF\classes\
* Qualified Name: com.hundsun.network.gates.wulin.biz.service.pojo.auction.MulitAuctionServiceImpl
* JD-Core Version: 0.6.0
*/<|fim▁end|> | |
<|file_name|>db_wbx.py<|end_file_name|><|fim▁begin|>#
# Copyright 2007 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import usrp1
import time,math
from usrpm import usrp_dbid
import db_base
import db_instantiator
from usrpm.usrp_fpga_regs import *
#debug_using_gui = True # Must be set to True or False
debug_using_gui = False # Must be set to True or False
#if debug_using_gui:
# import flexrf_debug_gui
# d'board i/o pin defs
# TX IO Pins
TX_POWER = (1 << 0) # TX Side Power
RX_TXN = (1 << 1) # T/R antenna switch for TX/RX port
# RX IO Pins
RX2_RX1N = (1 << 0) # antenna switch between RX2 and TX/RX port
RXENABLE = (1 << 1) # enables mixer
PLL_LOCK_DETECT = (1 << 2) # Muxout pin from PLL -- MUST BE INPUT
MReset = (1 << 3) # NB6L239 Master Reset, asserted low
SELA0 = (1 << 4) # NB6L239 SelA0
SELA1 = (1 << 5) # NB6L239 SelA1
SELB0 = (1 << 6) # NB6L239 SelB0
SELB1 = (1 << 7) # NB6L239 SelB1
PLL_ENABLE = (1 << 8) # CE Pin on PLL
AUX_SCLK = (1 << 9) # ALT SPI SCLK
AUX_SDO = (1 << 10) # ALT SPI SDO
AUX_SEN = (1 << 11) # ALT SPI SEN
SPI_ENABLE_TX_A = usrp1.SPI_ENABLE_TX_A
SPI_ENABLE_TX_B = usrp1.SPI_ENABLE_TX_B
SPI_ENABLE_RX_A = usrp1.SPI_ENABLE_RX_A
SPI_ENABLE_RX_B = usrp1.SPI_ENABLE_RX_B
"""
A few comments about the WBX boards:
They are half-duplex. I.e., transmit and receive are mutually exclusive.
There is a single LO for both the Tx and Rx sides.
The shared control signals are hung off of the Rx side.
The shared io controls are duplexed onto the Rx side pins.
The wbx_high d'board always needs to be in 'auto_tr_mode'
"""
class wbx_base(db_base.db_base):
"""
Abstract base class for all wbx boards.
    Derive board specific subclasses from wbx_base_{tx,rx}
"""
def __init__(self, usrp, which):
"""
@param usrp: instance of usrp.source_c
@param which: which side: 0 or 1 corresponding to side A or B respectively
@type which: int
"""
# sets _u _which _tx and _slot
db_base.db_base.__init__(self, usrp, which)
self.first = True
self.spi_format = usrp1.SPI_FMT_MSB | usrp1.SPI_FMT_HDR_0
# FIXME -- the write reg functions don't work with 0xffff for masks
self._rx_write_oe(int(PLL_ENABLE|MReset|SELA0|SELA1|SELB0|SELB1|RX2_RX1N|RXENABLE), 0x7fff)
self._rx_write_io((PLL_ENABLE|MReset|0|RXENABLE), (PLL_ENABLE|MReset|RX2_RX1N|RXENABLE))
self._tx_write_oe((TX_POWER|RX_TXN), 0x7fff)
self._tx_write_io((0|RX_TXN), (TX_POWER|RX_TXN)) # TX off, TR switch set to RX
self.spi_enable = (SPI_ENABLE_RX_A, SPI_ENABLE_RX_B)[which]
self.set_auto_tr(False)
#if debug_using_gui:
# title = "FlexRF Debug Rx"
# if self._tx:
# title = "FlexRF Debug Tx"
# self.gui = flexrf_debug_gui.flexrf_debug_gui(self, title)
# self.gui.Show(True)
def __del__(self):
#self._u.write_io(self._which, self.power_off, POWER_UP) # turn off power to board
#self._u._write_oe(self._which, 0, 0xffff) # turn off all outputs
self.set_auto_tr(False)
def _lock_detect(self):
"""
@returns: the value of the VCO/PLL lock detect bit.
        @rtype: bool
"""
if self._rx_read_io() & PLL_LOCK_DETECT:
return True
else: # Give it a second chance
if self._rx_read_io() & PLL_LOCK_DETECT:
return True
else:
return False
# Both sides need access to the Rx pins.
# Write them directly, bypassing the convenience routines.
# (Sort of breaks modularity, but will work...)
def _tx_write_oe(self, value, mask):
return self._u._write_fpga_reg((FR_OE_0, FR_OE_2)[self._which],
((mask & 0xffff) << 16) | (value & 0xffff))
def _rx_write_oe(self, value, mask):
return self._u._write_fpga_reg((FR_OE_1, FR_OE_3)[self._which],
((mask & 0xffff) << 16) | (value & 0xffff))
def _tx_write_io(self, value, mask):
return self._u._write_fpga_reg((FR_IO_0, FR_IO_2)[self._which],
((mask & 0xffff) << 16) | (value & 0xffff))
def _rx_write_io(self, value, mask):
return self._u._write_fpga_reg((FR_IO_1, FR_IO_3)[self._which],
((mask & 0xffff) << 16) | (value & 0xffff))
def _rx_read_io(self):
t = self._u._read_fpga_reg((FR_RB_IO_RX_A_IO_TX_A, FR_RB_IO_RX_B_IO_TX_B)[self._which])
return (t >> 16) & 0xffff
def _tx_read_io(self):
t = self._u._read_fpga_reg((FR_RB_IO_RX_A_IO_TX_A, FR_RB_IO_RX_B_IO_TX_B)[self._which])
return t & 0xffff
def _compute_regs(self, freq):
"""
Determine values of registers, along with actual freq.
@param freq: target frequency in Hz
@type freq: float
@returns: (R, N, func, init, actual_freq)
@rtype: tuple(int, int, int, int, float)
Override this in derived classes.
"""
raise NotImplementedError
def _refclk_freq(self):
return float(self._u.fpga_master_clock_freq())/self._refclk_divisor()
def _refclk_divisor(self):
"""<|fim▁hole|>
# ----------------------------------------------------------------
def set_freq(self, freq):
"""
@returns (ok, actual_baseband_freq) where:
ok is True or False and indicates success or failure,
actual_baseband_freq is the RF frequency that corresponds to DC in the IF.
"""
raise NotImplementedError
def gain_range(self):
"""
Return range of gain that can be set by this d'board.
@returns (min_gain, max_gain, step_size)
Where gains are expressed in decibels (your mileage may vary)
"""
raise NotImplementedError
def set_gain(self, gain):
"""
Set the gain.
@param gain: gain in decibels
@returns True/False
"""
raise NotImplementedError
def _set_pga(self, pga_gain):
if(self._which == 0):
self._u.set_pga (0, pga_gain)
self._u.set_pga (1, pga_gain)
else:
self._u.set_pga (2, pga_gain)
self._u.set_pga (3, pga_gain)
def is_quadrature(self):
"""
Return True if this board requires both I & Q analog channels.
This bit of info is useful when setting up the USRP Rx mux register.
"""
return True
# ----------------------------------------------------------------
class wbx_base_tx(wbx_base):
def __init__(self, usrp, which):
"""
@param usrp: instance of usrp.sink_c
@param which: 0 or 1 corresponding to side TX_A or TX_B respectively.
"""
wbx_base.__init__(self, usrp, which)
# power up the transmit side, NO -- but set antenna to receive
self._u.write_io(self._which, (TX_POWER), (TX_POWER|RX_TXN))
self._lo_offset = 0e6
# Gain is not set by the PGA, but the PGA must be set at max gain in the TX
        self._set_pga(self._u.pga_max())
def __del__(self):
# Power down and leave the T/R switch in the R position
self._u.write_io(self._which, (RX_TXN), (TX_POWER|RX_TXN))
wbx_base.__del__(self)
def set_auto_tr(self, on):
if on:
self.set_atr_mask (RX_TXN)
self.set_atr_txval(0)
self.set_atr_rxval(RX_TXN)
else:
self.set_atr_mask (0)
self.set_atr_txval(0)
self.set_atr_rxval(0)
def set_enable(self, on):
"""
Enable transmitter if on is True
"""
if on:
v = 0
else:
v = RX_TXN
self._u.write_io(self._which, v, RX_TXN)
def set_lo_offset(self, offset):
"""
Set amount by which LO is offset from requested tuning frequency.
@param offset: offset in Hz
"""
self._lo_offset = offset
def lo_offset(self):
"""
Get amount by which LO is offset from requested tuning frequency.
@returns Offset in Hz
"""
return self._lo_offset
class wbx_base_rx(wbx_base):
def __init__(self, usrp, which):
"""
@param usrp: instance of usrp.source_c
@param which: 0 or 1 corresponding to side RX_A or RX_B respectively.
"""
wbx_base.__init__(self, usrp, which)
# set up for RX on TX/RX port
self.select_rx_antenna('TX/RX')
self.bypass_adc_buffers(True)
self._lo_offset = -4e6
def __del__(self):
# Power down
self._u.write_io(self._which, 0, (RXENABLE))
wbx_base.__del__(self)
def set_auto_tr(self, on):
if on:
            self.set_atr_mask (RXENABLE)
            self.set_atr_txval(0)
            self.set_atr_rxval(RXENABLE)
else:
self.set_atr_mask (0)
self.set_atr_txval(0)
self.set_atr_rxval(0)
def select_rx_antenna(self, which_antenna):
"""
Specify which antenna port to use for reception.
@param which_antenna: either 'TX/RX' or 'RX2'
"""
if which_antenna in (0, 'TX/RX'):
self._u.write_io(self._which, 0, RX2_RX1N)
elif which_antenna in (1, 'RX2'):
self._u.write_io(self._which, RX2_RX1N, RX2_RX1N)
else:
raise ValueError, "which_antenna must be either 'TX/RX' or 'RX2'"
def set_gain(self, gain):
"""
Set the gain.
@param gain: gain in decibels
@returns True/False
"""
maxgain = self.gain_range()[1] - self._u.pga_max()
mingain = self.gain_range()[0]
if gain > maxgain:
pga_gain = gain-maxgain
assert pga_gain <= self._u.pga_max()
agc_gain = maxgain
else:
pga_gain = 0
agc_gain = gain
V_maxgain = .2
V_mingain = 1.2
V_fullscale = 3.3
dac_value = (agc_gain*(V_maxgain-V_mingain)/(maxgain-mingain) + V_mingain)*4096/V_fullscale
assert dac_value>=0 and dac_value<4096
return self._u.write_aux_dac(self._which, 0, int(dac_value)) and \
self._set_pga(int(pga_gain))
def set_lo_offset(self, offset):
"""
Set amount by which LO is offset from requested tuning frequency.
@param offset: offset in Hz
"""
self._lo_offset = offset
def lo_offset(self):
"""
Get amount by which LO is offset from requested tuning frequency.
@returns Offset in Hz
"""
return self._lo_offset
def i_and_q_swapped(self):
"""
Return True if this is a quadrature device and ADC 0 is Q.
"""
return True
# ----------------------------------------------------------------
class _ADF410X_common(object):
def __init__(self):
# R-Register Common Values
self.R_RSV = 0 # bits 23,22,21
self.LDP = 1 # bit 20 Lock detect in 5 cycles
self.TEST = 0 # bit 19,18 Normal
self.ABP = 0 # bit 17,16 2.9ns
# N-Register Common Values
self.N_RSV = 0 # 23,22
self.CP_GAIN = 0 # 21
# Function Register Common Values
self.P = 0 # bits 23,22 0 = 8/9, 1 = 16/17, 2 = 32/33, 3 = 64/65
self.PD2 = 0 # bit 21 Normal operation
self.CP2 = 7 # bits 20,19,18 CP Gain = 5mA
self.CP1 = 7 # bits 17,16,15 CP Gain = 5mA
self.TC = 0 # bits 14-11 PFD Timeout
self.FL = 0 # bit 10,9 Fastlock Disabled
self.CP3S = 0 # bit 8 CP Enabled
self.PDP = 0 # bit 7 Phase detector polarity, Positive=1
self.MUXOUT = 1 # bits 6:4 Digital Lock Detect
self.PD1 = 0 # bit 3 Normal operation
self.CR = 0 # bit 2 Normal operation
def _compute_regs(self, freq):
"""
Determine values of R, control, and N registers, along with actual freq.
@param freq: target frequency in Hz
@type freq: float
@returns: (R, N, control, actual_freq)
@rtype: tuple(int, int, int, float)
"""
# Band-specific N-Register Values
phdet_freq = self._refclk_freq()/self.R_DIV
print "phdet_freq = %f" % (phdet_freq,)
desired_n = round(freq*self.freq_mult/phdet_freq)
print "desired_n %f" % (desired_n,)
actual_freq = desired_n * phdet_freq
print "actual freq %f" % (actual_freq,)
B = math.floor(desired_n/self._prescaler())
A = desired_n - self._prescaler()*B
print "A %d B %d" % (A,B)
self.B_DIV = int(B) # bits 20:8
self.A_DIV = int(A) # bit 6:2
#assert self.B_DIV >= self.A_DIV
if self.B_DIV < self.A_DIV:
return (0,0,0,0)
R = (self.R_RSV<<21) | (self.LDP<<20) | (self.TEST<<18) | \
(self.ABP<<16) | (self.R_DIV<<2)
N = (self.N_RSV<<22) | (self.CP_GAIN<<21) | (self.B_DIV<<8) | (self.A_DIV<<2)
control = (self.P<<22) | (self.PD2<<21) | (self.CP2<<18) | (self.CP1<<15) | \
(self.TC<<11) | (self.FL<<9) | (self.CP3S<<8) | (self.PDP<<7) | \
(self.MUXOUT<<4) | (self.PD1<<3) | (self.CR<<2)
return (R,N,control,actual_freq/self.freq_mult)
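    # Worked example (illustrative; clock value assumed): with a 64 MHz
    # master clock and R_DIV = 4, phdet_freq = 16 MHz. A 400 MHz target
    # gives desired_n = round(400e6/16e6) = 25; with the 8/9 prescaler
    # (P = 0), B = floor(25/8) = 3 and A = 25 - 8*3 = 1, so the divider
    # chain realizes N = 8*3 + 1 = 25 and actual_freq = 25 * 16 MHz,
    # i.e. exactly 400 MHz.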
def _write_all(self, R, N, control):
"""
Write all PLL registers:
R counter latch,
N counter latch,
Function latch,
Initialization latch
Adds 10ms delay between writing control and N if this is first call.
This is the required power-up sequence.
@param R: 24-bit R counter latch
@type R: int
@param N: 24-bit N counter latch
@type N: int
@param control: 24-bit control latch
@type control: int
"""
self._write_R(R)
self._write_func(control)
self._write_init(control)
if self.first:
time.sleep(0.010)
self.first = False
self._write_N(N)
def _write_R(self, R):
self._write_it((R & ~0x3) | 0)
def _write_N(self, N):
self._write_it((N & ~0x3) | 1)
def _write_func(self, func):
self._write_it((func & ~0x3) | 2)
def _write_init(self, init):
self._write_it((init & ~0x3) | 3)
def _write_it(self, v):
s = ''.join((chr((v >> 16) & 0xff),
chr((v >> 8) & 0xff),
chr(v & 0xff)))
self._u._write_spi(0, self.spi_enable, self.spi_format, s)
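    # Note on the latch framing used above: the two LSBs of each 24-bit
    # word select the destination latch (0 = R counter, 1 = N counter,
    # 2 = function, 3 = initialization), which is why each _write_*
    # helper masks with ~0x3 before OR-ing in its latch address. A value
    # such as 0x123456 is shifted out MSB-first as the bytes 12 34 56.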
def _prescaler(self):
if self.P == 0:
return 8
elif self.P == 1:
return 16
elif self.P == 2:
return 32
elif self.P == 3:
return 64
else:
raise ValueError, "Prescaler out of range"
#----------------------------------------------------------------------
class _lo_common(_ADF410X_common):
def __init__(self):
_ADF410X_common.__init__(self)
# Band-specific R-Register Values
self.R_DIV = 4 # bits 15:2
# Band-specific C-Register values
self.P = 0 # bits 23,22 0 = Div by 8/9
self.CP2 = 7 # bits 19:17
self.CP1 = 7 # bits 16:14
        # Band-specific N-Register Values
self.DIVSEL = 0 # bit 23
self.DIV2 = 0 # bit 22
self.CPGAIN = 0 # bit 21
self.freq_mult = 1
        self.main_div = 0
        self.aux_div = 2
def freq_range(self): # FIXME
return (50e6, 1000e6, 16e6)
def set_divider(self, main_or_aux, divisor):
if main_or_aux not in (0, 'main', 1, 'aux'):
raise ValueError, "main_or_aux must be 'main' or 'aux'"
if main_or_aux in (0, 'main'):
if divisor not in (1,2,4,8):
raise ValueError, "Main Divider Must be 1, 2, 4, or 8"
for (div,val) in ((1,0),(2,1),(4,2),(8,3)):
if(div == divisor):
self.main_div = val
else:
if divisor not in (2,4,8,16):
raise ValueError, "Aux Divider Must be 2, 4, 8 or 16"
for (div,val) in ((2,0),(4,1),(8,2),(16,3)):
if(div == divisor):
self.aux_div = val
        vala = self.main_div*SELA0
        valb = self.aux_div*SELB0
        mask = SELA0|SELA1|SELB0|SELB1
        self._rx_write_io(vala | valb, mask)
def set_freq(self, freq):
#freq += self._lo_offset
if(freq < 20e6 or freq > 1200e6):
raise ValueError, "Requested frequency out of range"
div = 1
lo_freq = freq * 2
while lo_freq < 1e9 and div < 8:
div = div * 2
lo_freq = lo_freq * 2
print "For RF freq of %f, we set DIV=%d and LO Freq=%f" % (freq, div, lo_freq)
self.set_divider('main', div)
self.set_divider('aux', div*2)
R, N, control, actual_freq = self._compute_regs(lo_freq)
print "R %d N %d control %d actual freq %f" % (R,N,control,actual_freq)
if R==0:
return(False,0)
self._write_all(R, N, control)
return (self._lock_detect(), actual_freq/div/2)
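    # Worked example (illustrative): for freq = 400 MHz, lo_freq starts
    # at 800 MHz; one pass through the loop gives div = 2 and
    # lo_freq = 1.6 GHz, so the synthesizer runs at 1.6 GHz and the
    # divided output lands back at actual_freq/div/2 = 400 MHz.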
#------------------------------------------------------------
class db_wbx_lo_tx(_lo_common, wbx_base_tx):
def __init__(self, usrp, which):
wbx_base_tx.__init__(self, usrp, which)
_lo_common.__init__(self)
def gain_range(self):
"""
Return range of gain that can be set by this d'board.
@returns (min_gain, max_gain, step_size)
Where gains are expressed in decibels (your mileage may vary)
Gain is controlled by a VGA in the output amplifier, not the PGA
"""
return (-56, 0, 0.1)
def set_gain(self, gain):
"""
Set the gain.
@param gain: gain in decibels
@returns True/False
"""
maxgain = self.gain_range()[1]
mingain = self.gain_range()[0]
if gain > maxgain:
txvga_gain = maxgain
elif gain < mingain:
txvga_gain = mingain
else:
txvga_gain = gain
V_maxgain = 1.4
V_mingain = 0.1
V_fullscale = 3.3
dac_value = ((txvga_gain-mingain)*(V_maxgain-V_mingain)/(maxgain-mingain) + V_mingain)*4096/V_fullscale
assert dac_value>=0 and dac_value<4096
print "DAC value %d" % (dac_value,)
return self._u.write_aux_dac(self._which, 1, int(dac_value))
class db_wbx_lo_rx(_lo_common, wbx_base_rx):
def __init__(self, usrp, which):
wbx_base_rx.__init__(self, usrp, which)
_lo_common.__init__(self)
def gain_range(self):
"""
Return range of gain that can be set by this d'board.
@returns (min_gain, max_gain, step_size)
Where gains are expressed in decibels (your mileage may vary)
"""
return (self._u.pga_min(), self._u.pga_max() + 45, 0.05)
#------------------------------------------------------------
# hook these daughterboard classes into the auto-instantiation framework
db_instantiator.add(usrp_dbid.WBX_LO_TX, lambda usrp, which : (db_wbx_lo_tx(usrp, which),))
db_instantiator.add(usrp_dbid.WBX_LO_RX, lambda usrp, which : (db_wbx_lo_rx(usrp, which),))<|fim▁end|> | Return value to stick in REFCLK_DIVISOR register
"""
return 1 |
<|file_name|>package.py<|end_file_name|><|fim▁begin|>##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Lzma(AutotoolsPackage):
"""LZMA Utils are legacy data compression software with high compression
ratio. LZMA Utils are no longer developed, although critical bugs may be<|fim▁hole|> tools of LZMA Utils. This should make transition from LZMA Utils to XZ
Utils relatively easy."""
homepage = "http://tukaani.org/lzma/"
url = "http://tukaani.org/lzma/lzma-4.32.7.tar.gz"
version('4.32.7', '2a748b77a2f8c3cbc322dbd0b4c9d06a')<|fim▁end|> | fixed as long as fixing them doesn't require huge changes to the code.
Users of LZMA Utils should move to XZ Utils. XZ Utils support the legacy
.lzma format used by LZMA Utils, and can also emulate the command line |
<|file_name|>TorManager.py<|end_file_name|><|fim▁begin|>import logging
import re
import socket
import binascii
import sys
import os
import time
import gevent
import subprocess
import atexit
from Config import config
from Crypt import CryptRsa
from Site import SiteManager
from lib.PySocks import socks
from gevent.coros import RLock
from util import helper
from Debug import Debug
class TorManager:
def __init__(self, fileserver_ip=None, fileserver_port=None):
self.privatekeys = {} # Onion: Privatekey
self.site_onions = {} # Site address: Onion
self.tor_exe = "tools/tor/tor.exe"
self.tor_process = None
self.log = logging.getLogger("TorManager")
self.start_onions = None
self.conn = None
self.lock = RLock()
if config.tor == "disable":
self.enabled = False
self.start_onions = False
self.status = "Disabled"
else:
self.enabled = True
self.status = "Waiting"
if fileserver_port:
self.fileserver_port = fileserver_port
else:
self.fileserver_port = config.fileserver_port
self.ip, self.port = config.tor_controller.split(":")
self.port = int(self.port)
self.proxy_ip, self.proxy_port = config.tor_proxy.split(":")
self.proxy_port = int(self.proxy_port)
# Test proxy port
if config.tor != "disable":
try:
assert self.connect(), "No connection"
self.log.debug("Tor proxy port %s check ok" % config.tor_proxy)
except Exception, err:
self.log.debug("Tor proxy port %s check error: %s" % (config.tor_proxy, err))
self.enabled = False
# Change to self-bundled Tor ports
from lib.PySocks import socks
self.port = 49051
self.proxy_port = 49050
socks.setdefaultproxy(socks.PROXY_TYPE_SOCKS5, "127.0.0.1", self.proxy_port)
                if os.path.isfile(self.tor_exe): # Already downloaded: sync mode
self.startTor()
else: # Not downloaded yet: Async mode
gevent.spawn(self.startTor)
def startTor(self):
if sys.platform.startswith("win"):
try:
if not os.path.isfile(self.tor_exe):
self.downloadTor()
self.log.info("Starting Tor client %s..." % self.tor_exe)
tor_dir = os.path.dirname(self.tor_exe)
self.tor_process = subprocess.Popen(r"%s -f torrc" % self.tor_exe, cwd=tor_dir, close_fds=True)
for wait in range(1,10): # Wait for startup
time.sleep(wait * 0.5)
self.enabled = True
if self.connect():
break
# Terminate on exit
atexit.register(self.stopTor)
except Exception, err:
self.log.error("Error starting Tor client: %s" % Debug.formatException(err))
self.enabled = False
return False
def stopTor(self):
self.log.debug("Stopping...")
self.tor_process.terminate()
def downloadTor(self):
self.log.info("Downloading Tor...")
# Check Tor webpage for link
download_page = helper.httpRequest("https://www.torproject.org/download/download.html").read()
download_url = re.search('href="(.*?tor.*?win32.*?zip)"', download_page).group(1)
if not download_url.startswith("http"):
download_url = "https://www.torproject.org/download/" + download_url
# Download Tor client
self.log.info("Downloading %s" % download_url)
data = helper.httpRequest(download_url, as_file=True)
data_size = data.tell()
# Handle redirect
if data_size < 1024 and "The document has moved" in data.getvalue():
download_url = re.search('href="(.*?tor.*?win32.*?zip)"', data.getvalue()).group(1)
data = helper.httpRequest(download_url, as_file=True)
data_size = data.tell()
if data_size > 1024:
import zipfile
zip = zipfile.ZipFile(data)
self.log.info("Unpacking Tor")
for inner_path in zip.namelist():
if ".." in inner_path:
continue
dest_path = inner_path
dest_path = re.sub("^Data/Tor/", "tools/tor/data/", dest_path)
dest_path = re.sub("^Data/", "tools/tor/data/", dest_path)
dest_path = re.sub("^Tor/", "tools/tor/", dest_path)
dest_dir = os.path.dirname(dest_path)
if dest_dir and not os.path.isdir(dest_dir):
os.makedirs(dest_dir)
if dest_dir != dest_path.strip("/"):
data = zip.read(inner_path)
if not os.path.isfile(dest_path):
open(dest_path, 'wb').write(data)
else:
self.log.error("Bad response from server: %s" % data.getvalue())
return False
def connect(self):
if not self.enabled:
return False
self.site_onions = {}
self.privatekeys = {}
if "socket_noproxy" in dir(socket): # Socket proxy-patched, use non-proxy one
conn = socket.socket_noproxy(socket.AF_INET, socket.SOCK_STREAM)
else:
conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.log.debug("Connecting to %s:%s" % (self.ip, self.port))
try:
with self.lock:
conn.connect((self.ip, self.port))
res_protocol = self.send("PROTOCOLINFO", conn)
version = re.search('Tor="([0-9\.]+)"', res_protocol).group(1)
# Version 0.2.7.5 required because ADD_ONION support
assert int(version.replace(".", "0")) >= 20705, "Tor version >=0.2.7.5 required"
# Auth cookie file
cookie_match = re.search('COOKIEFILE="(.*?)"', res_protocol)
if cookie_match:
cookie_file = cookie_match.group(1)
auth_hex = binascii.b2a_hex(open(cookie_file, "rb").read())
res_auth = self.send("AUTHENTICATE %s" % auth_hex, conn)
else:
res_auth = self.send("AUTHENTICATE", conn)
assert "250 OK" in res_auth, "Authenticate error %s" % res_auth<|fim▁hole|> except Exception, err:
self.conn = None
self.status = "Error (%s)" % err
self.log.error("Tor controller connect error: %s" % err)
self.enabled = False
return self.conn
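    # Typical control-port exchange (illustrative transcript; the exact
    # fields vary by Tor version and configuration):
    # > PROTOCOLINFO
    # < 250-AUTH METHODS=COOKIE,SAFECOOKIE COOKIEFILE="/run/tor/control.authcookie"
    # < 250-VERSION Tor="0.2.7.6"
    # < 250 OK
    # > AUTHENTICATE <hex-encoded cookie>
    # < 250 OK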
def disconnect(self):
self.conn.close()
self.conn = None
def startOnions(self):
self.log.debug("Start onions")
self.start_onions = True
# Get new exit node ip
def resetCircuits(self):
res = self.request("SIGNAL NEWNYM")
if "250 OK" not in res:
self.status = "Reset circuits error (%s)" % res
self.log.error("Tor reset circuits error: %s" % res)
def addOnion(self):
res = self.request("ADD_ONION NEW:RSA1024 port=%s" % self.fileserver_port)
match = re.search("ServiceID=([A-Za-z0-9]+).*PrivateKey=RSA1024:(.*?)[\r\n]", res, re.DOTALL)
if match:
onion_address, onion_privatekey = match.groups()
self.privatekeys[onion_address] = onion_privatekey
self.status = "OK (%s onion running)" % len(self.privatekeys)
SiteManager.peer_blacklist.append((onion_address + ".onion", self.fileserver_port))
return onion_address
else:
self.status = "AddOnion error (%s)" % res
self.log.error("Tor addOnion error: %s" % res)
return False
def delOnion(self, address):
res = self.request("DEL_ONION %s" % address)
if "250 OK" in res:
del self.privatekeys[address]
self.status = "OK (%s onion running)" % len(self.privatekeys)
return True
else:
self.status = "DelOnion error (%s)" % res
self.log.error("Tor delOnion error: %s" % res)
self.disconnect()
return False
def request(self, cmd):
with self.lock:
if not self.enabled:
return False
if not self.conn:
if not self.connect():
return ""
return self.send(cmd)
def send(self, cmd, conn=None):
if not conn:
conn = self.conn
self.log.debug("> %s" % cmd)
conn.send("%s\r\n" % cmd)
back = conn.recv(1024 * 64)
self.log.debug("< %s" % back.strip())
return back
def getPrivatekey(self, address):
return self.privatekeys[address]
def getPublickey(self, address):
return CryptRsa.privatekeyToPublickey(self.privatekeys[address])
def getOnion(self, site_address):
with self.lock:
if not self.enabled:
return None
if self.start_onions: # Different onion for every site
onion = self.site_onions.get(site_address)
else: # Same onion for every site
onion = self.site_onions.get("global")
site_address = "global"
if not onion:
self.site_onions[site_address] = self.addOnion()
onion = self.site_onions[site_address]
self.log.debug("Created new hidden service for %s: %s" % (site_address, onion))
return onion
def createSocket(self, onion, port):
if not self.enabled:
return False
self.log.debug("Creating new socket to %s:%s" % (onion, port))
if config.tor == "always": # Every socket is proxied by default
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((onion, int(port)))
else:
sock = socks.socksocket()
sock.set_proxy(socks.SOCKS5, self.proxy_ip, self.proxy_port)
sock.connect((onion, int(port)))
return sock<|fim▁end|> | self.status = "Connected (%s)" % res_auth
self.conn = conn |
<|file_name|>model_delete_server_metadata_response.go<|end_file_name|><|fim▁begin|>package model
import (
"k8s.io/autoscaler/cluster-autoscaler/cloudprovider/huaweicloud/huaweicloud-sdk-go-v3/core/utils"
"strings"
)<|fim▁hole|>type DeleteServerMetadataResponse struct {
HttpStatusCode int `json:"-"`
}
func (o DeleteServerMetadataResponse) String() string {
data, err := utils.Marshal(o)
if err != nil {
return "DeleteServerMetadataResponse struct{}"
}
return strings.Join([]string{"DeleteServerMetadataResponse", string(data)}, " ")
}<|fim▁end|> |
// Response Object |
<|file_name|>mail_compose_message.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010-Today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import base64
import re
from openerp import tools
from openerp.osv import osv
from openerp.osv import fields
from openerp.tools.safe_eval import safe_eval as eval
from openerp.tools.translate import _
# main mako-like expression pattern
EXPRESSION_PATTERN = re.compile('(\$\{.+?\})')
class mail_compose_message(osv.TransientModel):
""" Generic message composition wizard. You may inherit from this wizard
at model and view levels to provide specific features.
The behavior of the wizard depends on the composition_mode field:
- 'reply': reply to a previous message. The wizard is pre-populated
via ``get_message_data``.
- 'comment': new post on a record. The wizard is pre-populated via
``get_record_data``
- 'mass_mail': wizard in mass mailing mode where the mail details can
contain template placeholders that will be merged with actual data
before being sent to each recipient.
"""
_name = 'mail.compose.message'
_inherit = 'mail.message'
_description = 'Email composition wizard'
_log_access = True
def default_get(self, cr, uid, fields, context=None):
""" Handle composition mode. Some details about context keys:
- comment: default mode, model and ID of a record the user comments
- default_model or active_model
- default_res_id or active_id
- reply: active_id of a message the user replies to
- default_parent_id or message_id or active_id: ID of the
mail.message we reply to
- message.res_model or default_model
- message.res_id or default_res_id
- mass_mail: model and IDs of records the user mass-mails
- active_ids: record IDs
- default_model or active_model
"""
if context is None:
context = {}
result = super(mail_compose_message, self).default_get(cr, uid, fields, context=context)
# get some important values from context
composition_mode = context.get('default_composition_mode', context.get('mail.compose.message.mode'))
model = context.get('default_model', context.get('active_model'))
res_id = context.get('default_res_id', context.get('active_id'))
message_id = context.get('default_parent_id', context.get('message_id', context.get('active_id')))
active_ids = context.get('active_ids')
# get default values according to the composition mode
if composition_mode == 'reply':
vals = self.get_message_data(cr, uid, message_id, context=context)
elif composition_mode == 'comment' and model and res_id:
vals = self.get_record_data(cr, uid, model, res_id, context=context)
elif composition_mode == 'mass_mail' and model and active_ids:
vals = {'model': model, 'res_id': res_id}
else:
vals = {'model': model, 'res_id': res_id}
if composition_mode:
vals['composition_mode'] = composition_mode
for field in vals:
if field in fields:
result[field] = vals[field]
# TDE HACK: as mailboxes used default_model='res.users' and default_res_id=uid
# (because of lack of an accessible pid), creating a message on its own
# profile may crash (res_users does not allow writing on it)
# Posting on its own profile works (res_users redirect to res_partner)
# but when creating the mail.message to create the mail.compose.message
# access rights issues may rise
# We therefore directly change the model and res_id
if result.get('model') == 'res.users' and result.get('res_id') == uid:
result['model'] = 'res.partner'
result['res_id'] = self.pool.get('res.users').browse(cr, uid, uid).partner_id.id
return result
def _get_composition_mode_selection(self, cr, uid, context=None):
return [('comment', 'Comment a document'), ('reply', 'Reply to a message'), ('mass_mail', 'Mass mailing')]
_columns = {
'composition_mode': fields.selection(
lambda s, *a, **k: s._get_composition_mode_selection(*a, **k),
string='Composition mode'),
'partner_ids': fields.many2many('res.partner',
'mail_compose_message_res_partner_rel',
'wizard_id', 'partner_id', 'Additional contacts'),
'attachment_ids': fields.many2many('ir.attachment',
'mail_compose_message_ir_attachments_rel',
'wizard_id', 'attachment_id', 'Attachments'),
'filter_id': fields.many2one('ir.filters', 'Filters'),
}
_defaults = {
'composition_mode': 'comment',
'body': lambda self, cr, uid, ctx={}: '',
'subject': lambda self, cr, uid, ctx={}: False,
'partner_ids': lambda self, cr, uid, ctx={}: [],<|fim▁hole|>
def _notify(self, cr, uid, newid, context=None):
""" Override specific notify method of mail.message, because we do
not want that feature in the wizard. """
return
def get_record_data(self, cr, uid, model, res_id, context=None):
""" Returns a defaults-like dict with initial values for the composition
wizard when sending an email related to the document record
identified by ``model`` and ``res_id``.
:param str model: model name of the document record this mail is
related to.
:param int res_id: id of the document record this mail is related to
"""
doc_name_get = self.pool.get(model).name_get(cr, uid, [res_id], context=context)
if doc_name_get:
record_name = doc_name_get[0][1]
else:
record_name = False
return {'model': model, 'res_id': res_id, 'record_name': record_name}
def get_message_data(self, cr, uid, message_id, context=None):
""" Returns a defaults-like dict with initial values for the composition
wizard when replying to the given message (e.g. including the quote
of the initial message, and the correct recipients).
:param int message_id: id of the mail.message to which the user
is replying.
"""
if not message_id:
return {}
if context is None:
context = {}
message_data = self.pool.get('mail.message').browse(cr, uid, message_id, context=context)
# create subject
re_prefix = _('Re:')
reply_subject = tools.ustr(message_data.subject or '')
if not (reply_subject.startswith('Re:') or reply_subject.startswith(re_prefix)) and message_data.subject:
reply_subject = "%s %s" % (re_prefix, reply_subject)
# get partner_ids from original message
partner_ids = [partner.id for partner in message_data.partner_ids] if message_data.partner_ids else []
partner_ids += context.get('default_partner_ids', [])
# update the result
result = {
'record_name': message_data.record_name,
'model': message_data.model,
'res_id': message_data.res_id,
'parent_id': message_data.id,
'subject': reply_subject,
'partner_ids': partner_ids,
}
return result
#------------------------------------------------------
# Wizard validation and send
#------------------------------------------------------
def send_mail(self, cr, uid, ids, context=None):
""" Process the wizard content and proceed with sending the related
email(s), rendering any template patterns on the fly if needed. """
if context is None:
context = {}
active_ids = context.get('active_ids')
is_log = context.get('mail_compose_log', False)
for wizard in self.browse(cr, uid, ids, context=context):
mass_mail_mode = wizard.composition_mode == 'mass_mail'
active_model_pool = self.pool.get(wizard.model if wizard.model else 'mail.thread')
# wizard works in batch mode: [res_id] or active_ids
res_ids = active_ids if mass_mail_mode and wizard.model and active_ids else [wizard.res_id]
for res_id in res_ids:
# default values, according to the wizard options
post_values = {
'subject': wizard.subject,
'body': wizard.body,
'parent_id': wizard.parent_id and wizard.parent_id.id,
'partner_ids': [partner.id for partner in wizard.partner_ids],
'attachments': [(attach.datas_fname or attach.name, base64.b64decode(attach.datas)) for attach in wizard.attachment_ids],
}
# mass mailing: render and override default values
if mass_mail_mode and wizard.model:
email_dict = self.render_message(cr, uid, wizard, res_id, context=context)
new_partner_ids = email_dict.pop('partner_ids', [])
post_values['partner_ids'] += new_partner_ids
new_attachments = email_dict.pop('attachments', [])
post_values['attachments'] += new_attachments
post_values.update(email_dict)
# post the message
subtype = 'mail.mt_comment'
if is_log:
subtype = False
active_model_pool.message_post(cr, uid, [res_id], type='comment', subtype=subtype, context=context, **post_values)
return {'type': 'ir.actions.act_window_close'}
def render_message(self, cr, uid, wizard, res_id, context=None):
""" Generate an email from the template for given (wizard.model, res_id)
pair. This method is meant to be inherited by email_template that
will produce a more complete dictionary. """
return {
'subject': self.render_template(cr, uid, wizard.subject, wizard.model, res_id, context),
'body': self.render_template(cr, uid, wizard.body, wizard.model, res_id, context),
}
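    # Illustrative example (record fields assumed): for a record whose
    # ``name`` field is "Agrolait", a body of "Dear ${object.name}," is
    # rendered to "Dear Agrolait,"; an expression that evaluates to a
    # false-y value renders as the empty string (see render_template).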
def render_template(self, cr, uid, template, model, res_id, context=None):
""" Render the given template text, replace mako-like expressions ``${expr}``
with the result of evaluating these expressions with an evaluation context
containing:
* ``user``: browse_record of the current user
* ``object``: browse_record of the document record this mail is
related to
* ``context``: the context passed to the mail composition wizard
:param str template: the template text to render
:param str model: model name of the document record this mail is related to.
:param int res_id: id of the document record this mail is related to.
"""
if context is None:
context = {}
def merge(match):
exp = str(match.group()[2:-1]).strip()
result = eval(exp, {
'user': self.pool.get('res.users').browse(cr, uid, uid, context=context),
'object': self.pool.get(model).browse(cr, uid, res_id, context=context),
'context': dict(context), # copy context to prevent side-effects of eval
})
return result and tools.ustr(result) or ''
return template and EXPRESSION_PATTERN.sub(merge, template)<|fim▁end|> | } |
<|file_name|>require.js<|end_file_name|><|fim▁begin|>// -- kriskowal Kris Kowal Copyright (C) 2009-2010 MIT License
(function (require, exports) {
/**
* @module
*/
/*whatsupdoc*/
var Q = require("q");
var has = Object.prototype.hasOwnProperty;
var update = function (_object, object) {
for (var key in object) {
if (has.call(object, key)) {
_object[key] = object[key];
}
}
};
var copy = function (object) {
var _object = {};
update(_object, object);
return _object;
}
var enquote = typeof JSON !== "undefined" && JSON.stringify || function (text) {
return text;
};
/**
* Creates a `require` function, and arranges for modules
* to be executed and their exports memoized, in a lexical
* scope that includes:
*
* * `require(id)` with support for identifiers relative to
* the calling module.
* * `require.loader` for direct access to the module
* loader, which can be used in nested requirers.
 * `require.reload(id)` to build a fresh `require` around
 * freshly loaded module factories.
 * `require.ensure(ids, callback)` to asynchronously load
 * the given modules and their transitive requirements.
 * `require.async(id)` to load and execute a module
 * asynchronously.
 * `require.exec(id, scope)` to execute a module in a fresh
 * module system, with an optional object that owns
 * additional free variables to inject into the module's
 * lexical scope.
* * `module`
* * `id`
* * `path`
* * `exports`
*
* @param {{loader, modules, debug}} options
* @constructor
* @returns {require(id)}
*/
exports.Require = function (options) {
options = options || {};
var loader = options.loader;
var factories = options.factories || {};
var modules = options.modules || {};
var apis = options.exports || {};
var supportDefine = options.supportDefine;
var sharedScope = options.scope || {};
for (var id in apis)
if (has.call(apis, id))
modules[id] = {"exports": apis[id]};
var load = function (id) {
if (!factories[id]) {
if (!loader) {
return Q.reject("require: Can't load " + enquote(id));
} else {
factories[id] = loader.load(id);
}
}
return factories[id];
};
var require = function (id, baseId, options) {
        var module, factory, exports, completed, require, scope;
options = options || {};
id = resolve(id, baseId);
if (has.call(modules, id)) {
module = modules[id];
} else if (has.call(factories, id)) {
factory = factories[id];
module = Module(id, factory.path);
modules[id] = module;
exports = modules[id].exports;
require = Require(id);
scope = {};
update(scope, sharedScope);
update(scope, options.scope || {});
update(scope, {
"require": require,
"exports": exports,
"module": module
});
if (supportDefine)
scope.define = Define(require, exports, module);
try {
var returned = factory(scope);
completed = true;
} finally {
if (!completed) {
delete modules[id];
}
}
if (typeof returned !== "undefined") {
module.exports = returned;
}
} else {
throw new Error("require: Can't load " + enquote(id));
}
return module.exports;
};
// curries require for a module, so its baseId can be assumed
var Require = function (baseId) {
var _require = function (id) { return require(id, baseId); };
_require.async = function (id) { return require.async(id, baseId) };
_require.loader = loader;
_require.main = modules[options.main];
return _require;
};
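    // Illustrative: inside module "a/b/c", require("./d") resolves to
    // "a/b/d" and require("../e") to "a/e" -- see resolve() at the
    // bottom of this file.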
var Define = function (require, exports, module) {
return function () {
var callback = arguments[arguments.length - 1];
var returned;
if (typeof callback === "function") {
returned = callback(require, exports, module);
} else {
returned = callback;
}
if (typeof returned !== "undefined")
module.exports = returned;
return returned;
};
};
// creates a module object
var Module = function (baseId, path) {
var module = {};
module.exports = {};
module.id = baseId;
module.path = path;
return module;
};
// asynchronously adds module factories to a factory list
var advanceFactories = function (id, factories) {
return Q.when(load(id), function (factory) {
return (factory.requirements || []).reduce(function (factories, requirement) {
requirement = resolve(requirement, id);
return Q.when(factories, function (factories) {
if (has.call(modules, requirement) || has.call(factories, requirement))
return factories;
return advanceFactories(requirement, factories);
});
}, factories);
});
};
require.reload = function (id) {
return Q.when(advanceFactories(id, {}), function (factories) {
return exports.Require({
"loader": loader,
"factories": factories
});
});
};
require.ensure = function (ids, callback) {
var _modules = copy(modules);
var _factories = ids.reduce(function (factories, id) {
return Q.when(factories, function (factories) {
return advanceFactories(id, factories);
});
}, copy(factories));
return Q.when(_factories, function (factories) {
callback(exports.Require({
"loader": loader,
"factories": factories,
"modules": _modules
}));
}, function (reason) {
throw new Error(reason.message || reason);
});
};
require.async = function (id, baseId) {
var _factories = copy(factories);
var _modules = copy(modules);
return Q.when(advanceFactories(id, _factories), function (factories) {
var _require = exports.Require({
"loader": loader,
"factories": factories,
"modules": _modules
});
return _require(id, baseId);
});
};
require.exec = function (id, scope) {
var _factories = copy(factories);
var _modules = copy(modules);
return Q.when(advanceFactories(id, _factories), function (factories) {
var _require = exports.Require({
"loader": loader,
"factories": factories,
"modules": _modules,
"main": id,
"scope": sharedScope,
"supportDefine": supportDefine
});
return _require(id, undefined, {
"scope": scope
});
});
};
require.loader = loader;
return require;
};
exports.resolve = resolve;
function resolve(id, baseId) {
id = String(id);
var ids = id.split("/");
// assert ids.length >= 1 since "".split("") == [""]
var first = ids[0];
if (first === ".." || first === ".") {
var baseIds = baseId.split("/");
baseIds.pop();
ids.unshift.apply(ids, baseIds);
}
var parts = [];
while (ids.length) {
var part = ids.shift();
if (part === ".") {
} else if (part === "..") {
parts.pop();
} else {
parts.push(part);
}
}
return parts.join("/");<|fim▁hole|>}).apply({},
typeof exports !== "undefined" ? [
require,
exports
] : [
(function (global) {
return function (id) {
return global["/" + id];
}
})(this),
this["/require"] = {}
]
);<|fim▁end|> | }
|
<|file_name|>storage.js<|end_file_name|><|fim▁begin|>$(document).delegate('.storage_graph_link', 'click', function(e){
var anchor = this,
el = $(anchor),
id = el.attr('data-status');
if(e.ctrlKey || e.metaKey){
return true;
}else{
e.preventDefault();
}
var cell = document.getElementById(id);
var text = el.html();
if (text == '[:: show ::]') {
anchor.innerHTML = '[:: hide ::]';
if (cell.nodeName == 'IMG') { // <img src='...'/>
cell.src=anchor.href;
} else {
$.ajax({
type: "get",
url: anchor.href,
success : function(response, textStatus) {
cell.style.display = 'block';
cell.parentNode.style.display = 'block';
cell.innerHTML = response;
var data = $('#countTrendMeta',cell).text();
graphLineChart($('#countTrend',cell)[0],eval('('+data+')'));
data = $('#longTrendMeta',cell).text();
graphLineChart($('#longTrend',cell)[0],eval('('+data+')'));
data = $('#avgTrendMeta',cell).text();
graphLineChart($('#avgTrend',cell)[0],eval('('+data+')'));
data = $('#errorTrendMeta',cell).text();
graphLineChart($('#errorTrend',cell)[0],eval('('+data+')'));
data = $('#piechartMeta',cell).text();
graphPieChart($('#piechart',cell)[0],eval('('+data+')'));
}
});
}
} else {
anchor.innerHTML = '[:: show ::]';
cell.style.display = 'none';
cell.parentNode.style.display = 'none';<|fim▁hole|><|fim▁end|> | }
}) |
<|file_name|>FilterDSPKernel.hpp<|end_file_name|><|fim▁begin|>/*
<samplecode>
<abstract>
A DSPKernel subclass implementing the realtime signal processing portion of the FilterDemo audio unit.
</abstract>
</samplecode>
*/
#ifndef FilterDSPKernel_hpp
#define FilterDSPKernel_hpp
#import "DSPKernel.hpp"
#import "ParameterRamper.hpp"
#import <vector>
static inline float convertBadValuesToZero(float x) {
/*
Eliminate denormals, not-a-numbers, and infinities.
Denormals will fail the first test (absx > 1e-15), infinities will fail
the second test (absx < 1e15), and NaNs will fail both tests. Zero will
also fail both tests, but since it will get set to zero that is OK.
*/
float absx = fabs(x);
if (absx > 1e-15 && absx < 1e15) {
return x;
}
return 0.0;
}
enum {
FilterParamCutoff = 0,
FilterParamResonance = 1
};
static inline double squared(double x) {
return x * x;
}
/*
FilterDSPKernel
Performs our filter signal processing.
As a non-ObjC class, this is safe to use from render thread.
*/
class FilterDSPKernel : public DSPKernel {
public:
// MARK: Types
struct FilterState {
float x1 = 0.0;
float x2 = 0.0;
float y1 = 0.0;
float y2 = 0.0;
void clear() {
x1 = 0.0;
x2 = 0.0;
y1 = 0.0;
y2 = 0.0;<|fim▁hole|> These filters work by feedback. If an infinity or NaN should come
into the filter input, the feedback variables can become infinity
or NaN which will cause the filter to stop operating. This function
clears out any bad numbers in the feedback variables.
*/
x1 = convertBadValuesToZero(x1);
x2 = convertBadValuesToZero(x2);
y1 = convertBadValuesToZero(y1);
y2 = convertBadValuesToZero(y2);
}
};
struct BiquadCoefficients {
float a1 = 0.0;
float a2 = 0.0;
float b0 = 0.0;
float b1 = 0.0;
float b2 = 0.0;
void calculateLopassParams(double frequency, double resonance) {
/*
The transcendental function calls here could be replaced with
interpolated table lookups or other approximations.
*/
// Convert from decibels to linear.
double r = pow(10.0, 0.05 * -resonance);
double k = 0.5 * r * sin(M_PI * frequency);
double c1 = (1.0 - k) / (1.0 + k);
double c2 = (1.0 + c1) * cos(M_PI * frequency);
double c3 = (1.0 + c1 - c2) * 0.25;
b0 = float(c3);
b1 = float(2.0 * c3);
b2 = float(c3);
a1 = float(-c2);
a2 = float(c1);
}
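        /*
            Sanity note (illustrative): the numerator coefficients
            (c3, 2*c3, c3) always place a double zero at z = -1, so the
            response is pinned to zero at the Nyquist frequency for any
            cutoff/resonance setting.
        */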
        // Argument is a normalized frequency (a fraction of the Nyquist rate).
double magnitudeForFrequency( double inFreq) {
// Cast to Double.
double _b0 = double(b0);
double _b1 = double(b1);
double _b2 = double(b2);
double _a1 = double(a1);
double _a2 = double(a2);
// Frequency on unit circle in z-plane.
double zReal = cos(M_PI * inFreq);
double zImaginary = sin(M_PI * inFreq);
// Zeros response.
double numeratorReal = (_b0 * (squared(zReal) - squared(zImaginary))) + (_b1 * zReal) + _b2;
double numeratorImaginary = (2.0 * _b0 * zReal * zImaginary) + (_b1 * zImaginary);
double numeratorMagnitude = sqrt(squared(numeratorReal) + squared(numeratorImaginary));
// Poles response.
double denominatorReal = squared(zReal) - squared(zImaginary) + (_a1 * zReal) + _a2;
double denominatorImaginary = (2.0 * zReal * zImaginary) + (_a1 * zImaginary);
double denominatorMagnitude = sqrt(squared(denominatorReal) + squared(denominatorImaginary));
// Total response.
double response = numeratorMagnitude / denominatorMagnitude;
return response;
}
};
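        /*
            Equivalently, this evaluates |H(z)| for the biquad
            H(z) = (b0*z^2 + b1*z + b2) / (z^2 + a1*z + a2)
            on the upper half of the unit circle, z = e^(j*pi*inFreq).
        */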
// MARK: Member Functions
FilterDSPKernel() {}
void init(int channelCount, double inSampleRate) {
channelStates.resize(channelCount);
sampleRate = float(inSampleRate);
nyquist = 0.5 * sampleRate;
inverseNyquist = 1.0 / nyquist;
dezipperRampDuration = (AUAudioFrameCount)floor(0.02 * sampleRate);
cutoffRamper.init();
resonanceRamper.init();
}
void reset() {
cutoffRamper.reset();
resonanceRamper.reset();
for (FilterState& state : channelStates) {
state.clear();
}
}
void setParameter(AUParameterAddress address, AUValue value) {
switch (address) {
case FilterParamCutoff:
//cutoffRamper.setUIValue(clamp(value * inverseNyquist, 0.0f, 0.99f));
cutoffRamper.setUIValue(clamp(value * inverseNyquist, 0.0005444f, 0.9070295f));
break;
case FilterParamResonance:
resonanceRamper.setUIValue(clamp(value, -20.0f, 20.0f));
break;
}
}
AUValue getParameter(AUParameterAddress address) {
switch (address) {
case FilterParamCutoff:
// Return the goal. It is not thread safe to return the ramping value.
//return (cutoffRamper.getUIValue() * nyquist);
return roundf((cutoffRamper.getUIValue() * nyquist) * 100) / 100;
case FilterParamResonance:
return resonanceRamper.getUIValue();
default: return 12.0f * inverseNyquist;
}
}
void startRamp(AUParameterAddress address, AUValue value, AUAudioFrameCount duration) override {
switch (address) {
case FilterParamCutoff:
cutoffRamper.startRamp(clamp(value * inverseNyquist, 12.0f * inverseNyquist, 0.99f), duration);
break;
case FilterParamResonance:
resonanceRamper.startRamp(clamp(value, -20.0f, 20.0f), duration);
break;
}
}
void setBuffers(AudioBufferList* inBufferList, AudioBufferList* outBufferList) {
inBufferListPtr = inBufferList;
outBufferListPtr = outBufferList;
}
void process(AUAudioFrameCount frameCount, AUAudioFrameCount bufferOffset) override {
int channelCount = int(channelStates.size());
cutoffRamper.dezipperCheck(dezipperRampDuration);
resonanceRamper.dezipperCheck(dezipperRampDuration);
// For each sample.
for (int frameIndex = 0; frameIndex < frameCount; ++frameIndex) {
/*
The filter coefficients are updated every sample! This is very
expensive. You probably want to do things differently.
*/
double cutoff = double(cutoffRamper.getAndStep());
double resonance = double(resonanceRamper.getAndStep());
coeffs.calculateLopassParams(cutoff, resonance);
int frameOffset = int(frameIndex + bufferOffset);
for (int channel = 0; channel < channelCount; ++channel) {
FilterState& state = channelStates[channel];
float* in = (float*)inBufferListPtr->mBuffers[channel].mData + frameOffset;
float* out = (float*)outBufferListPtr->mBuffers[channel].mData + frameOffset;
float x0 = *in;
float y0 = (coeffs.b0 * x0) + (coeffs.b1 * state.x1) + (coeffs.b2 * state.x2) - (coeffs.a1 * state.y1) - (coeffs.a2 * state.y2);
*out = y0;
state.x2 = state.x1;
state.x1 = x0;
state.y2 = state.y1;
state.y1 = y0;
}
}
// Squelch any blowups once per cycle.
for (int channel = 0; channel < channelCount; ++channel) {
channelStates[channel].convertBadStateValuesToZero();
}
}
// MARK: Member Variables
private:
std::vector<FilterState> channelStates;
BiquadCoefficients coeffs;
float sampleRate = 44100.0;
float nyquist = 0.5 * sampleRate;
float inverseNyquist = 1.0 / nyquist;
AUAudioFrameCount dezipperRampDuration;
AudioBufferList* inBufferListPtr = nullptr;
AudioBufferList* outBufferListPtr = nullptr;
public:
// Parameters.
ParameterRamper cutoffRamper = 400.0 / 44100.0;
ParameterRamper resonanceRamper = 20.0;
};
#endif /* FilterDSPKernel_hpp */<|fim▁end|> | }
void convertBadStateValuesToZero() {
/* |
<|file_name|>sol-rust.rs<|end_file_name|><|fim▁begin|>use std::io;
use std::io::prelude::*;<|fim▁hole|> let stdin = io::stdin();
for line in stdin.lock().lines() {
let s = line.unwrap();
println!("{}", &s[4..]);
}
}<|fim▁end|> |
fn main() { |
<|file_name|>convert.py<|end_file_name|><|fim▁begin|># This file is part of Korman.
#
# Korman is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Korman is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Korman. If not, see <http://www.gnu.org/licenses/>.
import bpy
from collections import defaultdict
from contextlib import ExitStack
import functools
from pathlib import Path
from ..helpers import TemporaryObject
from ..korlib import ConsoleToggler
from PyHSPlasma import *
from . import animation
from . import camera
from . import decal
from . import explosions
from . import etlight
from . import image
from . import locman
from . import logger
from . import manager
from . import mesh
from . import outfile
from . import physics
from . import rtlight
from . import utils
class Exporter:
def __init__(self, op):
self._op = op # Blender export operator
self._objects = []
self.actors = set()
self.want_node_trees = defaultdict(set)
self.exported_nodes = {}
def run(self):
log = logger.ExportVerboseLogger if self._op.verbose else logger.ExportProgressLogger
with ConsoleToggler(self._op.show_console), log(self._op.filepath) as self.report, ExitStack() as self.exit_stack:
# Step 0: Init export resmgr and stuff
self.mgr = manager.ExportManager(self)
self.mesh = mesh.MeshConverter(self)
self.physics = physics.PhysicsConverter(self)
self.light = rtlight.LightConverter(self)
self.animation = animation.AnimationConverter(self)
self.output = outfile.OutputFiles(self, self._op.filepath)
self.camera = camera.CameraConverter(self)
self.image = image.ImageCache(self)
self.locman = locman.LocalizationConverter(self)
self.decal = decal.DecalConverter(self)
self.oven = etlight.LightBaker(mesh=self.mesh, report=self.report)
# Step 0.8: Init the progress mgr
self.mesh.add_progress_presteps(self.report)
self.report.progress_add_step("Collecting Objects")
self.report.progress_add_step("Verify Competence")
self.report.progress_add_step("Touching the Intangible")
self.report.progress_add_step("Harvesting Actors")
if self._op.lighting_method != "skip":
etlight.LightBaker.add_progress_steps(self.report)
self.report.progress_add_step("Exporting Scene Objects")
self.report.progress_add_step("Exporting Logic Nodes")
self.report.progress_add_step("Finalizing Plasma Logic")
self.report.progress_add_step("Handling Snakes")
self.report.progress_add_step("Exporting Textures")
self.report.progress_add_step("Composing Geometry")
self.report.progress_add_step("Saving Age Files")
self.report.progress_start("EXPORTING AGE")
# Step 0.9: Apply modifiers to all meshes temporarily.
with self.mesh:
# Step 1: Create the age info and the pages
self._export_age_info()
# Step 2: Gather a list of objects that we need to export, given what the user has told
# us to export (both in the Age and Object Properties)... fun
self._collect_objects()
# Step 2.1: Run through all the objects we collected in Step 2 and make sure there
# is no ruddy funny business going on.
self._check_sanity()
# Step 2.2: Run through all the objects again and ask them to "pre_export" themselves.
# In other words, generate any ephemeral Blender objects that need to be exported.
self._pre_export_scene_objects()
# Step 2.5: Run through all the objects we collected in Step 2 and see if any relationships
# that the artist made requires something to have a CoordinateInterface
self._harvest_actors()
# Step 2.9: It is assumed that static lighting is available for the mesh exporter.
# Indeed, in PyPRP it was a manual step. So... BAKE NAO!
self._bake_static_lighting()
# Step 3: Export all the things!
self._export_scene_objects()
# Step 3.1: Ensure referenced logic node trees are exported
self._export_referenced_node_trees()
# Step 3.2: Now that all Plasma Objects (save Mipmaps) are exported, we do any post
# processing that needs to inspect those objects
self._post_process_scene_objects()
# Step 3.3: Ensure any helper Python files are packed
self._pack_ancillary_python()
# Step 4: Finalize...
self.mesh.material.finalize()
self.mesh.finalize()
# Step 5: FINALLY. Let's write the PRPs and crap.
self._save_age()
# Step 5.1: Save out the export report.
# If the export fails and this doesn't save, we have bigger problems than
# these little warnings and notices.
self.report.progress_end()
self.report.save()
# Step 5.2: If any nonfatal errors were encountered during the export, we will
# raise them here, now that everything is finished, to draw attention
# to whatever the problem might be.
self.report.raise_errors()
def _bake_static_lighting(self):
if self._op.lighting_method != "skip":
self.oven.bake_static_lighting(self._objects)
def _collect_objects(self):
scene = bpy.context.scene
self.report.progress_advance()
self.report.progress_range = len(scene.objects)
inc_progress = self.report.progress_increment
# Grab a naive listing of enabled pages
age = scene.world.plasma_age
pages_enabled = frozenset((page.name for page in age.pages if page.enabled and self._op.version in page.version))
all_pages = frozenset((page.name for page in age.pages))
# Because we can have an unnamed or a named default page, we need to see if that is enabled...
for page in age.pages:
if page.seq_suffix == 0:
default_enabled = page.enabled
default_inited = True
break
else:
default_enabled = True
default_inited = False
# Now we loop through the objects with some considerations:
# - The default page may or may not be defined. If it is, it can be disabled. If not, it
# can only ever be enabled.
# - Don't create the Default page unless it is used (implicit or explicit). It is a failure
# to export a useless file.
# - Any arbitrary page can be disabled, so check our frozenset.
# - Also, someone might have specified an invalid page, so keep track of that.
error = explosions.UndefinedPageError()
for obj in scene.objects:
if obj.plasma_object.enabled:
page = obj.plasma_object.page
if not page and not default_inited:
self.mgr.create_page(self.age_name, "Default", 0)
default_inited = True
if (default_enabled and not page) or (page in pages_enabled):
self._objects.append(obj)
elif page not in all_pages:
error.add(page, obj.name)
inc_progress()
error.raise_if_error()
def _check_sanity(self):
self.report.progress_advance()
self.report.progress_range = len(self._objects)
inc_progress = self.report.progress_increment
self.report.msg("\nEnsuring Age is sane...")
for bl_obj in self._objects:
for mod in bl_obj.plasma_modifiers.modifiers:
fn = getattr(mod, "sanity_check", None)
if fn is not None:
fn()
inc_progress()
self.report.msg("... Age is grinning and holding a spatula. Must be OK, then.")
def _export_age_info(self):
# Make life slightly easier...
age_info = bpy.context.scene.world.plasma_age
age_name = self.age_name
mgr = self.mgr
# Generate the plAgeInfo
mgr.AddAge(age_info.export(self))
# Create all the pages we need
ver = self._op.version
for page in age_info.pages:
if page.enabled and ver in page.version:
mgr.create_page(age_name, page.name, page.seq_suffix)
mgr.create_builtins(age_name, age_info.use_texture_page)
def _export_actor(self, so, bo):
"""Exports a Coordinate Interface if we need one"""
if self.has_coordiface(bo):
self._export_coordinate_interface(so, bo)
# If this object has a parent, then we will need to go upstream and add ourselves to the
# parent's CoordinateInterface... Because life just has to be backwards.
parent = bo.parent
if parent is not None:
if parent.plasma_object.enabled:
self.report.msg("Attaching to parent SceneObject '{}'", parent.name, indent=1)
parent_ci = self._export_coordinate_interface(None, parent)
parent_ci.addChild(so.key)
else:
self.report.warn("You have parented Plasma Object '{}' to '{}', which has not been marked for export. \
The object may not appear in the correct location or animate properly.".format(
bo.name, parent.name))
def _export_coordinate_interface(self, so, bl):
"""Ensures that the SceneObject has a CoordinateInterface"""
if so is None:
so = self.mgr.find_create_object(plSceneObject, bl=bl)
if so.coord is None:
ci_cls = bl.plasma_object.ci_type
ci = self.mgr.add_object(ci_cls, bl=bl, so=so)
# Now we have the "fun" work of filling in the CI
ci.localToWorld = utils.matrix44(bl.matrix_basis)
ci.worldToLocal = ci.localToWorld.inverse()
ci.localToParent = utils.matrix44(bl.matrix_local)
ci.parentToLocal = ci.localToParent.inverse()
return ci
return so.coord.object
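    # Note: localToParent/parentToLocal above come from matrix_local, so a
    # child's CoordinateInterface composes with its parent's transform;
    # _export_actor is what actually attaches the child to the parent CI.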
def _export_scene_objects(self):
self.report.progress_advance()
self.report.progress_range = len(self._objects)
inc_progress = self.report.progress_increment
log_msg = self.report.msg
for bl_obj in self._objects:
log_msg("\n[SceneObject '{}']".format(bl_obj.name))
# First pass: do things specific to this object type.
# note the function calls: to export a MESH, it's _export_mesh_blobj
export_fn = "_export_{}_blobj".format(bl_obj.type.lower())
try:
export_fn = getattr(self, export_fn)
except AttributeError:
self.report.warn("""'{}' is a Plasma Object of Blender type '{}'
... And I have NO IDEA what to do with that! Tossing.""".format(bl_obj.name, bl_obj.type))
continue
log_msg("Blender Object '{}' of type '{}'".format(bl_obj.name, bl_obj.type), indent=1)
# Create a sceneobject if one does not exist.
# Before we call the export_fn, we need to determine if this object is an actor of any
# sort, and barf out a CI.
sceneobject = self.mgr.find_create_object(plSceneObject, bl=bl_obj)
self._export_actor(sceneobject, bl_obj)
export_fn(sceneobject, bl_obj)
# And now we puke out the modifiers...
for mod in bl_obj.plasma_modifiers.modifiers:
log_msg("Exporting '{}' modifier".format(mod.bl_label), indent=1)
mod.export(self, bl_obj, sceneobject)
inc_progress()
def _export_camera_blobj(self, so, bo):
# Hey, guess what? Blender's camera data is utter crap!
# NOTE: Animation export is dependent on camera type, so we'll do that later.
camera = bo.data.plasma_camera
self.camera.export_camera(so, bo, camera.camera_type, camera.settings, camera.transitions)
def _export_empty_blobj(self, so, bo):
self.animation.convert_object_animations(bo, so)
def _export_lamp_blobj(self, so, bo):
self.animation.convert_object_animations(bo, so)
self.light.export_rtlight(so, bo)
def _export_mesh_blobj(self, so, bo):
self.animation.convert_object_animations(bo, so)
if bo.data.materials:
self.mesh.export_object(bo, so)
else:
self.report.msg("No material(s) on the ObData, so no drawables", indent=1)
def _export_font_blobj(self, so, bo):
self.animation.convert_object_animations(bo, so)<|fim▁hole|> self.report.msg("No material(s) on the ObData, so no drawables", indent=1)
def _export_referenced_node_trees(self):
self.report.progress_advance()
self.report.progress_range = len(self.want_node_trees)
inc_progress = self.report.progress_increment
self.report.msg("\nChecking Logic Trees...")
for tree_name, references in self.want_node_trees.items():
self.report.msg("NodeTree '{}'", tree_name, indent=1)
tree = bpy.data.node_groups[tree_name]
for bo, so in references:
tree.export(self, bo, so)
inc_progress()
def _harvest_actors(self):
self.report.progress_advance()
self.report.progress_range = len(self._objects) + len(bpy.data.textures)
inc_progress = self.report.progress_increment
for bl_obj in self._objects:
for mod in bl_obj.plasma_modifiers.modifiers:
if mod.enabled:
self.actors.update(mod.harvest_actors())
inc_progress()
# This is a little hacky, but it's an edge case... I guess?
# We MUST have CoordinateInterfaces for EnvironmentMaps (DCMs, bah)
for texture in bpy.data.textures:
envmap = getattr(texture, "environment_map", None)
if envmap is not None:
viewpt = envmap.viewpoint_object
if viewpt is not None:
self.actors.add(viewpt.name)
inc_progress()
def has_coordiface(self, bo):
if bo.type in {"CAMERA", "EMPTY", "LAMP"}:
return True
if bo.parent is not None:
return True
if bo.name in self.actors:
return True
if bo.plasma_object.has_transform_animation:
return True
for mod in bo.plasma_modifiers.modifiers:
if mod.enabled:
if mod.requires_actor:
return True
return False
def _post_process_scene_objects(self):
self.report.progress_advance()
self.report.progress_range = len(self._objects)
inc_progress = self.report.progress_increment
self.report.msg("\nPost-Processing SceneObjects...")
mat_mgr = self.mesh.material
for bl_obj in self._objects:
sceneobject = self.mgr.find_object(plSceneObject, bl=bl_obj)
if sceneobject is None:
# no SO? fine then. turd.
continue
# Synchronization is applied for the root SO and all animated layers (WTF)
# So, we have to keep in mind shared layers (whee) in the synch options kode
net = bl_obj.plasma_net
net.propagate_synch_options(sceneobject, sceneobject)
for mat in mat_mgr.get_materials(bl_obj):
for layer in mat.object.layers:
layer = layer.object
if isinstance(layer, plLayerAnimation):
net.propagate_synch_options(sceneobject, layer)
# Modifiers don't have to expose post-processing, but if they do, run it
for mod in bl_obj.plasma_modifiers.modifiers:
proc = getattr(mod, "post_export", None)
if proc is not None:
self.report.msg("Post processing '{}' modifier '{}'", bl_obj.name, mod.bl_label, indent=1)
proc(self, bl_obj, sceneobject)
inc_progress()
def _pre_export_scene_objects(self):
self.report.progress_advance()
self.report.progress_range = len(self._objects)
inc_progress = self.report.progress_increment
self.report.msg("\nGenerating export dependency objects...")
        # New objects may be generated during this process; they will be appended at the end.
new_objects = []
@functools.singledispatch
def handle_temporary(temporary, parent):
raise RuntimeError("Temporary object of type '{}' generated by '{}' was unhandled".format(temporary.__class__, parent.name))
@handle_temporary.register(bpy.types.Object)
def _(temporary, parent):
self.exit_stack.enter_context(TemporaryObject(temporary, bpy.data.objects.remove))
self.report.msg("'{}': generated Object '{}' (Plasma Object: {})", parent.name,
temporary.name, temporary.plasma_object.enabled, indent=1)
if temporary.plasma_object.enabled:
new_objects.append(temporary)
# If the object is marked as a Plasma Object, be sure that we go into the same page
# as the requestor, unless the modifier decided it knows better.
if not temporary.plasma_object.property_set("page"):
temporary.plasma_object.page = parent.plasma_object.page
# Wow, recursively generated objects. Aren't you special?
for mod in temporary.plasma_modifiers.modifiers:
mod.sanity_check()
do_pre_export(temporary)
@handle_temporary.register(bpy.types.NodeTree)
def _(temporary, parent):
self.exit_stack.enter_context(TemporaryObject(temporary, bpy.data.node_groups.remove))
self.report.msg("'{}' generated NodeTree '{}'", parent.name, temporary.name)
if temporary.bl_idname == "PlasmaNodeTree":
parent_so = self.mgr.find_create_object(plSceneObject, bl=parent)
self.want_node_trees[temporary.name].add((parent, parent_so))
def do_pre_export(bo):
for mod in bo.plasma_modifiers.modifiers:
for i in filter(None, mod.pre_export(self, bo)):
handle_temporary(i, bo)
for bl_obj in self._objects:
do_pre_export(bl_obj)
inc_progress()
self.report.msg("... {} new object(s) were generated!", len(new_objects))
self._objects += new_objects
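    # Aside (illustrative only): handle_temporary above relies on
    # functools.singledispatch, which picks an implementation based on the
    # runtime type of the first argument. A minimal, self-contained sketch:
    #
    #   import functools
    #
    #   @functools.singledispatch
    #   def describe(obj):
    #       raise TypeError("unhandled type: {}".format(type(obj)))
    #
    #   @describe.register(int)
    #   def _(obj):
    #       return "int: {}".format(obj)
    #
    #   describe(3)    # -> "int: 3"
    #   describe(3.0)  # -> TypeError, like the unhandled-temporary case above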
def _pack_ancillary_python(self):
texts = bpy.data.texts
self.report.progress_advance()
self.report.progress_range = len(texts)
inc_progress = self.report.progress_increment
for i in texts:
if i.name.endswith(".py") and self.output.want_py_text(i):
self.output.add_python_code(i.name, text_id=i)
inc_progress()
def _save_age(self):
self.report.progress_advance()
self.report.msg("\nWriting Age data...")
# If something bad happens in the final flush, it would be a shame to
# simply toss away the potentially freshly regenerated texture cache.
try:
self.locman.save()
self.mgr.save_age()
self.output.save()
finally:
self.image.save()
@property
def age_name(self):
if self._op.dat_only:
return Path(self._op.filepath).stem
else:
return bpy.context.scene.world.plasma_age.age_name
@property
def dat_only(self):
return self._op.dat_only
@property
def envmap_method(self):
return bpy.context.scene.world.plasma_age.envmap_method
@property
def python_method(self):
return bpy.context.scene.world.plasma_age.python_method
@property
def texcache_path(self):
age = bpy.context.scene.world.plasma_age
filepath = age.texcache_path
try:
valid_path = filepath and Path(filepath).is_file()
except OSError:
valid_path = False
if not valid_path:
filepath = bpy.context.blend_data.filepath
if not filepath:
filepath = self._op.filepath
filepath = str(Path(filepath).with_suffix(".ktc"))
age.texcache_path = filepath
return filepath
@property
def texcache_method(self):
return bpy.context.scene.world.plasma_age.texcache_method<|fim▁end|> | with utils.temporary_mesh_object(bo) as meshObj:
if bo.data.materials:
self.mesh.export_object(meshObj, so)
else: |
<|file_name|>ReadonlyableDefinition.ts<|end_file_name|><|fim▁begin|>export abstract class ReadonlyableDefinition {<|fim▁hole|>}<|fim▁end|> | isReadonly: boolean; |
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>use std::c_str::CString;
use std::error;
use std::fmt;
use std::str;
use libc;
use libc::c_int;
use {raw, ErrorCode};
/// A structure to represent errors coming out of libgit2.
pub struct Error {
raw: raw::git_error,
}
impl Error {
/// Returns the last error, or `None` if one is not available.
pub fn last_error() -> Option<Error> {
::init();
let mut raw = raw::git_error {
message: 0 as *mut libc::c_char,
klass: 0,
};
if unsafe { raw::giterr_detach(&mut raw) } == 0 {
Some(Error { raw: raw })
} else {
None
}
}
/// Creates a new error from the given string as the error.
pub fn from_str(s: &str) -> Error {
::init();
Error {
raw: raw::git_error {
message: unsafe { s.to_c_str().into_inner() as *mut _ },
klass: raw::GIT_ERROR as libc::c_int,
}
}
}
/// Return the error code associated with this error.<|fim▁hole|> pub fn code(&self) -> ErrorCode {
match self.raw_code() {
raw::GIT_OK => super::ErrorCode::GenericError,
raw::GIT_ERROR => super::ErrorCode::GenericError,
raw::GIT_ENOTFOUND => super::ErrorCode::NotFound,
raw::GIT_EEXISTS => super::ErrorCode::Exists,
raw::GIT_EAMBIGUOUS => super::ErrorCode::Ambiguous,
raw::GIT_EBUFS => super::ErrorCode::BufSize,
raw::GIT_EUSER => super::ErrorCode::User,
raw::GIT_EBAREREPO => super::ErrorCode::BareRepo,
raw::GIT_EUNBORNBRANCH => super::ErrorCode::UnbornBranch,
raw::GIT_EUNMERGED => super::ErrorCode::Unmerged,
raw::GIT_ENONFASTFORWARD => super::ErrorCode::NotFastForward,
raw::GIT_EINVALIDSPEC => super::ErrorCode::InvalidSpec,
raw::GIT_EMERGECONFLICT => super::ErrorCode::MergeConflict,
raw::GIT_ELOCKED => super::ErrorCode::Locked,
raw::GIT_EMODIFIED => super::ErrorCode::Modified,
raw::GIT_PASSTHROUGH => super::ErrorCode::GenericError,
raw::GIT_ITEROVER => super::ErrorCode::GenericError,
}
}
/// Return the raw error code associated with this error.
pub fn raw_code(&self) -> raw::git_error_code {
macro_rules! check( ($($e:ident),*) => (
$(if self.raw.klass == raw::$e as c_int { raw::$e }) else *
else {
raw::GIT_ERROR
}
) );
check!(
GIT_OK,
GIT_ERROR,
GIT_ENOTFOUND,
GIT_EEXISTS,
GIT_EAMBIGUOUS,
GIT_EBUFS,
GIT_EUSER,
GIT_EBAREREPO,
GIT_EUNBORNBRANCH,
GIT_EUNMERGED,
GIT_ENONFASTFORWARD,
GIT_EINVALIDSPEC,
GIT_EMERGECONFLICT,
GIT_ELOCKED,
GIT_EMODIFIED,
GIT_PASSTHROUGH,
GIT_ITEROVER
)
}
/// Return the message associated with this error
pub fn message(&self) -> String {
let cstr = unsafe { CString::new(self.raw.message as *const _, false) };
String::from_utf8_lossy(cstr.as_bytes_no_nul()).to_string()
}
}
impl error::Error for Error {
fn description(&self) -> &str {
unsafe { str::from_c_str(self.raw.message as *const _) }
}
fn detail(&self) -> Option<String> { Some(self.message()) }
}
impl fmt::Show for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f, "[{}] ", self.raw.klass));
let cstr = unsafe { CString::new(self.raw.message as *const _, false) };
f.write(cstr.as_bytes_no_nul())
}
}
impl Drop for Error {
fn drop(&mut self) {
unsafe { libc::free(self.raw.message as *mut libc::c_void) }
}
}
unsafe impl Send for Error {}<|fim▁end|> | |
<|file_name|>high-contrast-light.src.js<|end_file_name|><|fim▁begin|>/**
* @license Highcharts JS v9.1.0 (2021-05-04)
* @module highcharts/themes/high-contrast-light
* @requires highcharts
*<|fim▁hole|> *
* License: www.highcharts.com/license
*/
'use strict';
import '../../Extensions/Themes/HighContrastLight.js';<|fim▁end|> | * (c) 2009-2021 Highsoft AS |
<|file_name|>networking.go<|end_file_name|><|fim▁begin|>package api
type Networking struct {
AmazonVPC AmazonVPC `yaml:"amazonVPC,omitempty"`
SelfHosting SelfHosting `yaml:"selfHosting,omitempty"`
}
type SelfHosting struct {
Type string `yaml:"type"`
Typha bool `yaml:"typha"`<|fim▁hole|> CalicoNodeImage Image `yaml:"calicoNodeImage"`
CalicoCniImage Image `yaml:"calicoCniImage"`
FlannelImage Image `yaml:"flannelImage"`
FlannelCniImage Image `yaml:"flannelCniImage"`
TyphaImage Image `yaml:"typhaImage"`
}<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Enum wrappers to be able to select different channel implementations at runtime.
mod ipc;
mod mpsc;
use ::webgl::WebGLMsg;
use serde::{Deserialize, Serialize};
use servo_config::opts;
use std::fmt;
lazy_static! {
static ref IS_MULTIPROCESS: bool = {
opts::multiprocess()
};
}
#[derive(Clone, Deserialize, Serialize)]
pub enum WebGLSender<T: Serialize> {
Ipc(ipc::WebGLSender<T>),
Mpsc(mpsc::WebGLSender<T>),
}
impl<T: Serialize> fmt::Debug for WebGLSender<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "WebGLSender(..)")
}
}
impl<T: Serialize> WebGLSender<T> {
#[inline]
pub fn send(&self, msg: T) -> WebGLSendResult {
match *self {
WebGLSender::Ipc(ref sender) => {
sender.send(msg).map_err(|_| ())
},
WebGLSender::Mpsc(ref sender) => {
sender.send(msg).map_err(|_| ())
}
}
}
}
pub type WebGLSendResult = Result<(), ()>;
<|fim▁hole|>pub enum WebGLReceiver<T> where T: for<'de> Deserialize<'de> + Serialize {
Ipc(ipc::WebGLReceiver<T>),
Mpsc(mpsc::WebGLReceiver<T>),
}
impl<T> WebGLReceiver<T> where T: for<'de> Deserialize<'de> + Serialize {
pub fn recv(&self) -> Result<T, ()> {
match *self {
WebGLReceiver::Ipc(ref receiver) => {
receiver.recv().map_err(|_| ())
},
WebGLReceiver::Mpsc(ref receiver) => {
receiver.recv().map_err(|_| ())
}
}
}
}
pub fn webgl_channel<T>() -> Result<(WebGLSender<T>, WebGLReceiver<T>), ()>
where T: for<'de> Deserialize<'de> + Serialize {
if *IS_MULTIPROCESS {
ipc::webgl_channel().map(|(tx, rx)| (WebGLSender::Ipc(tx), WebGLReceiver::Ipc(rx)))
.map_err(|_| ())
} else {
mpsc::webgl_channel().map(|(tx, rx)| (WebGLSender::Mpsc(tx), WebGLReceiver::Mpsc(rx)))
}
}
#[derive(Clone, Deserialize, Serialize)]
pub struct WebGLChan(pub WebGLSender<WebGLMsg>);
impl WebGLChan {
#[inline]
pub fn send(&self, msg: WebGLMsg) -> WebGLSendResult {
self.0.send(msg)
}
}
#[derive(Clone, Deserialize, Serialize)]
pub struct WebGLPipeline(pub WebGLChan);
impl WebGLPipeline {
pub fn channel(&self) -> WebGLChan {
self.0.clone()
}
}<|fim▁end|> | |
<|file_name|>e127.py<|end_file_name|><|fim▁begin|>from __future__ import print_function, division
import matplotlib
matplotlib.use('Agg') # Must be before importing matplotlib.pyplot or pylab!
from neuralnilm import Net, RealApplianceSource, BLSTMLayer, SubsampleLayer, DimshuffleLayer
from lasagne.nonlinearities import sigmoid, rectify
from lasagne.objectives import crossentropy, mse
from lasagne.init import Uniform, Normal
from lasagne.layers import LSTMLayer, DenseLayer, Conv1DLayer, ReshapeLayer
from lasagne.updates import adagrad, nesterov_momentum
from functools import partial
import os
from neuralnilm.source import standardise
from neuralnilm.experiment import run_experiment
from neuralnilm.net import TrainingError
import __main__
NAME = os.path.splitext(os.path.split(__main__.__file__)[1])[0]
PATH = "/homes/dk3810/workspace/python/neuralnilm/figures"
SAVE_PLOT_INTERVAL = 250
GRADIENT_STEPS = 100
"""
e103
Discovered that bottom layer is hardly changing. So will try
just a single lstm layer
e104
standard init
lower learning rate
e106<|fim▁hole|>is e107 but with batch size of 5
e109
Normal(1) for LSTM
e110
* Back to Uniform(5) for LSTM
* Using nntools eb17bd923ef9ff2cacde2e92d7323b4e51bb5f1f
RESULTS: Seems to run fine again!
e111
* Try with nntools head
* peepholes=False
RESULTS: appears to be working well. Haven't seen a NaN,
even with training rate of 0.1
e112
* n_seq_per_batch = 50
e114
* Trying layer-by-layer training again.
* Start with single LSTM layer
e115
* Learning rate = 1
e116
* Standard inits
e117
* Uniform(1) init
e119
* Learning rate 10
# Result: didn't work well!
e120
* init: Normal(1)
* not as good as Uniform(5)
e121
* Uniform(25)
e122
* Just 10 cells
* Uniform(5)
e125
* Pre-train lower layers
"""
def exp_a(name):
source = RealApplianceSource(
filename='/data/dk3810/ukdale.h5',
appliances=[
['fridge freezer', 'fridge', 'freezer'],
'hair straighteners',
'television'
# 'dish washer',
# ['washer dryer', 'washing machine']
],
max_appliance_powers=[300, 500, 200], #, 2500, 2400],
on_power_thresholds=[20, 20, 20], #, 20, 20],
max_input_power=1000,
min_on_durations=[60, 60, 60], #, 1800, 1800],
window=("2013-06-01", "2014-07-01"),
seq_length=1000,
output_one_appliance=False,
boolean_targets=False,
min_off_duration=60,
train_buildings=[1],
validation_buildings=[1],
skip_probability=0,
n_seq_per_batch=50
)
net = Net(
experiment_name=name,
source=source,
save_plot_interval=SAVE_PLOT_INTERVAL,
loss_function=crossentropy,
updates=partial(nesterov_momentum, learning_rate=1.0),
layers_config=[
{
'type': LSTMLayer,
'num_units': 50,
'W_in_to_cell': Uniform(25),
'gradient_steps': GRADIENT_STEPS,
'peepholes': False
},
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': sigmoid
}
],
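        # layer_changes below swaps part of the network mid-training: at epoch
        # 501 the layers from index -3 onward appear to be removed
        # ('remove_from': -3) and replaced with a freshly initialised
        # LSTM + sigmoid Dense pair -- the layer-by-layer pre-training
        # mentioned in the module docstring (e125).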
layer_changes={
501: {
'remove_from': -3,
'new_layers':
[
{
'type': LSTMLayer,
'num_units': 50,
'W_in_to_cell': Uniform(1),
'gradient_steps': GRADIENT_STEPS,
'peepholes': False
},
{
'type': DenseLayer,
'num_units': source.n_outputs,
'nonlinearity': sigmoid
}
]
}
}
)
return net
def init_experiment(experiment):
full_exp_name = NAME + experiment
func_call = 'exp_{:s}(full_exp_name)'.format(experiment)
print("***********************************")
print("Preparing", full_exp_name, "...")
net = eval(func_call)
return net
def main():
for experiment in list('a'):
full_exp_name = NAME + experiment
path = os.path.join(PATH, full_exp_name)
try:
net = init_experiment(experiment)
run_experiment(net, path, epochs=5000)
except KeyboardInterrupt:
break
except TrainingError as e:
print("EXCEPTION:", e)
if __name__ == "__main__":
main()<|fim▁end|> | lower learning rate to 0.001
e108 |
<|file_name|>TestConformColorsCssProcessor.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2010. All rights reserved.
*/
package ro.isdc.wro.model.resource.processor;
import static org.junit.Assert.assertEquals;
import java.io.File;
import java.net.URL;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import ro.isdc.wro.config.Context;
import ro.isdc.wro.model.resource.ResourceType;
import ro.isdc.wro.model.resource.processor.impl.css.ConformColorsCssProcessor;
import ro.isdc.wro.util.WroTestUtils;
/**
* TestConformColorsCssProcessor.
*
* @author Alex Objelean
* @created Created on Aug 15, 2010
*/
public class TestConformColorsCssProcessor {
private ResourcePreProcessor processor;
@BeforeClass
public static void onBeforeClass() {
assertEquals(0, Context.countActive());
}
@AfterClass
public static void onAfterClass() {
assertEquals(0, Context.countActive());
}
@Before
public void setUp() {
processor = new ConformColorsCssProcessor();
}
@Test
public void testFromFolder()
throws Exception {
final URL url = getClass().getResource("conformColors");
final File testFolder = new File(url.getFile(), "test");
final File expectedFolder = new File(url.getFile(), "expected");
WroTestUtils.compareFromDifferentFoldersByExtension(testFolder, expectedFolder, "css", processor);
}
@Test<|fim▁hole|> WroTestUtils.assertProcessorSupportResourceTypes(processor, ResourceType.CSS);
}
}<|fim▁end|> | public void shouldSupportCorrectResourceTypes() { |
<|file_name|>config.py<|end_file_name|><|fim▁begin|>import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
SECRET_KEY = os.environ.get('SECRET_KEY') or 'hard to guess string'
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
MAIL_SERVER = 'smtp.googlemail.com'
MAIL_PORT = 587
MAIL_USE_TLS = True
MAIL_USERNAME = '[email protected]'<|fim▁hole|> FLASKY_ADMIN = '[email protected]'
@staticmethod
def init_app(app):
pass
class DevelopmentConfig(Config):
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'postgres://uaaalnaflmsjnp:pLyQ5JRVbro0WCgXuMVorfqSjY@ec2-54-227-255-240.compute-1.amazonaws.com:5432/d8hosmtv1eijgp'
class TestingConfig(Config):
TESTING = True
SQLALCHEMY_DATABASE_URI = 'postgres://uaaalnaflmsjnp:pLyQ5JRVbro0WCgXuMVorfqSjY@ec2-54-227-255-240.compute-1.amazonaws.com:5432/d8hosmtv1eijgp'
class ProductionConfig(Config):
SQLALCHEMY_DATABASE_URI = 'postgres://uaaalnaflmsjnp:pLyQ5JRVbro0WCgXuMVorfqSjY@ec2-54-227-255-240.compute-1.amazonaws.com:5432/d8hosmtv1eijgp'
config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'production': ProductionConfig,
'default': DevelopmentConfig
}<|fim▁end|> | MAIL_PASSWORD = 'Airjeff3'
FLASKY_MAIL_SUBJECT_PREFIX = '[JeffPD]'
FLASKY_MAIL_SENDER = '[email protected]' |
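# Typical wiring for the config mapping above (illustrative sketch; the app
# factory shown here is an assumption, it is not defined in this file):
#
#   from flask import Flask
#   from config import config
#
#   app = Flask(__name__)
#   app.config.from_object(config['default'])
#   config['default'].init_app(app)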
<|file_name|>userassist.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Windows UserAssist information collector."""
import codecs
import logging
from winregrc import data_format
from winregrc import errors
from winregrc import interface
class UserAssistEntry(object):
"""UserAssist entry.
Attributes:
guid (str): GUID.
name (str): name.
value_name (str): name of the Windows Registry value.
"""
def __init__(self, guid=None, name=None, value_name=None):
"""Initializes an UserAssist entry.
Args:
guid (Optional[str]): GUID.
name (Optional[str]): name.
value_name (Optional[str]): name of the Windows Registry value.
"""
super(UserAssistEntry, self).__init__()
self.guid = guid
self.name = name
self.value_name = value_name
class UserAssistDataParser(data_format.BinaryDataFormat):
"""UserAssist data parser."""
_DEFINITION_FILE = 'userassist.yaml'
# pylint: disable=missing-type-doc
def _DebugPrintEntry(self, format_version, user_assist_entry):
"""Prints UserAssist entry value debug information.
Args:
format_version (int): format version.
user_assist_entry (user_assist_entry_v3|user_assist_entry_v5):
UserAssist entry.
"""
value_string = '0x{0:08x}'.format(user_assist_entry.unknown1)
self._DebugPrintValue('Unknown1', value_string)
self._DebugPrintDecimalValue(
'Number of executions', user_assist_entry.number_of_executions)
if format_version == 5:
self._DebugPrintDecimalValue(
'Application focus count',
user_assist_entry.application_focus_count)
self._DebugPrintDecimalValue(
'Application focus duration',
user_assist_entry.application_focus_duration)
value_string = '{0:.2f}'.format(user_assist_entry.unknown2)
self._DebugPrintValue('Unknown2', value_string)
value_string = '{0:.2f}'.format(user_assist_entry.unknown3)
self._DebugPrintValue('Unknown3', value_string)
value_string = '{0:.2f}'.format(user_assist_entry.unknown4)
self._DebugPrintValue('Unknown4', value_string)
value_string = '{0:.2f}'.format(user_assist_entry.unknown5)
self._DebugPrintValue('Unknown5', value_string)
value_string = '{0:.2f}'.format(user_assist_entry.unknown6)
self._DebugPrintValue('Unknown6', value_string)
value_string = '{0:.2f}'.format(user_assist_entry.unknown7)
self._DebugPrintValue('Unknown7', value_string)
value_string = '{0:.2f}'.format(user_assist_entry.unknown8)
self._DebugPrintValue('Unknown8', value_string)
value_string = '{0:.2f}'.format(user_assist_entry.unknown9)
self._DebugPrintValue('Unknown9', value_string)
value_string = '{0:.2f}'.format(user_assist_entry.unknown10)
self._DebugPrintValue('Unknown10', value_string)
value_string = '{0:.2f}'.format(user_assist_entry.unknown11)
self._DebugPrintValue('Unknown11', value_string)
value_string = '0x{0:08x}'.format(user_assist_entry.unknown12)<|fim▁hole|>
self._DebugPrintFiletimeValue(
'Last execution time', user_assist_entry.last_execution_time)
if format_version == 5:
value_string = '0x{0:08x}'.format(user_assist_entry.unknown13)
self._DebugPrintValue('Unknown13', value_string)
self._DebugPrintText('\n')
# pylint: disable=missing-return-type-doc
def ParseEntry(self, format_version, entry_data):
"""Parses an UserAssist entry.
Args:
format_version (int): format version.
entry_data (bytes): entry data.
Returns:
user_assist_entry_v3|user_assist_entry_v5: UserAssist entry.
Raises:
ParseError: if the value data could not be parsed.
"""
    if format_version == 3:
      data_type_map = self._GetDataTypeMap('user_assist_entry_v3')
    elif format_version == 5:
      data_type_map = self._GetDataTypeMap('user_assist_entry_v5')
    else:
      raise errors.ParseError(
          'Unsupported format version: {0!s}'.format(format_version))
entry_data_size = data_type_map.GetByteSize()
if entry_data_size != len(entry_data):
raise errors.ParseError((
'Version: {0:d} size mismatch (calculated: {1:d}, '
'stored: {2:d}).').format(
format_version, entry_data_size, len(entry_data)))
try:
user_assist_entry = self._ReadStructureFromByteStream(
entry_data, 0, data_type_map, 'UserAssist entry')
except (ValueError, errors.ParseError) as exception:
raise errors.ParseError(
'Unable to parse UserAssist entry value with error: {0!s}'.format(
exception))
if self._debug:
self._DebugPrintEntry(format_version, user_assist_entry)
return user_assist_entry
class UserAssistCollector(interface.WindowsRegistryKeyCollector):
"""Windows UserAssist information collector.
  Attributes:
user_assist_entries (list[UserAssistEntry]): UserAssist entries.
"""
_USER_ASSIST_KEY = (
'HKEY_CURRENT_USER\\Software\\Microsoft\\Windows\\CurrentVersion\\'
'Explorer\\UserAssist')
def __init__(self, debug=False, output_writer=None):
"""Initializes a Windows UserAssist information collector.
Args:
debug (Optional[bool]): True if debug information should be printed.
output_writer (Optional[OutputWriter]): output writer.
"""
super(UserAssistCollector, self).__init__(debug=debug)
self._output_writer = output_writer
self._parser = UserAssistDataParser(
debug=debug, output_writer=output_writer)
self.user_assist_entries = []
def _CollectUserAssistFromKey(self, guid_subkey):
"""Collects the UserAssist information from a GUID sub key.
Args:
guid_subkey (dfwinreg.WinRegistryKey): UserAssist GUID Registry key.
"""
version_value = guid_subkey.GetValueByName('Version')
if not version_value:
logging.warning('Missing Version value in sub key: {0:s}'.format(
guid_subkey.name))
return
format_version = version_value.GetDataAsObject()
if self._debug:
self._output_writer.WriteValue('GUID', guid_subkey.name)
self._output_writer.WriteIntegerValueAsDecimal(
'Format version', format_version)
self._output_writer.WriteText('\n')
count_subkey = guid_subkey.GetSubkeyByName('Count')
for value in count_subkey.GetValues():
if self._debug:
self._output_writer.WriteValue('Original name', value.name)
try:
# Note that Python 2 codecs.decode() does not support keyword arguments
        # such as encoding='rot-13'.
value_name = codecs.decode(value.name, 'rot-13')
except UnicodeEncodeError:
characters = []
for character in value.name:
if ord(character) < 128:
try:
character = codecs.decode(character, 'rot-13')
characters.append(character)
except UnicodeEncodeError:
characters.append(character)
else:
characters.append(character)
value_name = ''.join(characters)
if self._debug:
self._output_writer.WriteValue('Converted name', value_name)
self._output_writer.WriteDebugData('Value data:', value.data)
if value_name != 'UEME_CTLSESSION':
user_assist_entry = self._parser.ParseEntry(format_version, value.data)
user_assist_entry = UserAssistEntry(
guid=guid_subkey.name, name=value_name, value_name=value.name)
self.user_assist_entries.append(user_assist_entry)
def Collect(self, registry): # pylint: disable=arguments-differ
"""Collects the UserAssist information.
Args:
registry (dfwinreg.WinRegistry): Windows Registry.
Returns:
bool: True if the UserAssist key was found, False if not.
"""
user_assist_key = registry.GetKeyByPath(self._USER_ASSIST_KEY)
if not user_assist_key:
return False
for guid_subkey in user_assist_key.GetSubkeys():
self._CollectUserAssistFromKey(guid_subkey)
return True<|fim▁end|> | self._DebugPrintValue('Unknown12', value_string) |
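# Aside (illustrative sketch): UserAssist value names are ROT-13 encoded,
# which is what the codecs.decode() calls above undo. For example:
#
#   import codecs
#   codecs.decode('HRZR_PGYFRFFVBA', 'rot-13')  # -> 'UEME_CTLSESSION'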
<|file_name|>MINISTM32_STRIVE.py<|end_file_name|><|fim▁begin|>#!/bin/false
# This file is part of Espruino, a JavaScript interpreter for Microcontrollers
#
# Copyright (C) 2013 Gordon Williams <[email protected]>
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# ----------------------------------------------------------------------------------------
# This file contains information for a specific board - the available pins, and where LEDs,
# Buttons, and other in-built peripherals are. It is used to build documentation as well
# as various source and header files for Espruino.
# ----------------------------------------------------------------------------------------
import pinutils;
info = {
'name' : "STRIVE Mini STM32 2.4 inch LCD Board (VET6)",
'variables' : 2800,
'serial_bootloader' : True,
'binary_name' : 'espruino_%v_strive_mini_stm32.bin',
};
chip = {
'part' : "STM32F103VE", #T6
'family' : "STM32F1",
'package' : "LQFP100",<|fim▁hole|> 'speed' : 72,
'usart' : 3,
'spi' : 2,
'i2c' : 2,
'adc' : 3,
'dac' : 2,
};
devices = {
'OSC' : { 'pin_1' : 'D0',
'pin_2' : 'D1' },
'OSC_RTC' : { 'pin_1' : 'C14',
'pin_2' : 'C15' },
'LED1' : { 'pin' : 'B5' },
'BTN1' : { 'pin' : 'B15',
'inverted' : True, # 1 when unpressed, 0 when pressed! (Espruino board is 1 when pressed)
'pinstate': 'IN_PULLUP', # to specify INPUT, OUPUT PULL_UP PULL_DOWN..
},
'USB' : { 'pin_disc' : 'C13',
'pin_dm' : 'A11',
'pin_dp' : 'A12' },
'SD' : { 'pin_cmd' : 'D2',
'pin_d0' : 'C8',
'pin_d1' : 'C9',
'pin_d2' : 'C10',
'pin_d3' : 'C11',
'pin_clk' : 'C12' },
'TOUCHSCREEN' : {
'pin_irq' : 'B6',
'pin_cs' : 'B7',
'pin_sck' : 'A5',
'pin_miso' : 'A6',
'pin_mosi' : 'A7'
},
'LCD' : {
'width' : 320, 'height' : 240, 'bpp' : 16, 'controller' : 'fsmc',
'pin_d0' : 'D14',
'pin_d1' : 'D15',
'pin_d2' : 'D0',
'pin_d3' : 'D1',
'pin_d4' : 'E7',
'pin_d5' : 'E8',
'pin_d6' : 'E9',
'pin_d7' : 'E10',
'pin_d8' : 'E11',
'pin_d9' : 'E12',
'pin_d10' : 'E13',
'pin_d11' : 'E14',
'pin_d12' : 'E15',
'pin_d13' : 'D8',
'pin_d14' : 'D9',
'pin_d15' : 'D10',
'pin_rd' : 'D4',
'pin_wr' : 'D5',
'pin_cs' : 'D7',
'pin_rs' : 'D11',
'pin_reset' : 'E1',
'pin_bl' : 'D13'
},
'JTAG' : {
'pin_MS' : 'A13',
'pin_CK' : 'A14',
'pin_DI' : 'A15'
}
};
# left-right, or top-bottom order
board = {
'top' : [ 'A5', 'B7', 'A7', 'A6', 'B6', '', '', '', '', 'D8', 'E15', 'GND', 'E14', 'E13', 'E12', 'D9', 'E11', 'GND', 'D13', 'D10' ],
'top2' : [ '3V3', 'GND', 'E1', 'D4', 'D5', 'D7', 'D11', 'E10', 'E9', 'D1', 'E8', 'E7', '3V3', 'D0', 'D15', 'D14', 'GND', 'GND', '5V', '5V' ],
'bottom2' : [ '3V3', 'E2', 'E4', 'E6', 'C1', 'C3', 'A1', 'A3', 'C5', 'E0', 'B1', 'C7', 'A0', 'B10', 'A8', 'C6', 'B13', 'B15', '5V', '5V' ],
'bottom' : [ 'GND', 'E3', 'E5', 'GND', 'B0', 'C2', 'A2', 'C4', 'C0', 'B9', 'B11', 'D12', 'D6', 'D3', 'B8', 'B14', 'B12', '3V3', 'GND', 'GND' ],
'left' : [ '3V3', 'B4', 'A15', 'A13', 'A14', '', 'B3', 'RESET', '', '' ],
};
board["left"].reverse()
board["top"].reverse()
board["top2"].reverse()
board["bottom"].reverse()
board["bottom2"].reverse()
board["_css"] = """
#board {
width: 960px;
height: 739px;
left: 100px;
top: 200px;
background-image: url(img/MINISTM32_STRIVE.jpg);
}
#boardcontainer {
height: 1250px;
}
#left {
top: 430px;
right: 800px;
}
#top {
top: 20px;
left: 230px;
}
#top2 {
top: 80px;
left: 230px;
}
#bottom {
top: 680px;
left: 230px;
}
#bottom2 {
top: 610px;
left: 230px;
}
""";
def get_pins():
pins = pinutils.scan_pin_file([], 'stm32f103xe.csv', 6, 10, 11)
return pinutils.only_from_package(pinutils.fill_gaps_in_pin_list(pins), chip["package"])<|fim▁end|> | 'ram' : 64,
'flash' : 512, |
<|file_name|>map1.py<|end_file_name|><|fim▁begin|>class TschunkMap1(): #(TschunkMap):
def __init__(self):
self.img = 'img/map1.png'
self.figure = 'todo'
self.rows = 15<|fim▁hole|> self.cols = 7
self.origin_x = 1
self.origin_y = 13
self.initial_direction = (0, -1)<|fim▁end|> | |
<|file_name|>ObjectListItem.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Color-Coding Studio. All Rights Reserved.
*
* Use of this source code is governed by an Apache License, Version 2.0
* that can be found in the LICENSE file at http://www.apache.org/licenses/LICENSE-2.0
*/
namespace sap {
export namespace extension {
export namespace m {
/**
            * Object list item
*/
sap.m.ObjectListItem.extend("sap.extension.m.ObjectListItem", {
metadata: {
properties: {
},
events: {}
},
renderer: {
},
});
/**
            * Data object list item
*/
ObjectListItem.extend("sap.extension.m.DataObjectListItem", {
metadata: {
properties: {
                        /** data info */
dataInfo: { type: "any" },
                        /** user fields mode */
userFieldsMode: { type: "string" },
                        /** property filter */
propertyFilter: { type: "function" },
},
events: {}
},
renderer: {
},
/**
                 * Gets the data info
*/
getDataInfo(this: DataObjectListItem): { code: string, name?: string } | string | Function | shell.bo.IBizObjectInfo {
return this.getProperty("dataInfo");
},
/**
                 * Sets the data info
                 * @param value data info
*/
setDataInfo(this: DataObjectListItem, value: { code: string, name?: string } | string | Function | shell.bo.IBizObjectInfo): DataObjectListItem {
return this.setProperty("dataInfo", value);
},
                /** overridden applySettings */
applySettings(this: DataObjectListItem, mSettings: any, oScope?: any): DataObjectListItem {
if (ibas.objects.isNull(mSettings.userFieldsMode)) {
mSettings.userFieldsMode = "attribute";
}
ObjectListItem.prototype.applySettings.apply(this, arguments);
                    // set the remaining properties
let dataInfo: any = this.getDataInfo();
if (typeof dataInfo === "string") {
dataInfo = {
code: dataInfo,
};
} else if (typeof dataInfo === "function") {
dataInfo = {
code: dataInfo.BUSINESS_OBJECT_CODE,
name: ibas.objects.nameOf(dataInfo),
};
}
if (typeof dataInfo === "object"
&& (!ibas.strings.isEmpty(this.getUserFieldsMode()) && !ibas.strings.equalsIgnoreCase(this.getUserFieldsMode(), "none"))) {
if (dataInfo.properties instanceof Array) {
propertyControls.call(this, dataInfo);
} else {
let info: { code: string, name?: string } = dataInfo;
let boRepository: shell.bo.IBORepositoryShell = ibas.boFactory.create(shell.bo.BO_REPOSITORY_SHELL);
boRepository.fetchBizObjectInfo({
user: ibas.variablesManager.getValue(ibas.VARIABLE_NAME_USER_CODE),
boCode: ibas.config.applyVariables(info.code),
boName: info.name,
onCompleted: (opRslt) => {
if (opRslt.resultCode !== 0) {
ibas.logger.log(new Error(opRslt.message));
} else {
propertyControls.call(this, opRslt.resultObjects.firstOrDefault());
}
}
});
}
}
return this;
},
init(this: DataObjectListItem): void {
(<any>ObjectListItem.prototype).init.apply(this, arguments);
this.attachModelContextChange(undefined, function (event: sap.ui.base.Event): void {
let source: any = event.getSource();
if (source instanceof ObjectListItem) {
let content: any = source.getBindingContext();
if (content instanceof sap.ui.model.Context) {
let data: any = content.getObject();
if (!ibas.objects.isNull(data)) {
let userFields: ibas.IUserFields = data.userFields;
if (!ibas.objects.isNull(userFields)) {
for (let item of source.getAttributes()) {
let bindingInfo: any = managedobjects.bindingInfo(item, "bindingValue");
if (!ibas.objects.isNull(bindingInfo)) {
userfields.check(userFields, bindingInfo);
}
}
}
}
}
}
});
}
});
function propertyControls(this: DataObjectListItem, boInfo: shell.bo.IBizObjectInfo): void {
if (!boInfo || !(boInfo.properties instanceof Array)) {
return;
}
let properties: shell.bo.IBizPropertyInfo[] = Object.assign([], boInfo.properties);
for (let item of this.getAttributes()) {
let bindingPath: string = managedobjects.bindingPath(item);
let index: number = properties.findIndex(c => c && ibas.strings.equalsIgnoreCase(c.name, bindingPath));
if (index < 0) {
return;
}
let propertyInfo: shell.bo.IBizPropertyInfo = properties[index];
if (!ibas.objects.isNull(propertyInfo)) {
if (propertyInfo.authorised === ibas.emAuthoriseType.NONE) {
this.removeAttribute(item);
continue;
}
                        // correct the attribute position
if (propertyInfo.position > 0) {
let index: number = this.indexOfAttribute(item);
let position: number = propertyInfo.position - 1;
if (position < index) {
this.removeAttribute(item);
this.insertAttribute(item, position);
} else if (position > index) {<|fim▁hole|> this.removeAttribute(item);
this.insertAttribute(item, position - 1);
}
}
properties[index] = null;
}
}
if (this.getUserFieldsMode() === "attribute") {
for (let property of properties) {
if (ibas.objects.isNull(property)) {
continue;
}
if (ibas.objects.isNull(property.authorised)) {
continue;
}
if (property.authorised === ibas.emAuthoriseType.NONE) {
continue;
}
property = factories.newProperty(property, boInfo);
let element: any = factories.newComponent(property, "Object");
if (property.systemed === true && element instanceof sap.m.ObjectAttribute) {
element.setTitle(ibas.i18n.prop(ibas.strings.format("bo_{0}_{1}", boInfo.name, property.name).toLowerCase()));
}
let content: any = this.getBindingContext();
if (content instanceof sap.ui.model.Context) {
let data: any = content.getObject();
if (!ibas.objects.isNull(data)) {
let userFields: ibas.IUserFields = data.userFields;
if (!ibas.objects.isNull(userFields)) {
let bindingInfo: any = managedobjects.bindingInfo(element, "bindingValue");
if (!ibas.objects.isNull(bindingInfo)) {
userfields.check(userFields, bindingInfo);
}
}
}
}
if (property.position > 0) {
this.insertAttribute(element, property.position);
} else {
this.addAttribute(element);
}
}
}
}
}
}
}<|fim▁end|> | |
<|file_name|>StringDistanceExample.java<|end_file_name|><|fim▁begin|>/*-
* #%L
* Simmetrics - Examples
* %%
* Copyright (C) 2014 - 2021 Simmetrics Authors
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package com.github.mpkorstanje.simmetrics.example;
import static com.github.mpkorstanje.simmetrics.builders.StringDistanceBuilder.with;
import com.github.mpkorstanje.simmetrics.StringDistance;
import com.github.mpkorstanje.simmetrics.builders.StringDistanceBuilder;
import com.github.mpkorstanje.simmetrics.metrics.EuclideanDistance;
import com.github.mpkorstanje.simmetrics.metrics.StringDistances;
import com.github.mpkorstanje.simmetrics.tokenizers.Tokenizers;
/**
* The StringDistances utility class contains a predefined list of well
* known distance metrics for strings.
*/
final class StringDistanceExample {
/**
* Two strings can be compared using a predefined distance metric.
*/
static float example01() {
String str1 = "This is a sentence. It is made of words";
String str2 = "This sentence is similar. It has almost the same words";
StringDistance metric = StringDistances.levenshtein();
return metric.distance(str1, str2); // 30.0000
}
/**
* A tokenizer is included when the metric is a set or list metric. For the
* euclidean distance, it is a whitespace tokenizer.
*
* Note that most predefined metrics are setup with a whitespace tokenizer.
*/
static float example02() {
String str1 = "A quirky thing it is. This is a sentence.";
String str2 = "This sentence is similar. A quirky thing it is.";
StringDistance metric = StringDistances.euclideanDistance();
return metric.distance(str1, str2); // 2.0000
}
/**
* Using the string distance builder distance metrics can be customized.
* Instead of a whitespace tokenizer a q-gram tokenizer is used.
*
* For more examples see StringDistanceBuilderExample.
*/<|fim▁hole|>
StringDistance metric =
StringDistanceBuilder.with(new EuclideanDistance<>())
.tokenize(Tokenizers.qGram(3))
.build();
return metric.distance(str1, str2); // 4.8989
}
}<|fim▁end|> | static float example03() {
String str1 = "A quirky thing it is. This is a sentence.";
String str2 = "This sentence is similar. A quirky thing it is."; |
<|file_name|>MediaGallery.js<|end_file_name|><|fim▁begin|>import React, {Component, PropTypes} from 'react';
import {View, ListView, Image, CameraRoll, TouchableHighlight, StyleSheet} from 'react-native';
import MessageDao from '../../dao/MessageDao';
import LoadingSpinner from '../common/LoadingSpinner';
import Icon from 'react-native-vector-icons/MaterialIcons';
import MediaRenderer from './MediaRenderer';
import {InteractionManager} from 'react-native';
import { Dimensions } from 'react-native';
class MediaGallery extends Component {<|fim▁hole|> }
componentDidMount(){
let threadId = this.props.threadId;
InteractionManager.runAfterInteractions(() => {
requestAnimationFrame(() => {
this.openGalleryForThread(threadId);
});
});
}
reloadMedia(){
let threadId = this.props.threadId;
this.openGalleryForThread(threadId);
}
openGalleryForThread(threadId){
let mediaResult = MessageDao.getMediasForThread(threadId);
this.setState({
mediasForThread: mediaResult.mediasForThread,
isLoading: false
});
}
render() {
const {router} = this.props;
let imagesDS = new ListView.DataSource({
rowHasChanged: (r1, r2) => r1 !== r2});
//let lotOfImages = [];
//const images = this.state.mediasForThread;
//lotOfImages = lotOfImages.concat(images, images, images);
//imagesDS = imagesDS.cloneWithRows(lotOfImages);
imagesDS = imagesDS.cloneWithRows(this.state.mediasForThread);
if(this.state.isLoading){
return(
<View style={[styles.loadingContainer]}>
<LoadingSpinner size="large"/>
</View>
);
}
else{
return (
<View style={[styles.container]}>
<ListView contentContainerStyle={styles.imageGrid}
enableEmptySections={true}
dataSource={imagesDS}
renderRow={(media) => this.renderMedia(media)}
initialListSize={15}
scrollRenderAheadDistance={500}
pagingEnabled={true}
pageSize={1}
removeClippedSubviews={true} />
</View>
);
}
}
renderMedia(media){
return(
<TouchableHighlight onPress={() => this.openMediaViewer(media)}>
<View>
<MediaRenderer media={media}
router={this.props.router}
threadId={this.props.threadId}
mediaViewerEnabled={true}
mediaStyle={styles.image}/>
</View>
</TouchableHighlight>
);
}
openMediaViewer(media){
this.props.router.toMediaViewer({selectedMedia: media, threadId: this.props.threadId});
}
}
const styles = StyleSheet.create({
container: {
flex: 1,
backgroundColor: 'black',
borderRadius: 4,
borderWidth: 0.5,
borderColor: '#d6d7da',
paddingBottom: 10
},
loadingContainer:{
flex: 1,
justifyContent: 'center',
alignItems: 'center',
backgroundColor: 'black',
height: Dimensions.get('window').height,
width: Dimensions.get('window').width,
},
image: {
width: 100,
height: 100,
margin: 2,
justifyContent: 'center',
},
imageGrid: {
flexDirection: 'row',
flexWrap: 'wrap',
justifyContent: 'space-around',
marginBottom: 50
},
});
MediaGallery.propTypes = {
router: PropTypes.object.isRequired,
threadId: PropTypes.number.isRequired,
};
export default MediaGallery;<|fim▁end|> |
constructor(props, context) {
super(props, context);
this.state = {mediasForThread: [], isLoading: true}; |
<|file_name|>_sizesrc.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators
class SizesrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(
self, plotly_name="sizesrc", parent_name="surface.hoverlabel.font", **kwargs
):
super(SizesrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,<|fim▁hole|> **kwargs
)<|fim▁end|> | edit_type=kwargs.pop("edit_type", "none"), |
<|file_name|>task_html.py<|end_file_name|><|fim▁begin|>class TagException(Exception):
pass
class ArgumentException(Exception):
pass
class Tag(object):
__slots__ = ['_name', '_attributes', '_parent', '_previous_sibling', '_next_sibling', '_first_child',
'_last_child', '_children']
def __init__(self, name, attr=None):
self._name = name
if attr is None or not isinstance(attr, dict):
self._attributes = {}
else:
self._attributes = attr
self._parent = None
self._previous_sibling = None
self._next_sibling = None
self._first_child = None
self._last_child = None
self._children = list()
@property
def parent(self):
return self._parent
@parent.setter
def parent(self, value):
self._parent = value
@parent.deleter
def parent(self):
del self._parent
@property
def previous_sibling(self):
return self._previous_sibling
<|fim▁hole|> self._previous_sibling = value
@previous_sibling.deleter
def previous_sibling(self):
del self._previous_sibling
@property
def next_sibling(self):
return self._next_sibling
@next_sibling.setter
def next_sibling(self, value):
self._next_sibling = value
@next_sibling.deleter
def next_sibling(self):
del self._next_sibling
@property
def first_child(self):
raise TagException('Not a container tag!')
@property
def last_child(self):
raise TagException('Not a container tag!')
def __getattribute__(self, attr):
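        # Fall back to the tag's HTML attribute dict for unknown names, so
        # e.g. tag.src returns tag._attributes.get('src') (None when absent).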
try:
return super().__getattribute__(attr)
except AttributeError:
return self._attributes.get(attr)
def __setattr__(self, name, val):
try:
super().__setattr__(name, val)
except AttributeError:
self._attributes[name] = val
def __delattr__(self, name):
try:
super().__delattr__(name)
except AttributeError:
if self._attributes.get(name):
self._attributes.pop(name)
def __str__(self):
result = '<' + self._name
for key, value in self._attributes.items():
result += ' ' + key + '="' + value + '"'
result += '>'
return result
class ContainerTag(Tag):
__slots__ = ['children']
def __init__(self, name, attr=None):
super().__init__(name, attr)
self.children = self.generator_of_children()
@property
def first_child(self):
return self._first_child
@property
def last_child(self):
return self._last_child
def generator_of_children(self):
for child in self._children:
yield child
def append_child(self, tag):
if not issubclass(type(tag), Tag):
raise TypeError("Argument isn't subclass of Tag.")
self._children.append(tag)
index_of_last_child = len(self._children) - 1
self._last_child = self._children[index_of_last_child]
self._last_child.parent = self
if len(self._children) == 1:
self._first_child = self.last_child
def insert_before(self, tag, next_sibling):
if not issubclass(type(tag), Tag):
raise TypeError("Argument isn't subclass of Tag.")
if next_sibling not in self._children:
self.append_child(tag)
return
        insert_index = self._children.index(next_sibling)
        self._children.insert(insert_index, tag)
        index_of_last_child = len(self._children) - 1
        self._last_child = self._children[index_of_last_child]
        self._children[insert_index].parent = self
        if insert_index == 0:
            self._first_child = self._children[insert_index]
def __str__(self):
result = '<' + self._name
for key, value in self._attributes.items():
result += ' ' + key + '="' + value + '"'
result += '>'
        # Iterate the backing list, not self.children: that generator is
        # created once in __init__ and would already be exhausted on a
        # second str() call.
        for item in self._children:
result += str(item)
result += '</' + self._name + '>'
return result
if __name__ == '__main__':
img_1 = Tag('img')
img_1.src = '/python-developer.svg'
    img_1.alt = 'Python Developer'
    img_2 = Tag('img')
    img_2.src = '/php-developer.svg'
    img_2.alt = 'PHP Developer'
    img_3 = Tag('img')
    img_3.src = '/java-developer.svg'
    img_3.alt = 'Java Developer'
div = ContainerTag('div')
div.append_child(img_1)
div.append_child(img_2)
div.insert_before(img_3, img_1)
print(div)<|fim▁end|> | @previous_sibling.setter
def previous_sibling(self, value): |
<|file_name|>CustomerPersistenceIntegrationTest.java<|end_file_name|><|fim▁begin|>/*
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.customers.persistence;
import static org.mifos.application.meeting.util.helpers.MeetingType.CUSTOMER_MEETING;
import static org.mifos.application.meeting.util.helpers.RecurrenceType.WEEKLY;
import static org.mifos.framework.util.helpers.TestObjectFactory.EVERY_WEEK;
import java.sql.Date;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.List;
import junit.framework.Assert;
import org.mifos.accounts.business.AccountActionDateEntity;
import org.mifos.accounts.business.AccountBO;
import org.mifos.accounts.business.AccountFeesEntity;
import org.mifos.accounts.business.AccountStateEntity;
import org.mifos.accounts.business.AccountTestUtils;
import org.mifos.accounts.exceptions.AccountException;
import org.mifos.accounts.fees.business.AmountFeeBO;
import org.mifos.accounts.fees.business.FeeBO;
import org.mifos.accounts.fees.util.helpers.FeeCategory;
import org.mifos.accounts.loan.business.LoanBO;
import org.mifos.accounts.productdefinition.business.LoanOfferingBO;
import org.mifos.accounts.productdefinition.business.SavingsOfferingBO;
import org.mifos.accounts.productdefinition.util.helpers.RecommendedAmountUnit;
import org.mifos.accounts.savings.business.SavingsBO;
import org.mifos.accounts.util.helpers.AccountState;
import org.mifos.accounts.util.helpers.AccountStateFlag;
import org.mifos.accounts.util.helpers.AccountTypes;
import org.mifos.application.master.business.MifosCurrency;
import org.mifos.application.meeting.business.MeetingBO;
import org.mifos.application.meeting.exceptions.MeetingException;
import org.mifos.application.meeting.persistence.MeetingPersistence;
import org.mifos.application.meeting.util.helpers.RecurrenceType;
import org.mifos.application.servicefacade.CollectionSheetCustomerDto;
import org.mifos.application.util.helpers.YesNoFlag;
import org.mifos.config.AccountingRulesConstants;
import org.mifos.config.ConfigurationManager;
import org.mifos.core.CurrencyMismatchException;
import org.mifos.customers.business.CustomerAccountBO;
import org.mifos.customers.business.CustomerBO;
import org.mifos.customers.business.CustomerBOTestUtils;
import org.mifos.customers.business.CustomerNoteEntity;
import org.mifos.customers.business.CustomerPerformanceHistoryView;
import org.mifos.customers.business.CustomerSearch;
import org.mifos.customers.business.CustomerStatusEntity;
import org.mifos.customers.business.CustomerView;
import org.mifos.customers.center.business.CenterBO;
import org.mifos.customers.checklist.business.CheckListBO;
import org.mifos.customers.checklist.business.CustomerCheckListBO;
import org.mifos.customers.checklist.util.helpers.CheckListConstants;
import org.mifos.customers.client.business.AttendanceType;
import org.mifos.customers.client.business.ClientBO;
import org.mifos.customers.client.util.helpers.ClientConstants;
import org.mifos.customers.group.BasicGroupInfo;
import org.mifos.customers.group.business.GroupBO;
import org.mifos.customers.personnel.business.PersonnelBO;
import org.mifos.customers.personnel.util.helpers.PersonnelConstants;
import org.mifos.customers.util.helpers.ChildrenStateType;
import org.mifos.customers.util.helpers.CustomerLevel;
import org.mifos.customers.util.helpers.CustomerStatus;
import org.mifos.customers.util.helpers.CustomerStatusFlag;
import org.mifos.framework.MifosIntegrationTestCase;
import org.mifos.framework.TestUtils;
import org.mifos.framework.exceptions.ApplicationException;
import org.mifos.framework.exceptions.PersistenceException;
import org.mifos.framework.exceptions.SystemException;
import org.mifos.framework.hibernate.helper.QueryResult;
import org.mifos.framework.hibernate.helper.StaticHibernateUtil;
import org.mifos.framework.util.helpers.Money;
import org.mifos.framework.util.helpers.TestObjectFactory;
import org.mifos.security.util.UserContext;
public class CustomerPersistenceIntegrationTest extends MifosIntegrationTestCase {
public CustomerPersistenceIntegrationTest() throws Exception {
super();
}
private MeetingBO meeting;
private CustomerBO center;
private ClientBO client;
private CustomerBO group2;
private CustomerBO group;
private AccountBO account;
private LoanBO groupAccount;
private LoanBO clientAccount;
private SavingsBO centerSavingsAccount;
private SavingsBO groupSavingsAccount;
private SavingsBO clientSavingsAccount;
private SavingsOfferingBO savingsOffering;
private final CustomerPersistence customerPersistence = new CustomerPersistence();
@Override
protected void setUp() throws Exception {
super.setUp();
}
@Override
public void tearDown() throws Exception {
try {
TestObjectFactory.cleanUp(centerSavingsAccount);
TestObjectFactory.cleanUp(groupSavingsAccount);
TestObjectFactory.cleanUp(clientSavingsAccount);
TestObjectFactory.cleanUp(groupAccount);
TestObjectFactory.cleanUp(clientAccount);
TestObjectFactory.cleanUp(account);
TestObjectFactory.cleanUp(client);
TestObjectFactory.cleanUp(group2);
TestObjectFactory.cleanUp(group);
TestObjectFactory.cleanUp(center);
StaticHibernateUtil.closeSession();
} catch (Exception e) {
// Throwing from tearDown will tend to mask the real failure.
e.printStackTrace();
}
super.tearDown();
}
public void testGetTotalAmountForAllClientsOfGroupForSingleCurrency() throws Exception {
MeetingBO meeting = TestObjectFactory.createMeeting(TestObjectFactory.getTypicalMeeting());
center = createCenter("new_center");
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group", CustomerStatus.GROUP_ACTIVE, center);
client = TestObjectFactory.createClient("client1", CustomerStatus.CLIENT_ACTIVE, group);
AccountBO clientAccount1 = getLoanAccount(client, meeting, "fdbdhgsgh", "54hg", TestUtils.RUPEE);
AccountBO clientAccount2 = getLoanAccount(client, meeting, "fasdfdsfasdf", "1qwe", TestUtils.RUPEE);
Money amount = customerPersistence.getTotalAmountForAllClientsOfGroup(group.getOffice().getOfficeId(),
AccountState.LOAN_ACTIVE_IN_GOOD_STANDING, group.getSearchId() + ".%");
Assert.assertEquals(new Money(TestUtils.RUPEE, "600"), amount);
TestObjectFactory.cleanUp(clientAccount1);
TestObjectFactory.cleanUp(clientAccount2);
}
/*
* When trying to sum amounts across loans with different currencies, we should get an exception
*/
public void testGetTotalAmountForAllClientsOfGroupForMultipleCurrencies() throws Exception {
ConfigurationManager configMgr = ConfigurationManager.getInstance();
configMgr.setProperty(AccountingRulesConstants.ADDITIONAL_CURRENCY_CODES, TestUtils.EURO.getCurrencyCode());
AccountBO clientAccount1;
AccountBO clientAccount2;
try {
MeetingBO meeting = TestObjectFactory.createMeeting(TestObjectFactory.getTypicalMeeting());
center = createCenter("new_center");
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group", CustomerStatus.GROUP_ACTIVE, center);
client = TestObjectFactory.createClient("client1", CustomerStatus.CLIENT_ACTIVE, group);
clientAccount1 = getLoanAccount(client, meeting, "fdbdhgsgh", "54hg", TestUtils.RUPEE);
clientAccount2 = getLoanAccount(client, meeting, "fasdfdsfasdf", "1qwe", TestUtils.EURO);
try {
customerPersistence.getTotalAmountForAllClientsOfGroup(group.getOffice().getOfficeId(),
AccountState.LOAN_ACTIVE_IN_GOOD_STANDING, group.getSearchId() + ".%");
fail("didn't get the expected CurrencyMismatchException");
} catch (CurrencyMismatchException e) {
// if we got here then we got the exception we were expecting
assertNotNull(e);
} catch (Exception e) {
fail("didn't get the expected CurrencyMismatchException");
}
} finally {
configMgr.clearProperty(AccountingRulesConstants.ADDITIONAL_CURRENCY_CODES);
}
TestObjectFactory.cleanUp(clientAccount1);
TestObjectFactory.cleanUp(clientAccount2);
}
/*
* When trying to sum amounts across loans with different currencies, we should get an exception
*/
public void testGetTotalAmountForGroupForMultipleCurrencies() throws Exception {
ConfigurationManager configMgr = ConfigurationManager.getInstance();
configMgr.setProperty(AccountingRulesConstants.ADDITIONAL_CURRENCY_CODES, TestUtils.EURO.getCurrencyCode());
GroupBO group1;
AccountBO account1;
AccountBO account2;
try {
CustomerPersistence customerPersistence = new CustomerPersistence();
meeting = TestObjectFactory.createMeeting(TestObjectFactory.getNewMeetingForToday(WEEKLY, EVERY_WEEK,
CUSTOMER_MEETING));
center = TestObjectFactory.createWeeklyFeeCenter("Center", meeting);
group1 = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group1", CustomerStatus.GROUP_ACTIVE, center);
account1 = getLoanAccount(group1, meeting, "adsfdsfsd", "3saf", TestUtils.RUPEE);
account2 = getLoanAccount(group1, meeting, "adspp", "kkaf", TestUtils.EURO);
try {
customerPersistence.getTotalAmountForGroup(group1.getCustomerId(),
AccountState.LOAN_ACTIVE_IN_GOOD_STANDING);
fail("didn't get the expected CurrencyMismatchException");
} catch (CurrencyMismatchException e) {
// if we got here then we got the exception we were expecting
assertNotNull(e);
} catch (Exception e) {
fail("didn't get the expected CurrencyMismatchException");
}
} finally {
configMgr.clearProperty(AccountingRulesConstants.ADDITIONAL_CURRENCY_CODES);
}
TestObjectFactory.cleanUp(account1);
TestObjectFactory.cleanUp(account2);
TestObjectFactory.cleanUp(group1);
}
public void testGetTotalAmountForGroup() throws Exception {
CustomerPersistence customerPersistence = new CustomerPersistence();
meeting = TestObjectFactory.createMeeting(TestObjectFactory.getNewMeetingForToday(WEEKLY, EVERY_WEEK,
CUSTOMER_MEETING));
center = TestObjectFactory.createWeeklyFeeCenter("Center", meeting);
GroupBO group1 = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group1", CustomerStatus.GROUP_ACTIVE, center);
AccountBO account1 = getLoanAccount(group1, meeting, "adsfdsfsd", "3saf");
AccountBO account2 = getLoanAccount(group1, meeting, "adspp", "kkaf");
Money amount = customerPersistence.getTotalAmountForGroup(group1.getCustomerId(),
AccountState.LOAN_ACTIVE_IN_GOOD_STANDING);
Assert.assertEquals(new Money(getCurrency(), "600"), amount);
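        // Add two loans in bad standing; the bad-standing total below should include only those two.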
AccountBO account3 = getLoanAccountInActiveBadStanding(group1, meeting, "adsfdsfsd1", "4sa");
AccountBO account4 = getLoanAccountInActiveBadStanding(group1, meeting, "adspp2", "kaf5");
Money amount2 = customerPersistence.getTotalAmountForGroup(group1.getCustomerId(),
AccountState.LOAN_ACTIVE_IN_BAD_STANDING);
Assert.assertEquals(new Money(getCurrency(), "600"), amount2);
TestObjectFactory.cleanUp(account1);
TestObjectFactory.cleanUp(account2);
TestObjectFactory.cleanUp(account3);
TestObjectFactory.cleanUp(account4);
TestObjectFactory.cleanUp(group1);
}
public void testGetTotalAmountForAllClientsOfGroup() throws Exception {
MeetingBO meeting = TestObjectFactory.createMeeting(TestObjectFactory.getTypicalMeeting());
center = createCenter("new_center");
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group", CustomerStatus.GROUP_ACTIVE, center);
client = TestObjectFactory.createClient("client1", CustomerStatus.CLIENT_ACTIVE, group);
AccountBO clientAccount1 = getLoanAccount(client, meeting, "fdbdhgsgh", "54hg");
AccountBO clientAccount2 = getLoanAccount(client, meeting, "fasdfdsfasdf", "1qwe");
Money amount = customerPersistence.getTotalAmountForAllClientsOfGroup(group.getOffice().getOfficeId(),
AccountState.LOAN_ACTIVE_IN_GOOD_STANDING, group.getSearchId() + ".%");
Assert.assertEquals(new Money(getCurrency(), "600"), amount);
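        // Move both client loans to bad standing and verify the per-state totals track the change.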
clientAccount1.changeStatus(AccountState.LOAN_ACTIVE_IN_BAD_STANDING.getValue(), null, "none");
clientAccount2.changeStatus(AccountState.LOAN_ACTIVE_IN_BAD_STANDING.getValue(), null, "none");
TestObjectFactory.updateObject(clientAccount1);
TestObjectFactory.updateObject(clientAccount2);
StaticHibernateUtil.commitTransaction();
Money amount2 = customerPersistence.getTotalAmountForAllClientsOfGroup(group.getOffice().getOfficeId(),
AccountState.LOAN_ACTIVE_IN_BAD_STANDING, group.getSearchId() + ".%");
Assert.assertEquals(new Money(getCurrency(), "600"), amount2);
TestObjectFactory.cleanUp(clientAccount1);
TestObjectFactory.cleanUp(clientAccount2);
}
public void testGetAllBasicGroupInfo() throws Exception {
CustomerPersistence customerPersistence = new CustomerPersistence();
center = createCenter("new_center");
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group", CustomerStatus.GROUP_ACTIVE, center);
GroupBO newGroup = TestObjectFactory.createWeeklyFeeGroupUnderCenter("newGroup", CustomerStatus.GROUP_HOLD, center);
GroupBO newGroup2 = TestObjectFactory.createWeeklyFeeGroupUnderCenter("newGroup2", CustomerStatus.GROUP_CANCELLED,
center);
GroupBO newGroup3 = TestObjectFactory.createWeeklyFeeGroupUnderCenter("newGroup3", CustomerStatus.GROUP_CLOSED, center);
GroupBO newGroup4 = TestObjectFactory.createWeeklyFeeGroupUnderCenter("newGroup4", CustomerStatus.GROUP_PARTIAL, center);
GroupBO newGroup5 = TestObjectFactory.createWeeklyFeeGroupUnderCenter("newGroup5", CustomerStatus.GROUP_PENDING, center);
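        // getAllBasicGroupInfo should return only the groups in ACTIVE or HOLD state.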
List<BasicGroupInfo> groupInfos = customerPersistence.getAllBasicGroupInfo();
Assert.assertEquals(2, groupInfos.size());
Assert.assertEquals(group.getDisplayName(), groupInfos.get(0).getGroupName());
Assert.assertEquals(group.getSearchId(), groupInfos.get(0).getSearchId());
Assert.assertEquals(group.getOffice().getOfficeId(), groupInfos.get(0).getBranchId());
Assert.assertEquals(group.getCustomerId(), groupInfos.get(0).getGroupId());
Assert.assertEquals(newGroup.getDisplayName(), groupInfos.get(1).getGroupName());
Assert.assertEquals(newGroup.getSearchId(), groupInfos.get(1).getSearchId());
Assert.assertEquals(newGroup.getOffice().getOfficeId(), groupInfos.get(1).getBranchId());
Assert.assertEquals(newGroup.getCustomerId(), groupInfos.get(1).getGroupId());
TestObjectFactory.cleanUp(newGroup);
TestObjectFactory.cleanUp(newGroup2);
TestObjectFactory.cleanUp(newGroup3);
TestObjectFactory.cleanUp(newGroup4);
TestObjectFactory.cleanUp(newGroup5);
}
public void testCustomersUnderLO() throws Exception {
CustomerPersistence customerPersistence = new CustomerPersistence();
meeting = TestObjectFactory.createMeeting(TestObjectFactory.getTypicalMeeting());
center = TestObjectFactory.createWeeklyFeeCenter("Center_Active", meeting);
List<CustomerView> customers = customerPersistence.getActiveParentList(Short.valueOf("1"), CustomerLevel.CENTER
.getValue(), Short.valueOf("3"));
Assert.assertEquals(1, customers.size());
}
public void testActiveCustomersUnderParent() throws Exception {
CustomerPersistence customerPersistence = new CustomerPersistence();
createCustomers(CustomerStatus.GROUP_ACTIVE, CustomerStatus.CLIENT_ACTIVE);
List<CustomerView> customers = customerPersistence.getChildrenForParent(center.getCustomerId(), center
.getSearchId(), center.getOffice().getOfficeId());
Assert.assertEquals(2, customers.size());
}
public void testOnHoldCustomersUnderParent() throws Exception {
CustomerPersistence customerPersistence = new CustomerPersistence();
createCustomers(CustomerStatus.GROUP_HOLD, CustomerStatus.CLIENT_HOLD);
List<CustomerView> customers = customerPersistence.getChildrenForParent(center.getCustomerId(), center
.getSearchId(), center.getOffice().getOfficeId());
Assert.assertEquals(2, customers.size());
}
public void testGetLastMeetingDateForCustomer() throws Exception {
CustomerPersistence customerPersistence = new CustomerPersistence();
meeting = TestObjectFactory.createMeeting(TestObjectFactory.getNewMeetingForToday(WEEKLY, EVERY_WEEK,
CUSTOMER_MEETING));
center = TestObjectFactory.createWeeklyFeeCenter("Center", meeting);
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group", CustomerStatus.GROUP_ACTIVE, center);
account = getLoanAccount(group, meeting, "adsfdsfsd", "3saf");
Date meetingDate = customerPersistence.getLastMeetingDateForCustomer(center.getCustomerId());
Assert.assertEquals(new Date(getMeetingDates(meeting).getTime()).toString(), meetingDate.toString());
}
    public void testGetChildrenOtherThanClosed() throws Exception {
CustomerPersistence customerPersistence = new CustomerPersistence();
center = createCenter();
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group1", CustomerStatus.GROUP_ACTIVE, center);
client = TestObjectFactory.createClient("client1", CustomerStatus.CLIENT_ACTIVE, group);
ClientBO client2 = TestObjectFactory.createClient("client2", CustomerStatus.CLIENT_CLOSED, group);
ClientBO client3 = TestObjectFactory.createClient("client3", CustomerStatus.CLIENT_CANCELLED, group);
ClientBO client4 = TestObjectFactory.createClient("client4", CustomerStatus.CLIENT_PENDING, group);
List<CustomerBO> customerList = customerPersistence.getChildren(center.getSearchId(), center.getOffice()
.getOfficeId(), CustomerLevel.CLIENT, ChildrenStateType.OTHER_THAN_CLOSED);
Assert.assertEquals(new Integer("3").intValue(), customerList.size());
for (CustomerBO customer : customerList) {
if (customer.getCustomerId().intValue() == client3.getCustomerId().intValue()) {
Assert.assertTrue(true);
}
}
TestObjectFactory.cleanUp(client2);
TestObjectFactory.cleanUp(client3);
TestObjectFactory.cleanUp(client4);
}
    public void testGetChildrenActiveAndHold() throws Exception {
CustomerPersistence customerPersistence = new CustomerPersistence();
center = createCenter();
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group1", CustomerStatus.GROUP_ACTIVE, center);
client = TestObjectFactory.createClient("client1", CustomerStatus.CLIENT_ACTIVE, group);
ClientBO client2 = TestObjectFactory.createClient("client2", CustomerStatus.CLIENT_PARTIAL, group);
ClientBO client3 = TestObjectFactory.createClient("client3", CustomerStatus.CLIENT_PENDING, group);
ClientBO client4 = TestObjectFactory.createClient("client4", CustomerStatus.CLIENT_HOLD, group);
List<CustomerBO> customerList = customerPersistence.getChildren(center.getSearchId(), center.getOffice()
.getOfficeId(), CustomerLevel.CLIENT, ChildrenStateType.ACTIVE_AND_ONHOLD);
Assert.assertEquals(new Integer("2").intValue(), customerList.size());
for (CustomerBO customer : customerList) {
if (customer.getCustomerId().intValue() == client.getCustomerId().intValue()) {
Assert.assertTrue(true);
}
if (customer.getCustomerId().intValue() == client4.getCustomerId().intValue()) {
Assert.assertTrue(true);
}
}
TestObjectFactory.cleanUp(client2);
TestObjectFactory.cleanUp(client3);
TestObjectFactory.cleanUp(client4);
}
    public void testGetChildrenOtherThanClosedAndCancelled() throws Exception {
CustomerPersistence customerPersistence = new CustomerPersistence();
center = createCenter();
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group1", CustomerStatus.GROUP_ACTIVE, center);
client = TestObjectFactory.createClient("client1", CustomerStatus.CLIENT_ACTIVE, group);
ClientBO client2 = TestObjectFactory.createClient("client2", CustomerStatus.CLIENT_CLOSED, group);
ClientBO client3 = TestObjectFactory.createClient("client3", CustomerStatus.CLIENT_CANCELLED, group);
ClientBO client4 = TestObjectFactory.createClient("client4", CustomerStatus.CLIENT_PENDING, group);
List<CustomerBO> customerList = customerPersistence.getChildren(center.getSearchId(), center.getOffice()
.getOfficeId(), CustomerLevel.CLIENT, ChildrenStateType.OTHER_THAN_CANCELLED_AND_CLOSED);
Assert.assertEquals(new Integer("2").intValue(), customerList.size());
for (CustomerBO customer : customerList) {
if (customer.getCustomerId().equals(client4.getCustomerId())) {
Assert.assertTrue(true);
}
}
TestObjectFactory.cleanUp(client2);
TestObjectFactory.cleanUp(client3);
TestObjectFactory.cleanUp(client4);
}
    public void testGetAllChildren() throws Exception {
CustomerPersistence customerPersistence = new CustomerPersistence();
center = createCenter();
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group1", CustomerStatus.GROUP_ACTIVE, center);
client = TestObjectFactory.createClient("client1", CustomerStatus.CLIENT_ACTIVE, group);
ClientBO client2 = TestObjectFactory.createClient("client2", CustomerStatus.CLIENT_CLOSED, group);
ClientBO client3 = TestObjectFactory.createClient("client3", CustomerStatus.CLIENT_CANCELLED, group);
ClientBO client4 = TestObjectFactory.createClient("client4", CustomerStatus.CLIENT_PENDING, group);
List<CustomerBO> customerList = customerPersistence.getChildren(center.getSearchId(), center.getOffice()
.getOfficeId(), CustomerLevel.CLIENT, ChildrenStateType.ALL);
Assert.assertEquals(new Integer("4").intValue(), customerList.size());
for (CustomerBO customer : customerList) {
if (customer.getCustomerId().equals(client2.getCustomerId())) {
Assert.assertTrue(true);
}
}
TestObjectFactory.cleanUp(client2);
TestObjectFactory.cleanUp(client3);
TestObjectFactory.cleanUp(client4);
}
public void testRetrieveSavingsAccountForCustomer() throws Exception {
java.util.Date currentDate = new java.util.Date();
CustomerPersistence customerPersistence = new CustomerPersistence();
center = createCenter();
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group1", CustomerStatus.GROUP_ACTIVE, center);
savingsOffering = TestObjectFactory.createSavingsProduct("SavingPrd1", "S", currentDate, RecommendedAmountUnit.COMPLETE_GROUP);
UserContext user = new UserContext();
user.setId(PersonnelConstants.SYSTEM_USER);
account = TestObjectFactory.createSavingsAccount("000100000000020", group, AccountState.SAVINGS_ACTIVE,
currentDate, savingsOffering, user);
StaticHibernateUtil.closeSession();
List<SavingsBO> savingsList = customerPersistence.retrieveSavingsAccountForCustomer(group.getCustomerId());
Assert.assertEquals(1, savingsList.size());
account = savingsList.get(0);
group = account.getCustomer();
center = group.getParentCustomer();
}
public void testNumberOfMeetingsAttended() throws Exception {
center = createCenter();
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group", CustomerStatus.GROUP_ACTIVE, center);
client = TestObjectFactory.createClient("Client", CustomerStatus.CLIENT_ACTIVE, group);
client.handleAttendance(new Date(System.currentTimeMillis()), AttendanceType.ABSENT);
client.handleAttendance(new Date(System.currentTimeMillis()), AttendanceType.PRESENT);
Calendar currentDate = new GregorianCalendar();
currentDate.roll(Calendar.DATE, 1);
client.handleAttendance(new Date(currentDate.getTimeInMillis()), AttendanceType.LATE);
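        // The second same-day call replaces ABSENT with PRESENT; LATE also counts as attended, giving two.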
StaticHibernateUtil.commitTransaction();
CustomerPerformanceHistoryView customerPerformanceHistoryView = customerPersistence.numberOfMeetings(true,
client.getCustomerId());
Assert.assertEquals(2, customerPerformanceHistoryView.getMeetingsAttended().intValue());
StaticHibernateUtil.closeSession();
}
public void testNumberOfMeetingsMissed() throws Exception {
center = createCenter();
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group", CustomerStatus.GROUP_ACTIVE, center);
client = TestObjectFactory.createClient("Client", CustomerStatus.CLIENT_ACTIVE, group);
client.handleAttendance(new Date(System.currentTimeMillis()), AttendanceType.PRESENT);
client.handleAttendance(new Date(System.currentTimeMillis()), AttendanceType.ABSENT);
Calendar currentDate = new GregorianCalendar();
currentDate.roll(Calendar.DATE, 1);
client.handleAttendance(new Date(currentDate.getTimeInMillis()), AttendanceType.APPROVED_LEAVE);
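        // The second same-day call replaces PRESENT with ABSENT; APPROVED_LEAVE also counts as missed, giving two.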
StaticHibernateUtil.commitTransaction();
CustomerPerformanceHistoryView customerPerformanceHistoryView = customerPersistence.numberOfMeetings(false,
client.getCustomerId());
Assert.assertEquals(2, customerPerformanceHistoryView.getMeetingsMissed().intValue());
StaticHibernateUtil.closeSession();
}
public void testLastLoanAmount() throws PersistenceException, AccountException {
Date startDate = new Date(System.currentTimeMillis());
center = createCenter();
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group", CustomerStatus.GROUP_ACTIVE, center);
client = TestObjectFactory.createClient("Client", CustomerStatus.CLIENT_ACTIVE, group);
LoanOfferingBO loanOffering = TestObjectFactory.createLoanOffering(startDate, center.getCustomerMeeting()
.getMeeting());
LoanBO loanBO = TestObjectFactory.createLoanAccount("42423142341", client,
AccountState.LOAN_ACTIVE_IN_GOOD_STANDING, startDate, loanOffering);
StaticHibernateUtil.commitTransaction();
StaticHibernateUtil.closeSession();
account = (AccountBO) StaticHibernateUtil.getSessionTL().get(LoanBO.class, loanBO.getAccountId());
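        // Close the loan so that getLastLoanAmount picks it up as the client's last loan.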
AccountStateEntity accountStateEntity = new AccountStateEntity(AccountState.LOAN_CLOSED_OBLIGATIONS_MET);
account.setUserContext(TestObjectFactory.getContext());
account.changeStatus(accountStateEntity.getId(), null, "");
TestObjectFactory.updateObject(account);
CustomerPersistence customerPersistence = new CustomerPersistence();
CustomerPerformanceHistoryView customerPerformanceHistoryView = customerPersistence.getLastLoanAmount(client
.getCustomerId());
Assert.assertEquals("300.0", customerPerformanceHistoryView.getLastLoanAmount());
}
public void testFindBySystemId() throws Exception {
center = createCenter();
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group_Active_test", CustomerStatus.GROUP_ACTIVE, center);
GroupBO groupBO = (GroupBO) customerPersistence.findBySystemId(group.getGlobalCustNum());
Assert.assertEquals(groupBO.getDisplayName(), group.getDisplayName());
}
public void testGetBySystemId() throws Exception {
center = createCenter();
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group_Active_test", CustomerStatus.GROUP_ACTIVE, center);
GroupBO groupBO = (GroupBO) customerPersistence.findBySystemId(group.getGlobalCustNum(), group
.getCustomerLevel().getId());
Assert.assertEquals(groupBO.getDisplayName(), group.getDisplayName());
}
public void testOptionalCustomerStates() throws Exception {
        Assert.assertEquals(0, customerPersistence.getCustomerStates(Short.valueOf("0")).size());
}
public void testCustomerStatesInUse() throws Exception {
        Assert.assertEquals(14, customerPersistence.getCustomerStates(Short.valueOf("1")).size());
}
public void testGetCustomersWithUpdatedMeetings() throws Exception {
center = createCenter();
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group1", CustomerStatus.GROUP_ACTIVE, center);
CustomerBOTestUtils.setUpdatedFlag(group.getCustomerMeeting(), YesNoFlag.YES.getValue());
TestObjectFactory.updateObject(group);
List<Integer> customerIds = customerPersistence.getCustomersWithUpdatedMeetings();
Assert.assertEquals(1, customerIds.size());
}
public void testRetrieveAllLoanAccountUnderCustomer() throws PersistenceException {
MeetingBO meeting = TestObjectFactory.createMeeting(TestObjectFactory.getTypicalMeeting());
center = createCenter("center");
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group", CustomerStatus.GROUP_ACTIVE, center);
CenterBO center1 = createCenter("center1");
GroupBO group1 = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group1", CustomerStatus.GROUP_ACTIVE, center1);
client = TestObjectFactory.createClient("client1", CustomerStatus.CLIENT_ACTIVE, group);
ClientBO client2 = TestObjectFactory.createClient("client2", CustomerStatus.CLIENT_ACTIVE, group);
ClientBO client3 = TestObjectFactory.createClient("client3", CustomerStatus.CLIENT_ACTIVE, group1);
account = getLoanAccount(group, meeting, "cdfggdfs", "1qdd");
AccountBO account1 = getLoanAccount(client, meeting, "fdbdhgsgh", "54hg");
AccountBO account2 = getLoanAccount(client2, meeting, "fasdfdsfasdf", "1qwe");
AccountBO account3 = getLoanAccount(client3, meeting, "fdsgdfgfd", "543g");
AccountBO account4 = getLoanAccount(group1, meeting, "fasdf23", "3fds");
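        // Close client2 and cancel client3; their existing loans should still be retrieved below.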
CustomerBOTestUtils.setCustomerStatus(client2, new CustomerStatusEntity(CustomerStatus.CLIENT_CLOSED));
TestObjectFactory.updateObject(client2);
client2 = TestObjectFactory.getClient(client2.getCustomerId());
CustomerBOTestUtils.setCustomerStatus(client3, new CustomerStatusEntity(CustomerStatus.CLIENT_CANCELLED));
TestObjectFactory.updateObject(client3);
client3 = TestObjectFactory.getClient(client3.getCustomerId());
List<AccountBO> loansForCenter = customerPersistence.retrieveAccountsUnderCustomer(center.getSearchId(), Short
.valueOf("3"), Short.valueOf("1"));
Assert.assertEquals(3, loansForCenter.size());
List<AccountBO> loansForGroup = customerPersistence.retrieveAccountsUnderCustomer(group.getSearchId(), Short
.valueOf("3"), Short.valueOf("1"));
Assert.assertEquals(3, loansForGroup.size());
List<AccountBO> loansForClient = customerPersistence.retrieveAccountsUnderCustomer(client.getSearchId(), Short
.valueOf("3"), Short.valueOf("1"));
Assert.assertEquals(1, loansForClient.size());
TestObjectFactory.cleanUp(account4);
TestObjectFactory.cleanUp(account3);
TestObjectFactory.cleanUp(account2);
TestObjectFactory.cleanUp(account1);
TestObjectFactory.cleanUp(client3);
TestObjectFactory.cleanUp(client2);
TestObjectFactory.cleanUp(group1);
TestObjectFactory.cleanUp(center1);
}
public void testRetrieveAllSavingsAccountUnderCustomer() throws Exception {
center = createCenter("new_center");
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group", CustomerStatus.GROUP_ACTIVE, center);
CenterBO center1 = createCenter("new_center1");
GroupBO group1 = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group1", CustomerStatus.GROUP_ACTIVE, center1);
client = TestObjectFactory.createClient("client1", CustomerStatus.CLIENT_ACTIVE, group);
ClientBO client2 = TestObjectFactory.createClient("client2", CustomerStatus.CLIENT_CLOSED, group);
ClientBO client3 = TestObjectFactory.createClient("client3", CustomerStatus.CLIENT_CANCELLED, group1);
account = getSavingsAccount(center, "Savings Prd1", "Abc1");
AccountBO account1 = getSavingsAccount(client, "Savings Prd2", "Abc2");
AccountBO account2 = getSavingsAccount(client2, "Savings Prd3", "Abc3");
AccountBO account3 = getSavingsAccount(client3, "Savings Prd4", "Abc4");
AccountBO account4 = getSavingsAccount(group1, "Savings Prd5", "Abc5");
AccountBO account5 = getSavingsAccount(group, "Savings Prd6", "Abc6");
AccountBO account6 = getSavingsAccount(center1, "Savings Prd7", "Abc7");
List<AccountBO> savingsForCenter = customerPersistence.retrieveAccountsUnderCustomer(center.getSearchId(),
Short.valueOf("3"), Short.valueOf("2"));
Assert.assertEquals(4, savingsForCenter.size());
List<AccountBO> savingsForGroup = customerPersistence.retrieveAccountsUnderCustomer(group.getSearchId(), Short
.valueOf("3"), Short.valueOf("2"));
Assert.assertEquals(3, savingsForGroup.size());
List<AccountBO> savingsForClient = customerPersistence.retrieveAccountsUnderCustomer(client.getSearchId(),
Short.valueOf("3"), Short.valueOf("2"));
Assert.assertEquals(1, savingsForClient.size());
TestObjectFactory.cleanUp(account3);
TestObjectFactory.cleanUp(account2);
TestObjectFactory.cleanUp(account1);
TestObjectFactory.cleanUp(client3);
TestObjectFactory.cleanUp(client2);
TestObjectFactory.cleanUp(account4);
TestObjectFactory.cleanUp(account5);
TestObjectFactory.cleanUp(group1);
TestObjectFactory.cleanUp(account6);
TestObjectFactory.cleanUp(center1);
}
public void testGetAllChildrenForParent() throws NumberFormatException, PersistenceException {
center = createCenter("Center");
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group", CustomerStatus.GROUP_ACTIVE, center);
CenterBO center1 = createCenter("center11");
GroupBO group1 = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group1", CustomerStatus.GROUP_ACTIVE, center1);
client = TestObjectFactory.createClient("client1", CustomerStatus.CLIENT_ACTIVE, group);
ClientBO client2 = TestObjectFactory.createClient("client2", CustomerStatus.CLIENT_CLOSED, group);
ClientBO client3 = TestObjectFactory.createClient("client3", CustomerStatus.CLIENT_CANCELLED, group1);
List<CustomerBO> customerList1 = customerPersistence.getAllChildrenForParent(center.getSearchId(), Short
.valueOf("3"), CustomerLevel.CENTER.getValue());
Assert.assertEquals(2, customerList1.size());
List<CustomerBO> customerList2 = customerPersistence.getAllChildrenForParent(center.getSearchId(), Short
.valueOf("3"), CustomerLevel.GROUP.getValue());
Assert.assertEquals(1, customerList2.size());
TestObjectFactory.cleanUp(client3);
TestObjectFactory.cleanUp(client2);
TestObjectFactory.cleanUp(group1);
TestObjectFactory.cleanUp(center1);
}
public void testGetChildrenForParent() throws NumberFormatException, SystemException, ApplicationException {
center = createCenter("center");
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group", CustomerStatus.GROUP_ACTIVE, center);
CenterBO center1 = createCenter("center1");
GroupBO group1 = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group1", CustomerStatus.GROUP_ACTIVE, center1);
client = TestObjectFactory.createClient("client1", CustomerStatus.CLIENT_ACTIVE, group);
ClientBO client2 = TestObjectFactory.createClient("client2", CustomerStatus.CLIENT_CLOSED, group);
ClientBO client3 = TestObjectFactory.createClient("client3", CustomerStatus.CLIENT_CANCELLED, group1);
List<Integer> customerIds = customerPersistence.getChildrenForParent(center.getSearchId(), Short.valueOf("3"));
Assert.assertEquals(3, customerIds.size());
CustomerBO customer = TestObjectFactory.getCustomer(customerIds.get(0));
Assert.assertEquals("Group", customer.getDisplayName());
customer = TestObjectFactory.getCustomer(customerIds.get(1));
Assert.assertEquals("client1", customer.getDisplayName());
customer = TestObjectFactory.getCustomer(customerIds.get(2));
Assert.assertEquals("client2", customer.getDisplayName());
TestObjectFactory.cleanUp(client3);
TestObjectFactory.cleanUp(client2);
TestObjectFactory.cleanUp(group1);
TestObjectFactory.cleanUp(center1);
}
public void testGetCustomers() throws NumberFormatException, SystemException, ApplicationException {
center = createCenter("center");
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group", CustomerStatus.GROUP_ACTIVE, center);
CenterBO center1 = createCenter("center11");
GroupBO group1 = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group1", CustomerStatus.GROUP_ACTIVE, center1);
client = TestObjectFactory.createClient("client1", CustomerStatus.CLIENT_ACTIVE, group);
ClientBO client2 = TestObjectFactory.createClient("client2", CustomerStatus.CLIENT_CLOSED, group);
ClientBO client3 = TestObjectFactory.createClient("client3", CustomerStatus.CLIENT_CANCELLED, group1);
List<Integer> customerIds = customerPersistence.getCustomers(CustomerLevel.CENTER.getValue());
Assert.assertEquals(2, customerIds.size());
TestObjectFactory.cleanUp(client3);
TestObjectFactory.cleanUp(client2);
TestObjectFactory.cleanUp(group1);
TestObjectFactory.cleanUp(center1);
}
    public void testGetCustomerChecklist() throws Exception {
center = createCenter("center");
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group", CustomerStatus.GROUP_ACTIVE, center);
client = TestObjectFactory.createClient("client1", CustomerStatus.CLIENT_ACTIVE, group);
CustomerCheckListBO checklistCenter = TestObjectFactory.createCustomerChecklist(center.getCustomerLevel()
.getId(), center.getCustomerStatus().getId(), CheckListConstants.STATUS_ACTIVE);
CustomerCheckListBO checklistClient = TestObjectFactory.createCustomerChecklist(client.getCustomerLevel()
.getId(), client.getCustomerStatus().getId(), CheckListConstants.STATUS_INACTIVE);
CustomerCheckListBO checklistGroup = TestObjectFactory.createCustomerChecklist(
group.getCustomerLevel().getId(), group.getCustomerStatus().getId(), CheckListConstants.STATUS_ACTIVE);
StaticHibernateUtil.closeSession();
Assert.assertEquals(1, customerPersistence.getStatusChecklist(center.getCustomerStatus().getId(),
center.getCustomerLevel().getId()).size());
client = (ClientBO) StaticHibernateUtil.getSessionTL().get(ClientBO.class,
Integer.valueOf(client.getCustomerId()));
group = (GroupBO) StaticHibernateUtil.getSessionTL().get(GroupBO.class, Integer.valueOf(group.getCustomerId()));
center = (CenterBO) StaticHibernateUtil.getSessionTL().get(CenterBO.class,
Integer.valueOf(center.getCustomerId()));
checklistCenter = (CustomerCheckListBO) StaticHibernateUtil.getSessionTL().get(CheckListBO.class,
new Short(checklistCenter.getChecklistId()));
checklistClient = (CustomerCheckListBO) StaticHibernateUtil.getSessionTL().get(CheckListBO.class,
new Short(checklistClient.getChecklistId()));
checklistGroup = (CustomerCheckListBO) StaticHibernateUtil.getSessionTL().get(CheckListBO.class,
new Short(checklistGroup.getChecklistId()));
TestObjectFactory.cleanUp(checklistCenter);
TestObjectFactory.cleanUp(checklistClient);
TestObjectFactory.cleanUp(checklistGroup);
}
public void testRetrieveAllCustomerStatusList() throws NumberFormatException, SystemException, ApplicationException {
center = createCenter();
Assert.assertEquals(2, customerPersistence.retrieveAllCustomerStatusList(center.getCustomerLevel().getId())
.size());
}
public void testCustomerCountByOffice() throws Exception {
int count = customerPersistence.getCustomerCountForOffice(CustomerLevel.CENTER, Short.valueOf("3"));
Assert.assertEquals(0, count);
center = createCenter();
count = customerPersistence.getCustomerCountForOffice(CustomerLevel.CENTER, Short.valueOf("3"));
Assert.assertEquals(1, count);
}
public void testGetAllCustomerNotes() throws Exception {
center = createCenter();
center.addCustomerNotes(TestObjectFactory.getCustomerNote("Test Note", center));
TestObjectFactory.updateObject(center);
Assert.assertEquals(1, customerPersistence.getAllCustomerNotes(center.getCustomerId()).getSize());
for (CustomerNoteEntity note : center.getCustomerNotes()) {
Assert.assertEquals("Test Note", note.getComment());
Assert.assertEquals(center.getPersonnel().getPersonnelId(), note.getPersonnel().getPersonnelId());
}
center = (CenterBO) StaticHibernateUtil.getSessionTL().get(CenterBO.class,
Integer.valueOf(center.getCustomerId()));
}
public void testGetAllCustomerNotesWithZeroNotes() throws Exception {
center = createCenter();
Assert.assertEquals(0, customerPersistence.getAllCustomerNotes(center.getCustomerId()).getSize());
Assert.assertEquals(0, center.getCustomerNotes().size());
}
public void testGetFormedByPersonnel() throws NumberFormatException, SystemException, ApplicationException {
center = createCenter();
Assert.assertEquals(1, customerPersistence.getFormedByPersonnel(ClientConstants.LOAN_OFFICER_LEVEL,
center.getOffice().getOfficeId()).size());
}
public void testGetAllClosedAccounts() throws Exception {
getCustomer();
groupAccount.changeStatus(AccountState.LOAN_CANCELLED.getValue(), AccountStateFlag.LOAN_WITHDRAW.getValue(),
"WITHDRAW LOAN ACCOUNT");
clientAccount.changeStatus(AccountState.LOAN_CLOSED_WRITTEN_OFF.getValue(), null, "WITHDRAW LOAN ACCOUNT");
clientSavingsAccount.changeStatus(AccountState.SAVINGS_CANCELLED.getValue(), AccountStateFlag.SAVINGS_REJECTED
.getValue(), "WITHDRAW LOAN ACCOUNT");
TestObjectFactory.updateObject(groupAccount);
TestObjectFactory.updateObject(clientAccount);
TestObjectFactory.updateObject(clientSavingsAccount);
StaticHibernateUtil.commitTransaction();
Assert.assertEquals(1, customerPersistence.getAllClosedAccount(client.getCustomerId(),
AccountTypes.LOAN_ACCOUNT.getValue()).size());
Assert.assertEquals(1, customerPersistence.getAllClosedAccount(group.getCustomerId(),
AccountTypes.LOAN_ACCOUNT.getValue()).size());
Assert.assertEquals(1, customerPersistence.getAllClosedAccount(client.getCustomerId(),
AccountTypes.SAVINGS_ACCOUNT.getValue()).size());
}
public void testGetAllClosedAccountsWhenNoAccountsClosed() throws Exception {
getCustomer();
Assert.assertEquals(0, customerPersistence.getAllClosedAccount(client.getCustomerId(),
AccountTypes.LOAN_ACCOUNT.getValue()).size());
Assert.assertEquals(0, customerPersistence.getAllClosedAccount(group.getCustomerId(),
AccountTypes.LOAN_ACCOUNT.getValue()).size());
Assert.assertEquals(0, customerPersistence.getAllClosedAccount(client.getCustomerId(),
AccountTypes.SAVINGS_ACCOUNT.getValue()).size());
}
public void testGetLOForCustomer() throws PersistenceException {
createCustomers(CustomerStatus.GROUP_ACTIVE, CustomerStatus.CLIENT_ACTIVE);
        Short loanOfficerId = customerPersistence.getLoanOfficerForCustomer(center.getCustomerId());
        Assert.assertEquals(center.getPersonnel().getPersonnelId(), loanOfficerId);
}
public void testUpdateLOsForAllChildren() {
createCustomers(CustomerStatus.GROUP_ACTIVE, CustomerStatus.CLIENT_ACTIVE);
Assert.assertEquals(center.getPersonnel().getPersonnelId(), group.getPersonnel().getPersonnelId());
Assert.assertEquals(center.getPersonnel().getPersonnelId(), client.getPersonnel().getPersonnelId());
StaticHibernateUtil.startTransaction();
PersonnelBO newLO = TestObjectFactory.getPersonnel(Short.valueOf("2"));
new CustomerPersistence().updateLOsForAllChildren(newLO.getPersonnelId(), center.getSearchId(), center
.getOffice().getOfficeId());
StaticHibernateUtil.commitTransaction();
StaticHibernateUtil.closeSession();
center = TestObjectFactory.getCenter(center.getCustomerId());
group = TestObjectFactory.getGroup(group.getCustomerId());
client = TestObjectFactory.getClient(client.getCustomerId());
Assert.assertEquals(newLO.getPersonnelId(), group.getPersonnel().getPersonnelId());
Assert.assertEquals(newLO.getPersonnelId(), client.getPersonnel().getPersonnelId());
}
public void testUpdateLOsForAllChildrenAccounts() throws Exception {
createCustomers(CustomerStatus.GROUP_ACTIVE, CustomerStatus.CLIENT_ACTIVE);
Assert.assertEquals(center.getPersonnel().getPersonnelId(), group.getPersonnel().getPersonnelId());
Assert.assertEquals(center.getPersonnel().getPersonnelId(), client.getPersonnel().getPersonnelId());
StaticHibernateUtil.startTransaction();
PersonnelBO newLO = TestObjectFactory.getPersonnel(Short.valueOf("2"));
new CustomerPersistence().updateLOsForAllChildrenAccounts(newLO.getPersonnelId(), center.getSearchId(), center
.getOffice().getOfficeId());
StaticHibernateUtil.commitTransaction();
StaticHibernateUtil.closeSession();
client = TestObjectFactory.getClient(client.getCustomerId());
for (AccountBO account : client.getAccounts()) {
Assert.assertEquals(newLO.getPersonnelId(), account.getPersonnel().getPersonnelId());
}
}
public void testCustomerDeleteMeeting() throws Exception {
MeetingBO meeting = TestObjectFactory.createMeeting(TestObjectFactory.getTypicalMeeting());
client = TestObjectFactory.createClient("myClient", meeting, CustomerStatus.CLIENT_PENDING);
StaticHibernateUtil.closeSession();
client = TestObjectFactory.getClient(client.getCustomerId());
customerPersistence.deleteCustomerMeeting(client);
CustomerBOTestUtils.setCustomerMeeting(client, null);
StaticHibernateUtil.commitTransaction();
StaticHibernateUtil.closeSession();
client = TestObjectFactory.getClient(client.getCustomerId());
Assert.assertNull(client.getCustomerMeeting());
}
public void testDeleteMeeting() throws Exception {
MeetingBO meeting = TestObjectFactory.createMeeting(TestObjectFactory.getTypicalMeeting());
StaticHibernateUtil.closeSession();
meeting = new MeetingPersistence().getMeeting(meeting.getMeetingId());
customerPersistence.deleteMeeting(meeting);
StaticHibernateUtil.commitTransaction();
StaticHibernateUtil.closeSession();
meeting = new MeetingPersistence().getMeeting(meeting.getMeetingId());
Assert.assertNull(meeting);
}
public void testSearchWithOfficeId() throws Exception {
createCustomers(CustomerStatus.GROUP_ACTIVE, CustomerStatus.CLIENT_ACTIVE);
StaticHibernateUtil.commitTransaction();
QueryResult queryResult = new CustomerPersistence().search("C", Short.valueOf("3"), Short.valueOf("1"), Short
.valueOf("1"));
Assert.assertNotNull(queryResult);
Assert.assertEquals(2, queryResult.getSize());
Assert.assertEquals(2, queryResult.get(0, 10).size());
}
public void testSearchWithoutOfficeId() throws Exception {
createCustomers(CustomerStatus.GROUP_ACTIVE, CustomerStatus.CLIENT_ACTIVE);
StaticHibernateUtil.commitTransaction();
QueryResult queryResult = new CustomerPersistence().search("C", Short.valueOf("0"), Short.valueOf("1"), Short
.valueOf("1"));
Assert.assertNotNull(queryResult);
Assert.assertEquals(2, queryResult.getSize());
Assert.assertEquals(2, queryResult.get(0, 10).size());
}
public void testSearchWithGlobalNo() throws Exception {
createCustomers(CustomerStatus.GROUP_ACTIVE, CustomerStatus.CLIENT_ACTIVE);
StaticHibernateUtil.commitTransaction();
QueryResult queryResult = new CustomerPersistence().search(group.getGlobalCustNum(), Short.valueOf("3"), Short
.valueOf("1"), Short.valueOf("1"));
Assert.assertNotNull(queryResult);
Assert.assertEquals(1, queryResult.getSize());
Assert.assertEquals(1, queryResult.get(0, 10).size());
}
public void testSearchWithGovernmentId() throws Exception {
createCustomersWithGovernmentId(CustomerStatus.GROUP_ACTIVE, CustomerStatus.CLIENT_ACTIVE);
StaticHibernateUtil.commitTransaction();
QueryResult queryResult = new CustomerPersistence().search("76346793216", Short.valueOf("3"), Short
.valueOf("1"), Short.valueOf("1"));
Assert.assertNotNull(queryResult);
Assert.assertEquals(1, queryResult.getSize());
Assert.assertEquals(1, queryResult.get(0, 10).size());
}
@SuppressWarnings("unchecked")
public void testSearchWithCancelLoanAccounts() throws Exception {
groupAccount = getLoanAccount();
groupAccount.changeStatus(AccountState.LOAN_CANCELLED.getValue(), AccountStateFlag.LOAN_WITHDRAW.getValue(),
"WITHDRAW LOAN ACCOUNT");
TestObjectFactory.updateObject(groupAccount);
StaticHibernateUtil.commitTransaction();
StaticHibernateUtil.closeSession();
groupAccount = TestObjectFactory.getObject(LoanBO.class, groupAccount.getAccountId());
center = TestObjectFactory.getCustomer(center.getCustomerId());
group = TestObjectFactory.getCustomer(group.getCustomerId());
QueryResult queryResult = new CustomerPersistence().search(group.getGlobalCustNum(), Short.valueOf("3"), Short
.valueOf("1"), Short.valueOf("1"));
Assert.assertNotNull(queryResult);
Assert.assertEquals(1, queryResult.getSize());
List results = queryResult.get(0, 10);
Assert.assertEquals(1, results.size());
CustomerSearch customerSearch = (CustomerSearch) results.get(0);
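        // The cancelled loan must not appear among the search result's loan account numbers.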
Assert.assertEquals(0, customerSearch.getLoanGlobalAccountNum().size());
}
public void testSearchWithAccountGlobalNo() throws Exception {
getCustomer();
StaticHibernateUtil.commitTransaction();
QueryResult queryResult = new CustomerPersistence().search(groupAccount.getGlobalAccountNum(), Short
.valueOf("3"), Short.valueOf("1"), Short.valueOf("1"));
Assert.assertNotNull(queryResult);
Assert.assertEquals(1, queryResult.getSize());
Assert.assertEquals(1, queryResult.get(0, 10).size());
}
    public void testSearchGroupAndClient() throws Exception {
createCustomers(CustomerStatus.GROUP_ACTIVE, CustomerStatus.CLIENT_ACTIVE);
StaticHibernateUtil.commitTransaction();
QueryResult queryResult = new CustomerPersistence().searchGroupClient("C", Short.valueOf("1"));
Assert.assertNotNull(queryResult);
Assert.assertEquals(1, queryResult.getSize());
        Assert.assertEquals(1, queryResult.get(0, 10).size());
    public void testSearchGroupAndClientForLoNoResults() throws Exception {
meeting = TestObjectFactory.createMeeting(TestObjectFactory.getTypicalMeeting());
center = TestObjectFactory.createWeeklyFeeCenter("Center", meeting, Short.valueOf("3"), Short.valueOf("3"));
group = TestObjectFactory.createGroupUnderCenter("Group", CustomerStatus.GROUP_ACTIVE, "1234", true,
new java.util.Date(), null, null, null, Short.valueOf("3"), center);
StaticHibernateUtil.commitTransaction();
QueryResult queryResult = new CustomerPersistence().searchGroupClient("C", Short.valueOf("3"));
Assert.assertNotNull(queryResult);
Assert.assertEquals(0, queryResult.getSize());
Assert.assertEquals(0, queryResult.get(0, 10).size());
}
    public void testSearchGroupAndClientForLo() throws Exception {
meeting = TestObjectFactory.createMeeting(TestObjectFactory.getTypicalMeeting());
center = TestObjectFactory.createWeeklyFeeCenter("Center", meeting, Short.valueOf("3"), Short.valueOf("3"));
group = TestObjectFactory.createGroupUnderCenter("Group", CustomerStatus.GROUP_ACTIVE, "1234", true,
new java.util.Date(), null, null, null, Short.valueOf("3"), center);
StaticHibernateUtil.commitTransaction();
QueryResult queryResult = new CustomerPersistence().searchGroupClient("G", Short.valueOf("3"));
Assert.assertNotNull(queryResult);
Assert.assertEquals(1, queryResult.getSize());
Assert.assertEquals(1, queryResult.get(0, 10).size());
}
public void testSearchCustForSavings() throws Exception {
createCustomers(CustomerStatus.GROUP_ACTIVE, CustomerStatus.CLIENT_ACTIVE);
StaticHibernateUtil.commitTransaction();
QueryResult queryResult = new CustomerPersistence().searchCustForSavings("C", Short.valueOf("1"));
Assert.assertNotNull(queryResult);
Assert.assertEquals(2, queryResult.getSize());
Assert.assertEquals(2, queryResult.get(0, 10).size());
}
public void testGetCustomerAccountsForFee() throws Exception {
groupAccount = getLoanAccount();
FeeBO periodicFee = TestObjectFactory.createPeriodicAmountFee("ClientPeridoicFee", FeeCategory.CENTER, "5",
RecurrenceType.WEEKLY, Short.valueOf("1"));
AccountFeesEntity accountFee = new AccountFeesEntity(center.getCustomerAccount(), periodicFee,
((AmountFeeBO) periodicFee).getFeeAmount().getAmountDoubleValue());
CustomerAccountBO customerAccount = center.getCustomerAccount();
AccountTestUtils.addAccountFees(accountFee, customerAccount);
TestObjectFactory.updateObject(customerAccount);
StaticHibernateUtil.commitTransaction();
StaticHibernateUtil.closeSession();
// check for the account fee
List<AccountBO> accountList = new CustomerPersistence().getCustomerAccountsForFee(periodicFee.getFeeId());
Assert.assertNotNull(accountList);
Assert.assertEquals(1, accountList.size());
Assert.assertTrue(accountList.get(0) instanceof CustomerAccountBO);
// get all objects again
groupAccount = TestObjectFactory.getObject(LoanBO.class, groupAccount.getAccountId());
group = TestObjectFactory.getCustomer(group.getCustomerId());
center = TestObjectFactory.getCustomer(center.getCustomerId());
}
public void testRetrieveCustomerAccountActionDetails() throws Exception {
center = createCenter();
Assert.assertNotNull(center.getCustomerAccount());
List<AccountActionDateEntity> actionDates = new CustomerPersistence().retrieveCustomerAccountActionDetails(
center.getCustomerAccount().getAccountId(), new java.sql.Date(System.currentTimeMillis()));
Assert.assertEquals("The size of the due insallments is ", actionDates.size(), 1);
}
public void testGetActiveCentersUnderUser() throws Exception {
MeetingBO meeting = TestObjectFactory.createMeeting(TestObjectFactory.getTypicalMeeting());
center = TestObjectFactory.createWeeklyFeeCenter("center", meeting, Short.valueOf("1"), Short.valueOf("1"));
PersonnelBO personnel = TestObjectFactory.getPersonnel(Short.valueOf("1"));
List<CustomerBO> customers = new CustomerPersistence().getActiveCentersUnderUser(personnel);
Assert.assertNotNull(customers);
Assert.assertEquals(1, customers.size());
}
    public void testGetGroupsUnderUser() throws Exception {
MeetingBO meeting = TestObjectFactory.createMeeting(TestObjectFactory.getTypicalMeeting());
center = TestObjectFactory.createWeeklyFeeCenter("center", meeting, Short.valueOf("1"), Short.valueOf("1"));
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group", CustomerStatus.GROUP_ACTIVE, center);
group2 = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group33", CustomerStatus.GROUP_CANCELLED, center);
PersonnelBO personnel = TestObjectFactory.getPersonnel(Short.valueOf("1"));
List<CustomerBO> customers = new CustomerPersistence().getGroupsUnderUser(personnel);
Assert.assertNotNull(customers);
Assert.assertEquals(1, customers.size());
}
@SuppressWarnings("unchecked")
public void testSearchForActiveInBadStandingLoanAccount() throws Exception {
groupAccount = getLoanAccount();
groupAccount.changeStatus(AccountState.LOAN_ACTIVE_IN_BAD_STANDING.getValue(), null, "Changing to badStanding");
TestObjectFactory.updateObject(groupAccount);
StaticHibernateUtil.closeSession();
groupAccount = TestObjectFactory.getObject(LoanBO.class, groupAccount.getAccountId());
center = TestObjectFactory.getCustomer(center.getCustomerId());
group = TestObjectFactory.getCustomer(group.getCustomerId());
QueryResult queryResult = new CustomerPersistence().search(group.getGlobalCustNum(), Short.valueOf("3"), Short
.valueOf("1"), Short.valueOf("1"));
Assert.assertNotNull(queryResult);
Assert.assertEquals(1, queryResult.getSize());
List results = queryResult.get(0, 10);
Assert.assertEquals(1, results.size());
CustomerSearch customerSearch = (CustomerSearch) results.get(0);
Assert.assertEquals(1, customerSearch.getLoanGlobalAccountNum().size());
}
public void testGetCustomersByLevelId() throws Exception {
createCustomers(CustomerStatus.GROUP_ACTIVE, CustomerStatus.CLIENT_ACTIVE);
StaticHibernateUtil.commitTransaction();
List<CustomerBO> client = new CustomerPersistence().getCustomersByLevelId(Short.parseShort("1"));
Assert.assertNotNull(client);
Assert.assertEquals(1, client.size());
List<CustomerBO> group = new CustomerPersistence().getCustomersByLevelId(Short.parseShort("2"));
Assert.assertNotNull(group);
Assert.assertEquals(1, group.size());
List<CustomerBO> center = new CustomerPersistence().getCustomersByLevelId(Short.parseShort("3"));
Assert.assertNotNull(center);
Assert.assertEquals(1, center.size());
}
public void testFindCustomerWithNoAssocationsLoadedReturnsActiveCenter() throws Exception {
meeting = TestObjectFactory.createMeeting(TestObjectFactory.getNewMeetingForToday(WEEKLY,
EVERY_WEEK, CUSTOMER_MEETING));
center = TestObjectFactory.createWeeklyFeeCenter("Active Center", meeting);
verifyCustomerLoaded(center.getCustomerId(), center.getDisplayName());
}
public void testFindCustomerWithNoAssocationsLoadedDoesntReturnInactiveCenter() throws Exception {
meeting = TestObjectFactory.createMeeting(TestObjectFactory.getNewMeetingForToday(WEEKLY,
EVERY_WEEK, CUSTOMER_MEETING));
center = TestObjectFactory.createWeeklyFeeCenter("Inactive Center", meeting);
center.changeStatus(CustomerStatus.CENTER_INACTIVE, CustomerStatusFlag.GROUP_CANCEL_BLACKLISTED, "Made Inactive");
StaticHibernateUtil.commitTransaction();
StaticHibernateUtil.closeSession();
center = (CenterBO) StaticHibernateUtil.getSessionTL().get(CenterBO.class, center.getCustomerId());
verifyCustomerNotLoaded(center.getCustomerId(), center.getDisplayName());
}
public void testFindCustomerWithNoAssocationsLoadedReturnsActiveGroup() throws Exception {
meeting = TestObjectFactory.createMeeting(TestObjectFactory.getNewMeetingForToday(WEEKLY,
EVERY_WEEK, CUSTOMER_MEETING));
center = TestObjectFactory.createWeeklyFeeCenter("Active Center", meeting);
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Active Group", CustomerStatus.GROUP_ACTIVE,
center);
verifyCustomerLoaded(group.getCustomerId(), group.getDisplayName());
}
public void testFindCustomerWithNoAssocationsLoadedReturnsHoldGroup() throws Exception {
meeting = TestObjectFactory.createMeeting(TestObjectFactory.getNewMeetingForToday(WEEKLY,
EVERY_WEEK, CUSTOMER_MEETING));
center = TestObjectFactory.createWeeklyFeeCenter("Active Center", meeting);
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Hold Group", CustomerStatus.GROUP_HOLD,
center);
verifyCustomerLoaded(group.getCustomerId(), group.getDisplayName());
}
public void testFindCustomerWithNoAssocationsLoadedDoesntReturnClosedGroup() throws Exception {
meeting = TestObjectFactory.createMeeting(TestObjectFactory.getNewMeetingForToday(WEEKLY,
EVERY_WEEK, CUSTOMER_MEETING));
center = TestObjectFactory.createWeeklyFeeCenter("Active Center", meeting);
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Closed Group", CustomerStatus.GROUP_CLOSED,
center);
verifyCustomerNotLoaded(group.getCustomerId(), group.getDisplayName());
}
public void testFindCustomerWithNoAssocationsLoadedReturnsActiveClient() throws Exception {
meeting = TestObjectFactory.createMeeting(TestObjectFactory.getNewMeetingForToday(WEEKLY,
EVERY_WEEK, CUSTOMER_MEETING));
center = TestObjectFactory.createWeeklyFeeCenter("Active Center", meeting);
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Active Group", CustomerStatus.GROUP_ACTIVE,
center);
client = TestObjectFactory.createClient("Active Client", CustomerStatus.CLIENT_ACTIVE, group);
verifyCustomerLoaded(client.getCustomerId(), client.getDisplayName());
}
public void testFindCustomerWithNoAssocationsLoadedReturnsHoldClient() throws Exception {
meeting = TestObjectFactory.createMeeting(TestObjectFactory.getNewMeetingForToday(WEEKLY,
EVERY_WEEK, CUSTOMER_MEETING));
center = TestObjectFactory.createWeeklyFeeCenter("Active Center", meeting);
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Active Group", CustomerStatus.GROUP_ACTIVE,
center);
client = TestObjectFactory.createClient("Hold Client", CustomerStatus.CLIENT_HOLD, group);
verifyCustomerLoaded(client.getCustomerId(), client.getDisplayName());
}
public void testFindCustomerWithNoAssocationsLoadedDoesntReturnClosedClient() throws Exception {
meeting = TestObjectFactory.createMeeting(TestObjectFactory.getNewMeetingForToday(WEEKLY,
EVERY_WEEK, CUSTOMER_MEETING));
center = TestObjectFactory.createWeeklyFeeCenter("Active Center", meeting);
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Active Group", CustomerStatus.GROUP_ACTIVE,
center);
client = TestObjectFactory.createClient("Closed Client", CustomerStatus.CLIENT_CLOSED, group);
verifyCustomerNotLoaded(client.getCustomerId(), client.getDisplayName());
}
private void verifyCustomerLoaded(Integer customerId, String customerName) {
CollectionSheetCustomerDto collectionSheetCustomerDto = customerPersistence
.findCustomerWithNoAssocationsLoaded(customerId);
Assert.assertNotNull(customerName + " was not returned", collectionSheetCustomerDto);
Assert.assertEquals(collectionSheetCustomerDto.getCustomerId(), customerId);
}
private void verifyCustomerNotLoaded(Integer customerId, String customerName) {
CollectionSheetCustomerDto collectionSheetCustomerDto = customerPersistence
.findCustomerWithNoAssocationsLoaded(customerId);
Assert.assertNull(customerName + " was returned", collectionSheetCustomerDto);
}
private AccountBO getSavingsAccount(final CustomerBO customer, final String prdOfferingname, final String shortName)
throws Exception {
Date startDate = new Date(System.currentTimeMillis());
SavingsOfferingBO savingsOffering = TestObjectFactory.createSavingsProduct(prdOfferingname, shortName,
startDate, RecommendedAmountUnit.COMPLETE_GROUP);
return TestObjectFactory.createSavingsAccount("432434", customer, Short.valueOf("16"), startDate,
savingsOffering);
}
private void getCustomer() throws Exception {
Date startDate = new Date(System.currentTimeMillis());
MeetingBO meeting = TestObjectFactory.createMeeting(TestObjectFactory.getTypicalMeeting());
center = TestObjectFactory.createWeeklyFeeCenter("Center", meeting);
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group", CustomerStatus.GROUP_ACTIVE, center);
client = TestObjectFactory.createClient("Client", CustomerStatus.CLIENT_ACTIVE, group);
LoanOfferingBO loanOffering1 = TestObjectFactory.createLoanOffering("Loanwer", "43fs", startDate, meeting);
LoanOfferingBO loanOffering2 = TestObjectFactory.createLoanOffering("Loancd123", "vfr", startDate, meeting);
groupAccount = TestObjectFactory.createLoanAccount("42423142341", group,
AccountState.LOAN_ACTIVE_IN_GOOD_STANDING, startDate, loanOffering1);
clientAccount = TestObjectFactory.createLoanAccount("3243", client, AccountState.LOAN_ACTIVE_IN_GOOD_STANDING,
startDate, loanOffering2);
MeetingBO meetingIntCalc = TestObjectFactory.createMeeting(TestObjectFactory.getTypicalMeeting());
MeetingBO meetingIntPost = TestObjectFactory.createMeeting(TestObjectFactory.getTypicalMeeting());
SavingsOfferingBO savingsOffering = TestObjectFactory.createSavingsProduct("SavingPrd12", "abc1", startDate,
RecommendedAmountUnit.COMPLETE_GROUP, meetingIntCalc, meetingIntPost);
SavingsOfferingBO savingsOffering1 = TestObjectFactory.createSavingsProduct("SavingPrd11", "abc2", startDate,
RecommendedAmountUnit.COMPLETE_GROUP, meetingIntCalc, meetingIntPost);
centerSavingsAccount = TestObjectFactory.createSavingsAccount("432434", center, Short.valueOf("16"), startDate,
savingsOffering);
clientSavingsAccount = TestObjectFactory.createSavingsAccount("432434", client, Short.valueOf("16"), startDate,
savingsOffering1);
}
private void createCustomers(final CustomerStatus groupStatus, final CustomerStatus clientStatus) {
meeting = TestObjectFactory.createMeeting(TestObjectFactory.getTypicalMeeting());
center = TestObjectFactory.createWeeklyFeeCenter("Center", meeting);
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group", groupStatus, center);
client = TestObjectFactory.createClient("Client", clientStatus, group);
}
private void createCustomersWithGovernmentId(final CustomerStatus groupStatus, final CustomerStatus clientStatus) {
meeting = TestObjectFactory.createMeeting(TestObjectFactory.getTypicalMeeting());
center = TestObjectFactory.createWeeklyFeeCenter("Center", meeting);
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group", groupStatus, center);
client = TestObjectFactory.createClient("Client", clientStatus, group, TestObjectFactory.getFees(), "76346793216", new java.util.Date(1222333444000L));
}
    private static java.util.Date getMeetingDates(final MeetingBO meeting) {
        List<java.util.Date> dates;
        try {
            dates = meeting.getAllDates(new java.util.Date(System.currentTimeMillis()));
        } catch (MeetingException e) {
            // Fail fast instead of hitting an IndexOutOfBoundsException on the empty list below.
            throw new RuntimeException("could not compute meeting dates", e);
        }
        return dates.get(dates.size() - 1);
    }
private CenterBO createCenter() {
return createCenter("Center_Active_test");
}
private CenterBO createCenter(final String name) {
MeetingBO meeting = TestObjectFactory.createMeeting(TestObjectFactory.getNewMeetingForToday(WEEKLY, EVERY_WEEK,
CUSTOMER_MEETING));
return TestObjectFactory.createWeeklyFeeCenter(name, meeting);
}
private LoanBO getLoanAccount() {
Date startDate = new Date(System.currentTimeMillis());
MeetingBO meeting = TestObjectFactory.createMeeting(TestObjectFactory.getNewMeetingForToday(WEEKLY, EVERY_WEEK,
CUSTOMER_MEETING));
center = TestObjectFactory.createWeeklyFeeCenter("Center", meeting);
group = TestObjectFactory.createWeeklyFeeGroupUnderCenter("Group", CustomerStatus.GROUP_ACTIVE, center);
LoanOfferingBO loanOffering = TestObjectFactory.createLoanOffering(startDate, meeting);
return TestObjectFactory.createLoanAccount("42423142341", group, AccountState.LOAN_ACTIVE_IN_GOOD_STANDING,
startDate, loanOffering);
}
private AccountBO getLoanAccount(final CustomerBO group, final MeetingBO meeting, final String offeringName,
final String shortName) {
Date startDate = new Date(System.currentTimeMillis());
LoanOfferingBO loanOffering = TestObjectFactory.createLoanOffering(offeringName, shortName, startDate, meeting);
return TestObjectFactory.createLoanAccount("42423142341", group, AccountState.LOAN_ACTIVE_IN_GOOD_STANDING,
startDate, loanOffering);
}
private AccountBO getLoanAccount(final CustomerBO group, final MeetingBO meeting, final String offeringName,
final String shortName, MifosCurrency currency) {
Date startDate = new Date(System.currentTimeMillis());
LoanOfferingBO loanOffering = TestObjectFactory.createLoanOffering(offeringName, shortName, startDate, meeting, currency);
return TestObjectFactory.createLoanAccount("42423142341", group, AccountState.LOAN_ACTIVE_IN_GOOD_STANDING,
startDate, loanOffering);
}
private AccountBO getLoanAccountInActiveBadStanding(final CustomerBO group, final MeetingBO meeting,
final String offeringName, final String shortName) {
Date startDate = new Date(System.currentTimeMillis());
LoanOfferingBO loanOffering = TestObjectFactory.createLoanOffering(offeringName, shortName, startDate, meeting);
return TestObjectFactory.createLoanAccount("42423141111", group, AccountState.LOAN_ACTIVE_IN_BAD_STANDING,
startDate, loanOffering);
}
}
<|file_name|>static_files_handler.py<|end_file_name|>
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Serves static content for "static_dir" and "static_files" handlers."""
import base64
import errno
import httplib
import mimetypes
import os
import os.path
import re
import zlib
from google.appengine.api import appinfo
from google.appengine.tools import augment_mimetypes
from google.appengine.tools.devappserver2 import errors
from google.appengine.tools.devappserver2 import url_handler
_FILE_MISSING_ERRNO_CONSTANTS = frozenset([errno.ENOENT, errno.ENOTDIR])
# Run at import time so we only do this once.
augment_mimetypes.init()
class StaticContentHandler(url_handler.UserConfiguredURLHandler):
"""Abstract base class for subclasses serving static content."""
# Associate the full path of a static file with a 2-tuple containing the:
# - mtime at which the file was last read from disk
# - a etag constructed from a hash of the file's contents
# Statting a small file to retrieve its mtime is approximately 20x faster than
# reading it to generate a hash of its contents.
_filename_to_mtime_and_etag = {}
def __init__(self, root_path, url_map, url_pattern):
"""Initializer for StaticContentHandler.
Args:
root_path: A string containing the full path of the directory containing
the application's app.yaml file.
url_map: An appinfo.URLMap instance containing the configuration for this
handler.
url_pattern: A re.RegexObject that matches URLs that should be handled by
this handler. It may also optionally bind groups.
"""
super(StaticContentHandler, self).__init__(url_map, url_pattern)
self._root_path = root_path
def _get_mime_type(self, path):
"""Returns the mime type for the file at the given path."""
if self._url_map.mime_type is not None:
return self._url_map.mime_type
_, extension = os.path.splitext(path)
return mimetypes.types_map.get(extension, 'application/octet-stream')
  def _handle_io_exception(self, start_response, e):
    """Serves the response to an OSError or IOError.
Args:
start_response: A function with semantics defined in PEP-333. This
function will be called with a status appropriate to the given
exception.
e: An instance of OSError or IOError used to generate an HTTP status.
Returns:
      An empty iterable.
"""
if e.errno in _FILE_MISSING_ERRNO_CONSTANTS:
start_response('404 Not Found', [])
else:
start_response('403 Forbidden', [])
return []
@staticmethod
def _calculate_etag(data):
return base64.b64encode(str(zlib.crc32(data)))
def _handle_path(self, full_path, environ, start_response):
"""Serves the response to a request for a particular file.
Note that production App Engine treats all methods as "GET" except "HEAD".
Unless set explicitly, the "Expires" and "Cache-Control" headers are
deliberately different from their production values to make testing easier.
If set explicitly then the values are preserved because the user may
reasonably want to test for them.
Args:
full_path: A string containing the absolute path to the file to serve.
environ: An environ dict for the current request as defined in PEP-333.
start_response: A function with semantics defined in PEP-333.
Returns:
An iterable over strings containing the body of the HTTP response.
"""
data = None
if full_path in self._filename_to_mtime_and_etag:
last_mtime, etag = self._filename_to_mtime_and_etag[full_path]
else:
last_mtime = etag = None
user_headers = self._url_map.http_headers or appinfo.HttpHeadersDict()
if_match = environ.get('HTTP_IF_MATCH')
if_none_match = environ.get('HTTP_IF_NONE_MATCH')
try:
mtime = os.path.getmtime(full_path)
except (OSError, IOError) as e:
# RFC-2616 section 14.24 says:
# If none of the entity tags match, or if "*" is given and no current
# entity exists, the server MUST NOT perform the requested method, and
# MUST return a 412 (Precondition Failed) response.
if if_match:
start_response('412 Precondition Failed', [])
return []
elif self._url_map.require_matching_file:
return None
else:
return self._handle_io_exception(start_response, e)
if mtime != last_mtime:
try:
data = self._read_file(full_path)
except (OSError, IOError) as e:
return self._handle_io_exception(start_response, e)
etag = self._calculate_etag(data)
self._filename_to_mtime_and_etag[full_path] = mtime, etag
if if_match and not self._check_etag_match(if_match,
etag,
allow_weak_match=False):
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.24
start_response('412 Precondition Failed',
[('ETag', '"%s"' % etag)])
return []
elif if_none_match and self._check_etag_match(if_none_match,
etag,
allow_weak_match=True):
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.26
start_response('304 Not Modified',
[('ETag', '"%s"' % etag)])
return []
else:
if data is None:
try:
data = self._read_file(full_path)
except (OSError, IOError) as e:
return self._handle_io_exception(start_response, e)
etag = self._calculate_etag(data)
self._filename_to_mtime_and_etag[full_path] = mtime, etag
headers = [('Content-length', str(len(data)))]
if user_headers.Get('Content-type') is None:
headers.append(('Content-type', self._get_mime_type(full_path)))
if user_headers.Get('ETag') is None:
headers.append(('ETag', '"%s"' % etag))
if user_headers.Get('Expires') is None:
headers.append(('Expires', 'Fri, 01 Jan 1990 00:00:00 GMT'))
if user_headers.Get('Cache-Control') is None:
headers.append(('Cache-Control', 'no-cache'))
for name, value in user_headers.iteritems():
# "name" will always be unicode due to the way that ValidatedDict works.
headers.append((str(name), value))
start_response('200 OK', headers)
if environ['REQUEST_METHOD'] == 'HEAD':
return []
else:
return [data]
@staticmethod
def _read_file(full_path):
with open(full_path, 'rb') as f:
return f.read()
@staticmethod
def _check_etag_match(etag_headers, etag, allow_weak_match):
"""Checks if an etag header matches a given etag.
Args:
etag_headers: A string representing an e-tag header value e.g.
'"xyzzy", "r2d2xxxx", W/"c3piozzzz"' or '*'.
etag: The etag to match the header to. If None then only the '*' header
        will match.
allow_weak_match: If True then weak etags are allowed to match.
Returns:
True if there is a match, False otherwise.
"""
# From RFC-2616:
# entity-tag = [ weak ] opaque-tag
# weak = "W/"
# opaque-tag = quoted-string
# quoted-string = ( <"> *(qdtext | quoted-pair ) <"> )
# qdtext = <any TEXT except <">>
# quoted-pair = "\" CHAR
# TEXT = <any OCTET except CTLs, but including LWS>
# CHAR = <any US-ASCII character (octets 0 - 127)>
# This parsing is not actually correct since it assumes that commas cannot
# appear in etags. But the generated etags do not contain commas so this
# still works.
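    # Illustrative matches (a sketch, not from the original source), assuming
    # etag == 'xyzzy':
    #   '"xyzzy"'   -> matches (strong)
    #   'W/"xyzzy"' -> matches only when allow_weak_match is True
    #   '*'         -> matches unconditionally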
for etag_header in etag_headers.split(','):
if etag_header.startswith('W/'):
if allow_weak_match:
etag_header = etag_header[2:]
else:
continue
etag_header = etag_header.strip().strip('"')
if etag_header == '*' or etag_header == etag:
return True
return False
@staticmethod
def _is_relative_path_valid(path):
"""Check if the relative path for a file is valid.
To match prod, redirection logic only fires on paths that contain a . or ..
as an entry, but ignores redundant separators. Since Dev App Server simply
passes the path to open, redundant separators are ignored (i.e. path/to/file
and path//to///file both map to the same thing). Since prod uses logic
that treats redundant separators as significant, we need to handle them
specially.
A related problem is that if a redundant separator is placed as the file
relative path, it can be passed to a StaticHandler as an absolute path.
As os.path.join causes an absolute path to throw away previous components
that could allow an attacker to read any file on the file system (i.e.
    if there is a static directory handler for /static and an attacker asks for
    the path '/static//etc/passwd', '/etc/passwd' is passed as the relative
    path and calling os.path.join(root_dir, '/etc/passwd') returns
    '/etc/passwd'.)
Args:
path: a path relative to a static handler base.
Returns:
bool indicating whether the path is valid or not.
"""
# Note: can't do something like path == os.path.normpath(path) as Windows
# would normalize separators to backslashes.
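    # Expected behaviour (sketch): 'a/b' is valid; '/etc/passwd' is invalid
    # (absolute); 'a//b' is invalid because '' appears between separators.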
return not os.path.isabs(path) and '' not in path.split('/')
@staticmethod
def _not_found_404(environ, start_response):
status = httplib.NOT_FOUND
start_response('%d %s' % (status, httplib.responses[status]),
[('Content-Type', 'text/plain')])
return ['%s not found' % environ['PATH_INFO']]
class StaticFilesHandler(StaticContentHandler):
"""Servers content for the "static_files" handler.
For example:
handlers:
- url: /(.*)/(.*)
static_files: \1/\2
upload: (.*)/(.*)
"""
def __init__(self, root_path, url_map):
"""Initializer for StaticFilesHandler.
Args:
root_path: A string containing the full path of the directory containing
the application's app.yaml file.
url_map: An appinfo.URLMap instance containing the configuration for this
handler.
"""
try:
url_pattern = re.compile('%s$' % url_map.url)
    except re.error as e:
raise errors.InvalidAppConfigError(
'invalid url %r in static_files handler: %s' % (url_map.url, e))
super(StaticFilesHandler, self).__init__(root_path,
url_map,
url_pattern)
def handle(self, match, environ, start_response):
"""Serves the file content matching the request.
Args:
match: The re.MatchObject containing the result of matching the URL
against this handler's URL pattern.
environ: An environ dict for the current request as defined in PEP-333.
start_response: A function with semantics defined in PEP-333.
Returns:
An iterable over strings containing the body of the HTTP response.
"""
relative_path = match.expand(self._url_map.static_files)
if not self._is_relative_path_valid(relative_path):
if self._url_map.require_matching_file:
return None
else:
return self._not_found_404(environ, start_response)
full_path = os.path.join(self._root_path, relative_path)
return self._handle_path(full_path, environ, start_response)
class StaticDirHandler(StaticContentHandler):
"""Servers content for the "static_files" handler.
For example:
handlers:
- url: /css
static_dir: stylesheets
"""
def __init__(self, root_path, url_map):
"""Initializer for StaticDirHandler.
Args:
root_path: A string containing the full path of the directory containing
the application's app.yaml file.
url_map: An appinfo.URLMap instance containing the configuration for this
handler.
"""
url = url_map.url
# Take a url pattern like "/css" and transform it into a match pattern like
# "/css/(?P<file>.*)$"
if url[-1] != '/':
url += '/'
try:
url_pattern = re.compile('%s(?P<file>.*)$' % url)
    except re.error as e:
raise errors.InvalidAppConfigError(
'invalid url %r in static_dir handler: %s' % (url, e))
super(StaticDirHandler, self).__init__(root_path,
url_map,
url_pattern)
def handle(self, match, environ, start_response):
"""Serves the file content matching the request.
Args:
match: The re.MatchObject containing the result of matching the URL
against this handler's URL pattern.
environ: An environ dict for the current request as defined in PEP-333.
start_response: A function with semantics defined in PEP-333.
Returns:
An iterable over strings containing the body of the HTTP response.
"""
relative_path = match.group('file')
if not self._is_relative_path_valid(relative_path):
return self._not_found_404(environ, start_response)
full_path = os.path.join(self._root_path,
self._url_map.static_dir,
relative_path)
return self._handle_path(full_path, environ, start_response)<|fim▁end|> | """Serves the response to an OSError or IOError. |
<|file_name|>typevar.py<|end_file_name|><|fim▁begin|>"""
Type variables for Parametric polymorphism.
Cretonne instructions and instruction transformations can be specified to be
polymorphic by using type variables.
"""
from __future__ import absolute_import
import math
from . import types, is_power_of_two
from copy import copy
try:
from typing import Tuple, Union, Iterable, Any, Set, TYPE_CHECKING # noqa
if TYPE_CHECKING:
from srcgen import Formatter # noqa
Interval = Tuple[int, int]
# An Interval where `True` means 'everything'
BoolInterval = Union[bool, Interval]
# Set of special types: None, False, True, or iterable.
SpecialSpec = Union[bool, Iterable[types.SpecialType]]
except ImportError:
pass
MAX_LANES = 256
MAX_BITS = 64
MAX_BITVEC = MAX_BITS * MAX_LANES
def int_log2(x):
# type: (int) -> int
return int(math.log(x, 2))
def intersect(a, b):
# type: (Interval, Interval) -> Interval
"""
Given two `(min, max)` inclusive intervals, compute their intersection.
Use `(None, None)` to represent the empty interval on input and output.
"""
if a[0] is None or b[0] is None:
return (None, None)
lo = max(a[0], b[0])
assert lo is not None
hi = min(a[1], b[1])
assert hi is not None
if lo <= hi:
return (lo, hi)
else:
return (None, None)
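# Illustrative results (a sketch, not from the original source):
#   intersect((8, 32), (16, 64)) == (16, 32)      # overlapping ranges
#   intersect((8, 16), (32, 64)) == (None, None)  # disjoint -> empty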
def is_empty(intv):
# type: (Interval) -> bool
return intv is None or intv is False or intv == (None, None)
def encode_bitset(vals, size):
# type: (Iterable[int], int) -> int
"""
Encode a set of values (each between 0 and size) as a bitset of width size.
"""
res = 0
assert is_power_of_two(size) and size <= 64
for v in vals:
assert 0 <= v and v < size
res |= 1 << v
return res
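# For example (sketch): encode_bitset([0, 2, 3], 8) == 0b00001101 == 13.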
def pp_set(s):
# type: (Iterable[Any]) -> str
"""
Return a consistent string representation of a set (ordering is fixed)
"""
return '{' + ', '.join([repr(x) for x in sorted(s)]) + '}'
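# e.g. (sketch): pp_set({2, 1}) == '{1, 2}' regardless of iteration order.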
def decode_interval(intv, full_range, default=None):
# type: (BoolInterval, Interval, int) -> Interval
"""
Decode an interval specification which can take the following values:
True
Use the `full_range`.
`False` or `None`
An empty interval
(lo, hi)
An explicit interval
"""
if isinstance(intv, tuple):
# mypy bug here: 'builtins.None' object is not iterable
lo, hi = intv
assert is_power_of_two(lo)
assert is_power_of_two(hi)
assert lo <= hi
assert lo >= full_range[0]
assert hi <= full_range[1]
return intv
if intv:
return full_range
else:
return (default, default)
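# Illustrative results (sketch):
#   decode_interval((8, 32), (8, 64))  == (8, 32)
#   decode_interval(True, (8, 64))     == (8, 64)
#   decode_interval(None, (8, 64), 1)  == (1, 1)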
def interval_to_set(intv):
# type: (Interval) -> Set
if is_empty(intv):
return set()
(lo, hi) = intv
assert is_power_of_two(lo)
assert is_power_of_two(hi)
assert lo <= hi
return set([2**i for i in range(int_log2(lo), int_log2(hi)+1)])
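# e.g. (sketch): interval_to_set((8, 32)) == {8, 16, 32}.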
def legal_bool(bits):
# type: (int) -> bool
"""
True iff bits is a legal bit width for a bool type.
bits == 1 || bits \in { 8, 16, .. MAX_BITS }
"""
return bits == 1 or \
(bits >= 8 and bits <= MAX_BITS and is_power_of_two(bits))
class TypeSet(object):
"""
A set of types.
We don't allow arbitrary subsets of types, but use a parametrized approach
instead.
Objects of this class can be used as dictionary keys.
Parametrized type sets are specified in terms of ranges:
- The permitted range of vector lanes, where 1 indicates a scalar type.
- The permitted range of integer types.
- The permitted range of floating point types, and
- The permitted range of boolean types.
The ranges are inclusive from smallest bit-width to largest bit-width.
A typeset representing scalar integer types `i8` through `i32`:
>>> TypeSet(ints=(8, 32))
TypeSet(lanes={1}, ints={8, 16, 32})
Passing `True` instead of a range selects all available scalar types:
>>> TypeSet(ints=True)
TypeSet(lanes={1}, ints={8, 16, 32, 64})<|fim▁hole|> >>> TypeSet(floats=True)
TypeSet(lanes={1}, floats={32, 64})
>>> TypeSet(bools=True)
TypeSet(lanes={1}, bools={1, 8, 16, 32, 64})
Similarly, passing `True` for the lanes selects all possible scalar and
vector types:
>>> TypeSet(lanes=True, ints=True)
TypeSet(lanes={1, 2, 4, 8, 16, 32, 64, 128, 256}, ints={8, 16, 32, 64})
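    An explicit lane interval combines with the scalar intervals
    (illustrative):

    >>> TypeSet(lanes=(2, 8), floats=True)
    TypeSet(lanes={2, 4, 8}, floats={32, 64})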
Finally, a type set can contain special types (derived from `SpecialType`)
which can't appear as lane types.
:param lanes: `(min, max)` inclusive range of permitted vector lane counts.
:param ints: `(min, max)` inclusive range of permitted scalar integer
widths.
:param floats: `(min, max)` inclusive range of permitted scalar floating
point widths.
:param bools: `(min, max)` inclusive range of permitted scalar boolean
widths.
:param bitvecs : `(min, max)` inclusive range of permitted bitvector
widths.
:param specials: Sequence of special types to appear in the set.
"""
def __init__(
self,
lanes=None, # type: BoolInterval
ints=None, # type: BoolInterval
floats=None, # type: BoolInterval
bools=None, # type: BoolInterval
bitvecs=None, # type: BoolInterval
specials=None # type: SpecialSpec
):
# type: (...) -> None
self.lanes = interval_to_set(decode_interval(lanes, (1, MAX_LANES), 1))
self.ints = interval_to_set(decode_interval(ints, (8, MAX_BITS)))
self.floats = interval_to_set(decode_interval(floats, (32, 64)))
self.bools = interval_to_set(decode_interval(bools, (1, MAX_BITS)))
self.bools = set(filter(legal_bool, self.bools))
self.bitvecs = interval_to_set(decode_interval(bitvecs,
(1, MAX_BITVEC)))
# Allow specials=None, specials=True, specials=(...)
self.specials = set() # type: Set[types.SpecialType]
if isinstance(specials, bool):
if specials:
self.specials = set(types.ValueType.all_special_types)
elif specials:
self.specials = set(specials)
def copy(self):
# type: (TypeSet) -> TypeSet
"""
        Return a copy of self.
"""
n = TypeSet()
n.lanes = copy(self.lanes)
n.ints = copy(self.ints)
n.floats = copy(self.floats)
n.bools = copy(self.bools)
n.bitvecs = copy(self.bitvecs)
n.specials = copy(self.specials)
return n
def typeset_key(self):
# type: () -> Tuple[Tuple, Tuple, Tuple, Tuple, Tuple, Tuple]
"""Key tuple used for hashing and equality."""
return (tuple(sorted(list(self.lanes))),
tuple(sorted(list(self.ints))),
tuple(sorted(list(self.floats))),
tuple(sorted(list(self.bools))),
tuple(sorted(list(self.bitvecs))),
tuple(sorted(s.name for s in self.specials)))
def __hash__(self):
# type: () -> int
h = hash(self.typeset_key())
assert h == getattr(self, 'prev_hash', h), "TypeSet changed!"
self.prev_hash = h
return h
def __eq__(self, other):
# type: (object) -> bool
if isinstance(other, TypeSet):
return self.typeset_key() == other.typeset_key()
else:
return False
def __ne__(self, other):
# type: (object) -> bool
return not self.__eq__(other)
def __repr__(self):
# type: () -> str
s = 'TypeSet(lanes={}'.format(pp_set(self.lanes))
if len(self.ints) > 0:
s += ', ints={}'.format(pp_set(self.ints))
if len(self.floats) > 0:
s += ', floats={}'.format(pp_set(self.floats))
if len(self.bools) > 0:
s += ', bools={}'.format(pp_set(self.bools))
if len(self.bitvecs) > 0:
s += ', bitvecs={}'.format(pp_set(self.bitvecs))
if len(self.specials) > 0:
s += ', specials=[{}]'.format(pp_set(self.specials))
return s + ')'
def emit_fields(self, fmt):
# type: (Formatter) -> None
"""Emit field initializers for this typeset."""
assert len(self.bitvecs) == 0, "Bitvector types are not emitable."
fmt.comment(repr(self))
fields = (('lanes', 16),
('ints', 8),
('floats', 8),
('bools', 8))
for (field, bits) in fields:
vals = [int_log2(x) for x in getattr(self, field)]
fmt.line('{}: BitSet::<u{}>({}),'
.format(field, bits, encode_bitset(vals, bits)))
def __iand__(self, other):
# type: (TypeSet) -> TypeSet
"""
Intersect self with other type set.
>>> a = TypeSet(lanes=True, ints=(16, 32))
>>> a
TypeSet(lanes={1, 2, 4, 8, 16, 32, 64, 128, 256}, ints={16, 32})
>>> b = TypeSet(lanes=(4, 16), ints=True)
>>> a &= b
>>> a
TypeSet(lanes={4, 8, 16}, ints={16, 32})
>>> a = TypeSet(lanes=True, bools=(1, 8))
>>> b = TypeSet(lanes=True, bools=(16, 32))
>>> a &= b
>>> a
TypeSet(lanes={1, 2, 4, 8, 16, 32, 64, 128, 256})
"""
self.lanes.intersection_update(other.lanes)
self.ints.intersection_update(other.ints)
self.floats.intersection_update(other.floats)
self.bools.intersection_update(other.bools)
self.bitvecs.intersection_update(other.bitvecs)
self.specials.intersection_update(other.specials)
return self
def issubset(self, other):
# type: (TypeSet) -> bool
"""
Return true iff self is a subset of other
"""
return self.lanes.issubset(other.lanes) and \
self.ints.issubset(other.ints) and \
self.floats.issubset(other.floats) and \
self.bools.issubset(other.bools) and \
self.bitvecs.issubset(other.bitvecs) and \
self.specials.issubset(other.specials)
def lane_of(self):
# type: () -> TypeSet
"""
Return a TypeSet describing the image of self across lane_of
"""
new = self.copy()
new.lanes = set([1])
new.bitvecs = set()
return new
def as_bool(self):
# type: () -> TypeSet
"""
Return a TypeSet describing the image of self across as_bool
"""
new = self.copy()
new.ints = set()
new.floats = set()
new.bitvecs = set()
if len(self.lanes.difference(set([1]))) > 0:
new.bools = self.ints.union(self.floats).union(self.bools)
if 1 in self.lanes:
new.bools.add(1)
return new
def half_width(self):
# type: () -> TypeSet
"""
Return a TypeSet describing the image of self across halfwidth
"""
new = self.copy()
new.ints = set([x//2 for x in self.ints if x > 8])
new.floats = set([x//2 for x in self.floats if x > 32])
new.bools = set([x//2 for x in self.bools if x > 8])
new.bitvecs = set([x//2 for x in self.bitvecs if x > 1])
new.specials = set()
return new
def double_width(self):
# type: () -> TypeSet
"""
Return a TypeSet describing the image of self across doublewidth
"""
new = self.copy()
new.ints = set([x*2 for x in self.ints if x < MAX_BITS])
new.floats = set([x*2 for x in self.floats if x < MAX_BITS])
new.bools = set(filter(legal_bool,
set([x*2 for x in self.bools if x < MAX_BITS])))
new.bitvecs = set([x*2 for x in self.bitvecs if x < MAX_BITVEC])
new.specials = set()
return new
def half_vector(self):
# type: () -> TypeSet
"""
Return a TypeSet describing the image of self across halfvector
"""
new = self.copy()
new.bitvecs = set()
new.lanes = set([x//2 for x in self.lanes if x > 1])
new.specials = set()
return new
def double_vector(self):
# type: () -> TypeSet
"""
Return a TypeSet describing the image of self across doublevector
"""
new = self.copy()
new.bitvecs = set()
new.lanes = set([x*2 for x in self.lanes if x < MAX_LANES])
new.specials = set()
return new
def to_bitvec(self):
# type: () -> TypeSet
"""
Return a TypeSet describing the image of self across to_bitvec
"""
assert len(self.bitvecs) == 0
all_scalars = self.ints.union(self.floats.union(self.bools))
new = self.copy()
new.lanes = set([1])
new.ints = set()
new.bools = set()
new.floats = set()
new.bitvecs = set([lane_w * nlanes for lane_w in all_scalars
for nlanes in self.lanes])
new.specials = set()
return new
def image(self, func):
# type: (str) -> TypeSet
"""
Return the image of self across the derived function func
"""
if (func == TypeVar.LANEOF):
return self.lane_of()
elif (func == TypeVar.ASBOOL):
return self.as_bool()
elif (func == TypeVar.HALFWIDTH):
return self.half_width()
elif (func == TypeVar.DOUBLEWIDTH):
return self.double_width()
elif (func == TypeVar.HALFVECTOR):
return self.half_vector()
elif (func == TypeVar.DOUBLEVECTOR):
return self.double_vector()
elif (func == TypeVar.TOBITVEC):
return self.to_bitvec()
else:
assert False, "Unknown derived function: " + func
def preimage(self, func):
# type: (str) -> TypeSet
"""
Return the inverse image of self across the derived function func
"""
# The inverse of the empty set is always empty
if (self.size() == 0):
return self
if (func == TypeVar.LANEOF):
new = self.copy()
new.bitvecs = set()
new.lanes = set([2**i for i in range(0, int_log2(MAX_LANES)+1)])
return new
elif (func == TypeVar.ASBOOL):
new = self.copy()
new.bitvecs = set()
if 1 not in self.bools:
new.ints = self.bools.difference(set([1]))
new.floats = self.bools.intersection(set([32, 64]))
            # If b1 is not in our typeset, then lanes=1 cannot be in the
# pre-image, as as_bool() of scalars is always b1.
new.lanes = self.lanes.difference(set([1]))
else:
new.ints = set([2**x for x in range(3, 7)])
new.floats = set([32, 64])
return new
elif (func == TypeVar.HALFWIDTH):
return self.double_width()
elif (func == TypeVar.DOUBLEWIDTH):
return self.half_width()
elif (func == TypeVar.HALFVECTOR):
return self.double_vector()
elif (func == TypeVar.DOUBLEVECTOR):
return self.half_vector()
elif (func == TypeVar.TOBITVEC):
new = TypeSet()
# Start with all possible lanes/ints/floats/bools
lanes = interval_to_set(decode_interval(True, (1, MAX_LANES), 1))
ints = interval_to_set(decode_interval(True, (8, MAX_BITS)))
floats = interval_to_set(decode_interval(True, (32, 64)))
bools = interval_to_set(decode_interval(True, (1, MAX_BITS)))
# See which combinations have a size that appears in self.bitvecs
has_t = set() # type: Set[Tuple[str, int, int]]
for l in lanes:
for i in ints:
if i * l in self.bitvecs:
has_t.add(('i', i, l))
for i in bools:
if i * l in self.bitvecs:
has_t.add(('b', i, l))
for i in floats:
if i * l in self.bitvecs:
has_t.add(('f', i, l))
for (t, width, lane) in has_t:
new.lanes.add(lane)
if (t == 'i'):
new.ints.add(width)
elif (t == 'b'):
new.bools.add(width)
else:
assert t == 'f'
new.floats.add(width)
return new
else:
assert False, "Unknown derived function: " + func
def size(self):
# type: () -> int
"""
Return the number of concrete types represented by this typeset
"""
return (len(self.lanes) * (len(self.ints) + len(self.floats) +
len(self.bools) + len(self.bitvecs)) +
len(self.specials))
def concrete_types(self):
# type: () -> Iterable[types.ValueType]
def by(scalar, lanes):
# type: (types.LaneType, int) -> types.ValueType
if (lanes == 1):
return scalar
else:
return scalar.by(lanes)
for nlanes in self.lanes:
for bits in self.ints:
yield by(types.IntType.with_bits(bits), nlanes)
for bits in self.floats:
yield by(types.FloatType.with_bits(bits), nlanes)
for bits in self.bools:
yield by(types.BoolType.with_bits(bits), nlanes)
for bits in self.bitvecs:
assert nlanes == 1
yield types.BVType.with_bits(bits)
for spec in self.specials:
yield spec
def get_singleton(self):
# type: () -> types.ValueType
"""
Return the singleton type represented by self. Can only call on
typesets containing 1 type.
"""
types = list(self.concrete_types())
assert len(types) == 1
return types[0]
def widths(self):
# type: () -> Set[int]
""" Return a set of the widths of all possible types in self"""
scalar_w = self.ints.union(self.floats.union(self.bools))
scalar_w = scalar_w.union(self.bitvecs)
return set(w * l for l in self.lanes for w in scalar_w)
class TypeVar(object):
"""
Type variables can be used in place of concrete types when defining
instructions. This makes the instructions *polymorphic*.
A type variable is restricted to vary over a subset of the value types.
This subset is specified by a set of flags that control the permitted base
types and whether the type variable can assume scalar or vector types, or
both.
:param name: Short name of type variable used in instruction descriptions.
:param doc: Documentation string.
:param ints: Allow all integer base types, or `(min, max)` bit-range.
:param floats: Allow all floating point base types, or `(min, max)`
bit-range.
:param bools: Allow all boolean base types, or `(min, max)` bit-range.
:param scalars: Allow type variable to assume scalar types.
:param simd: Allow type variable to assume vector types, or `(min, max)`
lane count range.
:param bitvecs: Allow all BitVec base types, or `(min, max)` bit-range.
"""
def __init__(
self,
name, # type: str
doc, # type: str
ints=False, # type: BoolInterval
floats=False, # type: BoolInterval
bools=False, # type: BoolInterval
scalars=True, # type: bool
simd=False, # type: BoolInterval
bitvecs=False, # type: BoolInterval
base=None, # type: TypeVar
derived_func=None, # type: str
specials=None # type: SpecialSpec
):
# type: (...) -> None
self.name = name
self.__doc__ = doc
self.is_derived = isinstance(base, TypeVar)
if base:
assert self.is_derived
assert derived_func
self.base = base
self.derived_func = derived_func
self.name = '{}({})'.format(derived_func, base.name)
else:
min_lanes = 1 if scalars else 2
lanes = decode_interval(simd, (min_lanes, MAX_LANES), 1)
self.type_set = TypeSet(
lanes=lanes,
ints=ints,
floats=floats,
bools=bools,
bitvecs=bitvecs,
specials=specials)
@staticmethod
def singleton(typ):
# type: (types.ValueType) -> TypeVar
"""Create a type variable that can only assume a single type."""
scalar = None # type: types.ValueType
if isinstance(typ, types.VectorType):
scalar = typ.base
lanes = (typ.lanes, typ.lanes)
elif isinstance(typ, types.LaneType):
scalar = typ
lanes = (1, 1)
elif isinstance(typ, types.SpecialType):
return TypeVar(typ.name, typ.__doc__, specials=[typ])
else:
assert isinstance(typ, types.BVType)
scalar = typ
lanes = (1, 1)
ints = None
floats = None
bools = None
bitvecs = None
if isinstance(scalar, types.IntType):
ints = (scalar.bits, scalar.bits)
elif isinstance(scalar, types.FloatType):
floats = (scalar.bits, scalar.bits)
elif isinstance(scalar, types.BoolType):
bools = (scalar.bits, scalar.bits)
elif isinstance(scalar, types.BVType):
bitvecs = (scalar.bits, scalar.bits)
tv = TypeVar(
typ.name, typ.__doc__,
ints=ints, floats=floats, bools=bools,
bitvecs=bitvecs, simd=lanes)
return tv
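    # For illustration (a sketch, not from the original source): calling
    # TypeVar.singleton() with the `i32` value type yields a TypeVar whose
    # typeset is TypeSet(lanes={1}, ints={32}).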
def __str__(self):
# type: () -> str
return "`{}`".format(self.name)
def __repr__(self):
# type: () -> str
if self.is_derived:
return (
'TypeVar({}, base={}, derived_func={})'
.format(self.name, self.base, self.derived_func))
else:
return (
'TypeVar({}, {})'
.format(self.name, self.type_set))
def __hash__(self):
# type: () -> int
if (not self.is_derived):
return object.__hash__(self)
return hash((self.derived_func, self.base))
def __eq__(self, other):
# type: (object) -> bool
if not isinstance(other, TypeVar):
return False
if self.is_derived and other.is_derived:
return (
self.derived_func == other.derived_func and
self.base == other.base)
else:
return self is other
def __ne__(self, other):
# type: (object) -> bool
return not self.__eq__(other)
# Supported functions for derived type variables.
# The names here must match the method names on `ir::types::Type`.
# The camel_case of the names must match `enum OperandConstraint` in
# `instructions.rs`.
LANEOF = 'lane_of'
ASBOOL = 'as_bool'
HALFWIDTH = 'half_width'
DOUBLEWIDTH = 'double_width'
HALFVECTOR = 'half_vector'
DOUBLEVECTOR = 'double_vector'
TOBITVEC = 'to_bitvec'
@staticmethod
def is_bijection(func):
# type: (str) -> bool
return func in [
TypeVar.HALFWIDTH,
TypeVar.DOUBLEWIDTH,
TypeVar.HALFVECTOR,
TypeVar.DOUBLEVECTOR]
@staticmethod
def inverse_func(func):
# type: (str) -> str
return {
TypeVar.HALFWIDTH: TypeVar.DOUBLEWIDTH,
TypeVar.DOUBLEWIDTH: TypeVar.HALFWIDTH,
TypeVar.HALFVECTOR: TypeVar.DOUBLEVECTOR,
TypeVar.DOUBLEVECTOR: TypeVar.HALFVECTOR
}[func]
@staticmethod
def derived(base, derived_func):
# type: (TypeVar, str) -> TypeVar
"""Create a type variable that is a function of another."""
# Safety checks to avoid over/underflows.
ts = base.get_typeset()
assert len(ts.specials) == 0, "Can't derive from special types"
if derived_func == TypeVar.HALFWIDTH:
if len(ts.ints) > 0:
assert min(ts.ints) > 8, "Can't halve all integer types"
if len(ts.floats) > 0:
assert min(ts.floats) > 32, "Can't halve all float types"
if len(ts.bools) > 0:
assert min(ts.bools) > 8, "Can't halve all boolean types"
elif derived_func == TypeVar.DOUBLEWIDTH:
if len(ts.ints) > 0:
assert max(ts.ints) < MAX_BITS,\
"Can't double all integer types."
if len(ts.floats) > 0:
assert max(ts.floats) < MAX_BITS,\
"Can't double all float types."
if len(ts.bools) > 0:
assert max(ts.bools) < MAX_BITS, "Can't double all bool types."
elif derived_func == TypeVar.HALFVECTOR:
assert min(ts.lanes) > 1, "Can't halve a scalar type"
elif derived_func == TypeVar.DOUBLEVECTOR:
assert max(ts.lanes) < MAX_LANES, "Can't double 256 lanes."
return TypeVar(None, None, base=base, derived_func=derived_func)
@staticmethod
def from_typeset(ts):
# type: (TypeSet) -> TypeVar
""" Create a type variable from a type set."""
tv = TypeVar(None, None)
tv.type_set = ts
return tv
def lane_of(self):
# type: () -> TypeVar
"""
Return a derived type variable that is the scalar lane type of this
type variable.
When this type variable assumes a scalar type, the derived type will be
the same scalar type.
"""
return TypeVar.derived(self, self.LANEOF)
def as_bool(self):
# type: () -> TypeVar
"""
Return a derived type variable that has the same vector geometry as
this type variable, but with boolean lanes. Scalar types map to `b1`.
"""
return TypeVar.derived(self, self.ASBOOL)
def half_width(self):
# type: () -> TypeVar
"""
Return a derived type variable that has the same number of vector lanes
as this one, but the lanes are half the width.
"""
return TypeVar.derived(self, self.HALFWIDTH)
def double_width(self):
# type: () -> TypeVar
"""
Return a derived type variable that has the same number of vector lanes
as this one, but the lanes are double the width.
"""
return TypeVar.derived(self, self.DOUBLEWIDTH)
def half_vector(self):
# type: () -> TypeVar
"""
Return a derived type variable that has half the number of vector lanes
as this one, with the same lane type.
"""
return TypeVar.derived(self, self.HALFVECTOR)
def double_vector(self):
# type: () -> TypeVar
"""
Return a derived type variable that has twice the number of vector
lanes as this one, with the same lane type.
"""
return TypeVar.derived(self, self.DOUBLEVECTOR)
def to_bitvec(self):
# type: () -> TypeVar
"""
Return a derived type variable that represent a flat bitvector with
the same size as self
"""
return TypeVar.derived(self, self.TOBITVEC)
def singleton_type(self):
# type: () -> types.ValueType
"""
If the associated typeset has a single type return it. Otherwise return
None
"""
ts = self.get_typeset()
if ts.size() != 1:
return None
return ts.get_singleton()
def free_typevar(self):
# type: () -> TypeVar
"""
Get the free type variable controlling this one.
"""
if self.is_derived:
return self.base.free_typevar()
elif self.singleton_type() is not None:
# A singleton type variable is not a proper free variable.
return None
else:
return self
def rust_expr(self):
# type: () -> str
"""
Get a Rust expression that computes the type of this type variable.
"""
if self.is_derived:
return '{}.{}()'.format(
self.base.rust_expr(), self.derived_func)
elif self.singleton_type():
return self.singleton_type().rust_name()
else:
return self.name
def constrain_types_by_ts(self, ts):
# type: (TypeSet) -> None
"""
Constrain the range of types this variable can assume to a subset of
those in the typeset ts.
"""
if not self.is_derived:
self.type_set &= ts
else:
self.base.constrain_types_by_ts(ts.preimage(self.derived_func))
def constrain_types(self, other):
# type: (TypeVar) -> None
"""
Constrain the range of types this variable can assume to a subset of
those `other` can assume.
"""
if self is other:
return
self.constrain_types_by_ts(other.get_typeset())
def get_typeset(self):
# type: () -> TypeSet
"""
Returns the typeset for this TV. If the TV is derived, computes it
recursively from the derived function and the base's typeset.
"""
if not self.is_derived:
return self.type_set
else:
return self.base.get_typeset().image(self.derived_func)
def get_fresh_copy(self, name):
# type: (str) -> TypeVar
"""
Get a fresh copy of self. Can only be called on free typevars.
"""
assert not self.is_derived
tv = TypeVar.from_typeset(self.type_set.copy())
tv.name = name
return tv<|fim▁end|> | |
<|file_name|>test_ld.py<|end_file_name|><|fim▁begin|>from unittest import mock
from .. import *
from bfg9000.tools.ld import LdLinker
from bfg9000.path import abspath
from bfg9000.versioning import Version
def mock_execute(args, **kwargs):
return 'SEARCH_DIR("/dir1")\nSEARCH_DIR("=/dir2")\n'
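# The canned output above mimics GNU ld's linker-script dump; a leading '='
# inside SEARCH_DIR marks a sysroot-relative directory (exercised in
# test_search_dirs_sysroot below).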
class TestLdLinker(CrossPlatformTestCase):
def __init__(self, *args, **kwargs):
super().__init__(clear_variables=True, *args, **kwargs)
def test_flavor(self):
ld = LdLinker(None, self.env, ['ld'], 'version')
self.assertEqual(ld.flavor, 'ld')
def test_lang(self):
class MockBuilder:
lang = 'c++'
ld = LdLinker(MockBuilder(), self.env, ['ld'], 'version')
self.assertEqual(ld.lang, 'c++')
def test_family(self):
class MockBuilder:
family = 'native'
ld = LdLinker(MockBuilder(), self.env, ['ld'], 'version')<|fim▁hole|> def test_gnu_ld(self):
version = 'GNU ld (GNU Binutils for Ubuntu) 2.26.1'
ld = LdLinker(None, self.env, ['ld'], version)
self.assertEqual(ld.brand, 'bfd')
self.assertEqual(ld.version, Version('2.26.1'))
def test_gnu_gold(self):
version = 'GNU gold (GNU Binutils for Ubuntu 2.26.1) 1.11'
ld = LdLinker(None, self.env, ['ld'], version)
self.assertEqual(ld.brand, 'gold')
self.assertEqual(ld.version, Version('1.11'))
def test_unknown_brand(self):
version = 'unknown'
ld = LdLinker(None, self.env, ['ld'], version)
self.assertEqual(ld.brand, 'unknown')
self.assertEqual(ld.version, None)
def test_search_dirs(self):
with mock.patch('bfg9000.shell.execute', mock_execute):
ld = LdLinker(None, self.env, ['ld'], 'version')
self.assertEqual(ld.search_dirs(),
[abspath('/dir1'), abspath('/dir2')])
def test_search_dirs_sysroot(self):
with mock.patch('bfg9000.shell.execute', mock_execute):
ld = LdLinker(None, self.env, ['ld'], 'version')
self.assertEqual(ld.search_dirs(sysroot='/sysroot'),
[abspath('/dir1'), abspath('/sysroot/dir2')])
def test_search_dirs_fail(self):
def mock_bad_execute(*args, **kwargs):
raise OSError()
with mock.patch('bfg9000.shell.execute', mock_bad_execute):
ld = LdLinker(None, self.env, ['ld'], 'version')
self.assertEqual(ld.search_dirs(), [])
self.assertRaises(OSError, lambda: ld.search_dirs(strict=True))<|fim▁end|> | self.assertEqual(ld.family, 'native')
|
<|file_name|>config_test.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2016 VMware, Inc. All Rights Reserved.
//
// This product is licensed to you under the Apache License, Version 2.0 (the "License").
// You may not use this product except in compliance with the License.
//
// This product may include a number of subcomponents with separate copyright notices and
// license terms. Your use of these subcomponents is subject to the terms and conditions
// of the subcomponent's license, as noted in the LICENSE file.
package configuration_test
import (
"io/ioutil"
"os"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
. "github.com/vmware/photon-controller-cli/photon/configuration"
)
var _ = Describe("Config", func() {
BeforeEach(func() {
var err error
UserConfigDir, err = ioutil.TempDir("", "config-test-")
Expect(err).To(BeNil())
})
AfterEach(func() {
err := RemoveConfigFile()
err2 := os.Remove(UserConfigDir)
Expect(err).To(BeNil())
Expect(err2).To(BeNil())
})
Describe("LoadConfig", func() {
Context("when config file does not exist", func() {
BeforeEach(func() {
err := RemoveConfigFile()
Expect(err).To(BeNil())
})
It("retuns empty config and no error", func() {
config, err := LoadConfig()
Expect(err).To(BeNil())
Expect(config).To(BeEquivalentTo(&Configuration{}))
})
})
Context("when config file is not json", func() {
BeforeEach(func() {
nonJson := "<target>http://localhost:9080</target>\n"
err := ChangeConfigFileContents(nonJson)
Expect(err).To(BeNil())
})
It("returns empty config and error", func() {
config, err := LoadConfig()
Expect(err).To(
MatchError("Error loading configuration: invalid character '<' looking for beginning of value"))
Expect(config).To(BeEquivalentTo(&Configuration{}))
})
})
Context("when config file is valid", func() {
var (
configExpected *Configuration
)
BeforeEach(func() {
configExpected = &Configuration{
CloudTarget: "http://localhost:9080",
}
err := SaveConfig(configExpected)
Expect(err).To(BeNil())
})
It("returns the config", func() {
config, err := LoadConfig()
Expect(err).To(BeNil())
Expect(config).To(BeEquivalentTo(configExpected))
})
})
})
Describe("SaveConfig", func() {
Context("when config file does not exist", func() {
BeforeEach(func() {
err := RemoveConfigFile()
Expect(err).To(BeNil())
})
It("saves to file", func() {
configExpected := &Configuration{
CloudTarget: "test-save-1",
}
err := SaveConfig(configExpected)
Expect(err).To(BeNil())
config, err := LoadConfig()
Expect(err).To(BeNil())
Expect(config).To(BeEquivalentTo(configExpected))
})
})
Context("when config file exists", func() {
BeforeEach(func() {
config := "{CloudTarget: \"http://localhost:9080\"}"
err := ChangeConfigFileContents(config)<|fim▁hole|> Expect(err).To(BeNil())
})
It("saves to updates to file", func() {
configExpected := &Configuration{
CloudTarget: "test-write-to-file-2",
}
err := SaveConfig(configExpected)
Expect(err).To(BeNil())
config, err := LoadConfig()
Expect(err).To(BeNil())
Expect(config).To(BeEquivalentTo(configExpected))
})
})
})
})<|fim▁end|> | |
<|file_name|>sharded_jit.py<|end_file_name|><|fim▁begin|># Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from functools import partial
from typing import Callable, Iterable, Optional, Tuple, Union
from absl import logging
import numpy as np
from jax import core
from jax.interpreters import ad
from jax.interpreters import partial_eval as pe
# TODO(skye): separate pmap into it's own module?
from jax.interpreters import mlir
from jax.interpreters import pxla
from jax.interpreters import xla
from jax import linear_util as lu
from jax._src import dispatch
from jax._src.lib import xla_bridge as xb
from jax._src.lib import xla_client as xc
from jax._src.lib.mlir import ir
from jax._src.lib.mlir.dialects import func as func_dialect
from jax._src.api_util import (argnums_partial, flatten_axes, flatten_fun,<|fim▁hole|> _ensure_index_tuple)
import jax._src.util as util
from jax.tree_util import tree_flatten, tree_unflatten
from jax._src.util import (new_name_stack, wrap_name, wraps, safe_map,
safe_zip, HashableFunction)
from jax._src.config import config
xops = xc._xla.ops
def _map(f, *xs):
return tuple(map(f, *xs))
class ResultToPopulate: pass
result_to_populate = ResultToPopulate()
def _avals_to_results_handler(nrep, npart, partitions, out_avals):
handlers = [_aval_to_result_handler(npart, parts, out_aval)
for parts, out_aval in safe_zip(partitions, out_avals)]
def handler(out_bufs):
return [h(bufs) for h, bufs in zip(handlers, out_bufs)]
return handler
def _aval_to_result_handler(npart, parts, aval):
if aval is not core.abstract_unit:
spec = pxla.partitioned_sharding_spec(npart, parts, aval)
indices = pxla.spec_to_indices(aval.shape, spec)
else:
spec = indices = None
return pxla.local_aval_to_result_handler(aval, spec, indices)
@lu.cache
def _sharded_callable(
fun: lu.WrappedFun, nparts: Optional[int],
in_parts: Tuple[pxla.PartitionsOrReplicated, ...],
out_parts_thunk: Callable[[], Tuple[pxla.PartitionsOrReplicated, ...]],
local_in_parts: Optional[Tuple[pxla.PartitionsOrReplicated, ...]],
local_out_parts_thunk: Callable[[], Optional[Tuple[pxla.PartitionsOrReplicated, ...]]],
local_nparts: Optional[int], name: str, *abstract_args):
nrep = 1
if local_in_parts is None:
local_in_parts = in_parts
global_abstract_args = [pxla.get_global_aval(arg, parts, lparts)
for arg, parts, lparts
in safe_zip(abstract_args, in_parts, local_in_parts)]
if logging.vlog_is_on(2):
logging.vlog(2, "abstract_args: %s", abstract_args)
logging.vlog(2, "global_abstract_args: %s", global_abstract_args)
logging.vlog(2, "in_parts: %s", in_parts)
logging.vlog(2, "local_in_parts: %s", local_in_parts)
jaxpr, global_out_avals, consts = pe.trace_to_jaxpr_final(fun, global_abstract_args)
platform = xb.get_backend().platform
if platform not in ["tpu", "gpu"]:
# TODO(skye): fall back to regular jit?
raise ValueError(f"sharded_jit not supported for {platform}")
nparts = pxla.reconcile_num_partitions(jaxpr, nparts)
assert nparts is not None
if nparts > xb.device_count():
raise ValueError(
f"sharded_jit computation requires {nparts} devices, "
f"but only {xb.device_count()} devices are available.")
if xb.local_device_count() < nparts < xb.device_count():
raise NotImplementedError(
f"sharded_jit across multiple hosts must use all available devices. "
f"Got {nparts} out of {xb.device_count()} requested devices "
f"(local device count: {xb.local_device_count()})")
if local_nparts is None:
if nparts > xb.local_device_count():
raise ValueError(
"Specify 'local_nparts' when using cross-process sharded_jit "
"and all inputs and outputs are replicated.")
else:
local_nparts = nparts
if local_nparts > xb.local_device_count():
raise ValueError(
f"sharded_jit computation requires {local_nparts} local devices, "
f"but only {xb.local_device_count()} local devices are available.")
if logging.vlog_is_on(2):
logging.vlog(2, "nparts: %d local_nparts: %d", nparts, local_nparts)
out_parts = out_parts_thunk()
local_out_parts = local_out_parts_thunk()
if local_out_parts is None:
local_out_parts = out_parts
if logging.vlog_is_on(2):
logging.vlog(2, "out_parts: %s", out_parts)
logging.vlog(2, "local_out_parts: %s", local_out_parts)
local_out_avals = [pxla.get_local_aval(out, parts, lparts)
for out, parts, lparts
in safe_zip(global_out_avals, out_parts, local_out_parts)]
log_priority = logging.WARNING if config.jax_log_compiles else logging.DEBUG
logging.log(log_priority,
"Compiling %s for %d devices with args %s.",
fun.__name__, nparts, global_abstract_args)
c = xc.XlaBuilder("spjit_{}".format(fun.__name__))
xla_consts = _map(partial(xla.pyval_to_ir_constant, c), consts)
xla_args = _xla_sharded_args(c, global_abstract_args, in_parts)
axis_env = xla.AxisEnv(nrep, (), ())
ctx = xla.TranslationContext(
c, platform, axis_env, new_name_stack(wrap_name(name, "sharded_jit")))
out_nodes = xla.jaxpr_subcomp(ctx, jaxpr, xla_consts, *xla_args)
out_tuple = xla.with_sharding(c, out_parts, xops.Tuple, c, out_nodes)
built = c.Build(out_tuple)
if nparts <= xb.local_device_count():
devices = xb.local_devices()[:nparts]
else:
assert nparts == xb.device_count()
devices = xb.devices()
device_assignment = np.array([[d for d in devices]])
device_assignment = np.reshape(device_assignment, (-1, nparts))
# device_assignment = None # TODO(skye): replace with default device assignment?
compiled = dispatch.backend_compile(
xb.get_backend(), built,
xb.get_compile_options(nrep, nparts, device_assignment))
input_specs = [
pxla.partitioned_sharding_spec(local_nparts, parts, aval)
for parts, aval in zip(local_in_parts, abstract_args)]
input_indices = [pxla.spec_to_indices(aval.shape, spec)
if spec is not None else None
for aval, spec in zip(abstract_args, input_specs)]
handle_args = partial(pxla.shard_args, compiled.local_devices(),
input_indices)
handle_outs = _avals_to_results_handler(nrep, local_nparts, # type: ignore
local_out_parts, local_out_avals)
return partial(_execute_spatially_partitioned, compiled, handle_args,
handle_outs)
def _sharded_jit_translation_rule(ctx, avals_in, avals_out, *in_nodes,
in_parts, out_parts_thunk, nparts,
name, call_jaxpr, local_in_parts,
local_out_parts_thunk, local_nparts):
subc = xc.XlaBuilder(f"sharded_jit_{name}")
# We assume any extra leading in_nodes are constants and replicate them.
num_extra_nodes = len(in_nodes) - len(in_parts)
assert num_extra_nodes >= 0
in_parts = (None,) * num_extra_nodes + in_parts
args = []
for i, (n, sharding) in enumerate(safe_zip(in_nodes, in_parts)):
# We use xla.set_sharding instead of xla.with_sharding because inlined calls
# shouldn't have shardings set directly on the inputs or outputs.
arg = xla.parameter(subc, i, ctx.builder.GetShape(n))
args.append(xla.set_sharding(subc, arg, sharding))
sub_ctx = ctx.replace(
builder=subc,
name_stack=new_name_stack(wrap_name(name, "sharded_jit")))
out_nodes = xla.jaxpr_subcomp(sub_ctx, call_jaxpr, (), *args)
out_parts = out_parts_thunk()
assert len(out_parts) == len(out_nodes)
out_nodes = [xla.set_sharding(subc, out, sharding)
for out, sharding in safe_zip(out_nodes, out_parts)]
subc = subc.build(xops.Tuple(subc, out_nodes))
return xla.xla_destructure(ctx.builder,
xops.Call(ctx.builder, subc, list(in_nodes)))
def _sharded_jit_lowering(ctx, *in_nodes,
in_parts, out_parts_thunk, nparts,
name, call_jaxpr, local_in_parts,
local_out_parts_thunk, local_nparts):
# We assume any extra leading in_nodes are constants and replicate them.
num_extra_nodes = len(in_nodes) - len(in_parts)
assert num_extra_nodes >= 0
in_parts = (None,) * num_extra_nodes + in_parts
args = []
for ns, sharding in safe_zip(
safe_map(mlir.wrap_singleton_ir_values, in_nodes), in_parts):
if sharding is not None:
args.append(
[mlir.wrap_with_sharding_op(n, xla.sharding_to_proto(sharding))
for n in ns])
else:
args.append(ns)
sub_ctx = ctx.module_context.replace(
name_stack=new_name_stack(wrap_name(name, "sharded_jit")))
fn = mlir.lower_jaxpr_to_fun(sub_ctx, f"sharded_jit_{name}",
core.ClosedJaxpr(call_jaxpr, ()))
output_types = safe_map(mlir.aval_to_ir_types, ctx.avals_out)
flat_output_types = util.flatten(output_types)
call = func_dialect.CallOp(flat_output_types,
ir.FlatSymbolRefAttr.get(fn.name.value),
mlir.flatten_lowering_ir_args(args))
out_nodes = util.unflatten(call.results, safe_map(len, output_types))
out_parts = out_parts_thunk()
outputs = []
for ns, sharding in safe_zip(out_nodes, out_parts):
if sharding is not None:
outputs.append(
[mlir.wrap_with_sharding_op(n, xla.sharding_to_proto(sharding))
for n in ns])
else:
outputs.append(ns)
return outputs
def _execute_spatially_partitioned(compiled, in_handler, out_handler, *args):
input_bufs = in_handler(args)
out_bufs = compiled.execute_sharded_on_local_devices(input_bufs)
return out_handler(out_bufs)
def _xla_sharded_args(c, avals, in_parts):
xla_args = []
for i, (sharding, aval) in enumerate(safe_zip(in_parts, avals)):
param = xla.with_sharding(c, sharding, xla.parameter, c, i,
*xla.aval_to_xla_shapes(aval))
xla_args.append(param)
return xla_args
def _sharded_call_impl(fun, *args, nparts, in_parts, out_parts_thunk,
local_in_parts, local_out_parts_thunk, local_nparts,
name):
compiled_fun = _sharded_callable(fun, nparts, in_parts, out_parts_thunk,
local_in_parts, local_out_parts_thunk,
local_nparts, name,
*map(xla.abstractify, args))
return compiled_fun(*args)
sharded_call_p = core.CallPrimitive("sharded_call")
sharded_call = sharded_call_p.bind
sharded_call_p.def_impl(_sharded_call_impl)
xla.register_translation(sharded_call_p, _sharded_jit_translation_rule)
mlir.register_lowering(sharded_call_p, _sharded_jit_lowering)
class _UnconstrainedPartitionSingleton:
def __str__(self):
return "UNCONSTRAINED"
# Unconstrained sentinel value for PartitionSpec, representing a dimension for
# which the user wants XLA to assign the best partitioning.
# TODO(yashkatariya): May rename to AUTO.
_UNCONSTRAINED_PARTITION = _UnconstrainedPartitionSingleton()
class PartitionSpec(tuple):
"""Tuple of integer specifying how a value should be partitioned.
Each integer corresponds to how many ways a dimension is partitioned. We
create a separate class for this so JAX's pytree utilities can distinguish it
from a tuple that should be treated as a pytree.
"""
def __new__(cls, *partitions):
return tuple.__new__(PartitionSpec, partitions)
def __repr__(self):
return "PartitionSpec%s" % tuple.__repr__(self)
"""A sentinel value representing a dim is unconstrained."""
UNCONSTRAINED = _UNCONSTRAINED_PARTITION
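  # For illustration (sketch): PartitionSpec(2, 1) splits the first axis into
  # two shards and leaves the second axis whole; repr() shows
  # "PartitionSpec(2, 1)".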
def sharded_jit(
fun: Callable,
in_parts,
out_parts,
num_partitions: Optional[int] = None,
local_in_parts=None,
local_out_parts=None,
local_num_partitions=None,
static_argnums: Union[int, Iterable[int]] = (),
):
"""Like ``jit``, but partitions ``fun`` across multiple devices.
WARNING: this feature is still under active development! It may not work well,
and may change without warning!
`sharded_jit` sets up ``fun`` for just-in-time compilation with XLA, but
unlike ``jit``, the compiled function will run across multiple devices
(e.g. multiple GPUs or multiple TPU cores). This is achieved by spatially
partitioning the data that flows through the computation, so each operation is
run across all devices and each device runs only a shard of the full
data. (Some data can optionally be replicated, which is sometimes more
efficient for small arrays when combined with larger spatially-partitioned
arrays.) Communication between devices is automatically inserted as necessary.
``sharded_jit`` can be useful if the jitted version of ``fun`` would not fit
in a single device's memory, or to speed up ``fun`` by running each operation
in parallel across multiple devices.
Note: ``sharded_jit`` is currently available on TPU only!
Args:
fun: Function to be jitted.
in_parts: Specifications for how each argument to ``fun`` should be
partitioned or replicated. This should be a PartitionSpec indicating into
how many partitions each dimension should be sharded, ``None`` indicating
replication, or (nested) standard Python containers thereof. For example,
``in_parts=PartitionSpec(2,1)`` means all arguments should be partitioned
over two devices across the first dimension;
``in_parts=(PartitionSpec(2,2), PartitionSpec(4,1), None)`` means the
first argument should be partitioned over four devices by splitting both
of its dimensions in half, the second argument should be partitioned over
the four devices across the first dimension, and the third argument is
replicated across the four devices.
All PartitionSpecs in a given ``sharded_jit`` call must correspond to the
same total number of partitions, i.e. the product of all PartitionSpecs
must be equal, and the number of dimensions in the PartitionSpec
corresponding to an array ``a`` should equal ``a.ndim``. Arguments marked
as static using ``static_argnums`` (see below) do not require a
PartitionSpec.
out_parts: The output partitions, i.e. how each output of ``fun`` should be
partitioned or replicated. This follows the same convention as
``in_parts``.
num_partitions: Optional. If set, explicitly specifies the number of devices
``fun`` should partitioned across (rather than inferring it from
``in_parts``, ``out_parts``, and/or any ``with_sharding_constraint``
calls). Setting this should usually be unnecessary, but can be used to
maintain device persistence across multiple sharded_jit calls when some of
those calls only involve replicated values.
local_in_parts: Optional. This should be set when partitioning across
multiple processes, and says how each process's worth of data should be
partitioned (vs. in_parts which is the "global" partitioning across all
processes). This API is likely to change in the future.
local_out_parts: Optional. This should be set when partitioning across
multiple processes, and says how each process's worth of data should be
partitioned (vs. out_parts which is the "global" partitioning across all
processes). This API is likely to change in the future.
local_num_partitions: Optional. Explicitly specifies the numbers of local
devices to partitions across in a multi-process setting. This API is
likely to change in the future.
static_argnums: An int or collection of ints specifying which positional
arguments to treat as static (compile-time constant). Operations that only
depend on static arguments will be constant-folded. Calling the jitted
function with different values for these constants will trigger
recompilation. If the jitted function is called with fewer positional
arguments than indicated by ``static_argnums`` then an error is raised.
Each of the static arguments will be broadcasted to all devices, and
cannot be partitioned - these arguments will be removed from the *args
list before matching each remaining argument with its corresponding
PartitionSpec. Arguments that are not arrays or containers thereof must
be marked as static. Defaults to ``()``.
Returns:
A version of ``fun`` that will be distributed across multiple devices.
"""
if num_partitions is not None:
nparts = num_partitions
else:
nparts = pxla.get_num_partitions(in_parts, out_parts)
if local_num_partitions is not None:
local_nparts = local_num_partitions
else:
local_nparts = pxla.get_num_partitions(local_in_parts, local_out_parts)
static_argnums = _ensure_index_tuple(static_argnums)
@wraps(fun)
def wrapped(*args, **kwargs):
if kwargs:
raise NotImplementedError("sharded_jit over kwargs not yet supported")
f = lu.wrap_init(fun)
if static_argnums:
if max(static_argnums) >= len(args):
raise ValueError(
f"jitted function has static_argnums={static_argnums}"
f" but was called with only {len(args)} positional "
f"argument{'s' if len(args) > 1 else ''}. "
"All static broadcasted arguments must be passed positionally.")
dyn_argnums = [i for i in range(len(args)) if i not in static_argnums]
f, args = argnums_partial(f, dyn_argnums, args)
args_flat, in_tree = tree_flatten((args, kwargs))
in_parts_flat = tuple(flatten_axes("sharded_jit in_parts",
in_tree.children()[0], in_parts))
if local_in_parts is not None:
local_in_parts_flat = tuple(flatten_axes("sharded_jit local_in_parts",
in_tree.children()[0], local_in_parts))
else:
local_in_parts_flat = None
flat_fun, out_tree = flatten_fun(f, in_tree)
# TODO(skye): having a function-typed param in a primitive seems dicey, is
# there a better way?
out_parts_thunk = HashableFunction(
lambda: tuple(flatten_axes("sharded_jit out_parts", out_tree(), out_parts)),
closure=out_parts)
if local_out_parts:
local_out_parts_thunk = HashableFunction(
lambda: tuple(flatten_axes("sharded_jit local_out_parts",
out_tree(), local_out_parts)),
closure=local_out_parts)
else:
local_out_parts_thunk = HashableFunction(lambda: None, closure=None)
out = sharded_call(
flat_fun,
*args_flat,
nparts=nparts,
in_parts=in_parts_flat,
out_parts_thunk=out_parts_thunk,
local_in_parts=local_in_parts_flat,
local_out_parts_thunk=local_out_parts_thunk,
local_nparts=local_nparts,
name=flat_fun.__name__)
return tree_unflatten(out_tree(), out)
return wrapped
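# Hypothetical usage sketch (assumes two available TPU devices; not part of
# the original module):
#
#   f = sharded_jit(lambda x: x + 1,
#                   in_parts=PartitionSpec(2, 1),
#                   out_parts=PartitionSpec(2, 1))
#   y = f(np.ones((8, 4)))  # each device computes a (4, 4) shard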
def _sharding_constraint_impl(x, partitions):
# TODO(skye): can we also prevent this from being called in other
# non-sharded_jit contexts? (e.g. pmap, control flow)
raise NotImplementedError(
"with_sharding_constraint() should only be called inside sharded_jit()")
def _sharding_constraint_translation_rule(ctx, avals_in, avals_out, x_node,
partitions):
return [xla.set_sharding(ctx.builder, x_node, partitions)]
sharding_constraint_p = core.Primitive("sharding_constraint")
sharding_constraint_p.def_impl(_sharding_constraint_impl)
sharding_constraint_p.def_abstract_eval(lambda x, partitions: x)
ad.deflinear2(sharding_constraint_p,
lambda ct, _, partitions: (with_sharding_constraint(ct, partitions),))
xla.register_translation(sharding_constraint_p,
_sharding_constraint_translation_rule)
def _sharding_constraint_lowering(ctx, x_node, partitions):
return [mlir.wrap_with_sharding_op(x_node, xla.sharding_to_proto(partitions))]
mlir.register_lowering(sharding_constraint_p, _sharding_constraint_lowering)
def with_sharding_constraint(x, partitions: Optional[PartitionSpec]):
"""Identity-like function that specifies how ``x`` should be sharded.
WARNING: this feature is still under active development! It may not work well,
and may change without warning!
This should only be called inside a function transformed by ``sharded_jit``.
It constrains how the function is sharded: regardless of any other specified
partitions, the compiler will make sure that ``x`` is sharded according to
``partitions``. Note that a ``with_sharding_constraint`` call doesn't
necessarily correspond to a reshard, since the compiler is free to achieve
this sharding as long as the constraint is met, e.g. it might insert a reshard
earlier in the computation. Another way to think of this is that the
``with_sharding_constraint`` call may flow "up" the function to preceding
operations as well as "down" to subsequent ones.
``partitions`` must correspond to the same number of total partitions dictated
by the outer ``sharded_jit`` and any other ``with_sharding_constraint`` calls.
In the case where only replication has been specified, any ``partitions`` are
valid.
Example usage:
    @partial(sharded_jit, in_parts=None, out_parts=None, num_partitions=2)
def f(x):
y = x + 1
y = with_sharding_constraint(y, PartitionSpec(2,1))
return y * 2
In this example, the inputs and outputs of ``f`` will be replicated, but the
inner value of ``y`` will be partitioned in half. ``f`` will run on two
  devices due to the ``with_sharding_constraint`` call.
Args:
x: Array value
partitions: PartitionSpec indicating how ``x`` should be partitioned, or
None for replication.
Returns:
A new version of ``x`` with the specified sharding applied.
"""
return sharding_constraint_p.bind(x, partitions=partitions)<|fim▁end|> | |
<|file_name|>backboneApp.js<|end_file_name|><|fim▁begin|>var Backbone = require('backbone'),
_ = require('underscore');
global.App = {};
App.Task = Backbone.Model.extend({
    // urlRoot lets Backbone build '/tasks/:id' once the model has an id;
    // evaluating this.get('id') at definition time would not work.
    urlRoot : 'http://localhost:9292/tasks',
defaults : {
title: 'Untitled Task 1',
done : false
}
});
App.TaskCollection = Backbone.Collection.extend({
    model : App.Task,
    url : 'http://localhost:9292/tasks'
});
App.TaskView = Backbone.View.extend({
tagName : 'li',
className : 'task',
template : _.template(require('./templates/task.html')),
initialize : function() {
this.render();
},
render : function() {
this.$el.html(this.template(this.model.attributes));
this.delegateEvents();
return this;
}
});
App.TaskCollectionView = Backbone.View.extend({
tagName : 'ul',
className : 'task-list',
initialize: function() {
this.render();
},
render : function() {
        this.$el.empty();
        // Capture the view: inside the _.each callback `this` would not
        // refer to the collection view.
        var self = this;
        _.each(this.collection.models, function(task) {
            var view = new App.TaskView({model: task});
            self.$el.append(view.render().$el);
});<|fim▁hole|> return this;
}
});
App.Router = Backbone.Router.extend({
routes : {
'(/)' : 'showList'
},
showList: function() {
var tasks = new App.TaskCollection();
var view = new App.TaskCollectionView({collection: tasks});
        Backbone.$('body').html(view.render().el);
}
});
module.exports = App;<|fim▁end|> |
this.delegateEvents(); |
<|file_name|>setup_ch.py<|end_file_name|><|fim▁begin|>'''
Created on Jul 19, 2010
@author: jnaous
'''
from django.core.urlresolvers import reverse
from django.test import Client
from common.tests.client import test_get_and_post_form
from django.contrib.auth.models import User
from pyquery import PyQuery as pq
from openflow.plugin.models import OpenFlowInterface, NonOpenFlowConnection
from geni.planetlab.models import PlanetLabNode
try:
from setup_expedient_params import \
SUPERUSER_USERNAME, SUPERUSER_PASSWORD,\
USER_INFO,\
PL_AGGREGATE_INFO,\
OF_AGGREGATE_INFO,\
OF_PL_CONNECTIONS
except ImportError:
print """
Could not import the setup_expedient_params module. Make sure this
module exists and that it contains the following variables:
    SUPERUSER_USERNAME, SUPERUSER_PASSWORD, USER_INFO,
    PL_AGGREGATE_INFO, OF_AGGREGATE_INFO, OF_PL_CONNECTIONS
"""
raise
def run():
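    # Drives a Django test client through the UI as the superuser: registers
    # the PlanetLab and OpenFlow aggregates, records the static OF<->PL
    # connections, then creates each user's account and projects.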
client = Client()
client.login(username=SUPERUSER_USERNAME,
password=SUPERUSER_PASSWORD)
# Add all planetlab aggregates
for pl_agg in PL_AGGREGATE_INFO:
print "adding pl agg %s" % pl_agg["url"]
response = test_get_and_post_form(
client,
reverse("planetlab_aggregate_create"),
pl_agg,
)
print "got response %s" % response
assert response.status_code == 302
for of_agg in OF_AGGREGATE_INFO:
print "adding of agg %s" % of_agg["url"]
response = test_get_and_post_form(
client,
reverse("openflow_aggregate_create"),
of_agg,
del_params=["verify_certs"],
)
assert response.status_code == 302
for cnxn_tuple in OF_PL_CONNECTIONS:
print "adding cnxn %s" % (cnxn_tuple,)
NonOpenFlowConnection.objects.get_or_create(
of_iface=OpenFlowInterface.objects.get(
switch__datapath_id=cnxn_tuple[0],
port_num=cnxn_tuple[1],
),
resource=PlanetLabNode.objects.get(name=cnxn_tuple[2]),
)
client.logout()
for username, info in USER_INFO.items():
# create user
User.objects.create_user(
username=username, email=info["email"], password=info["password"])
client.login(username=username, password=info["password"])
# create project and slice
for project in info["projects"]:
response = test_get_and_post_form(
client, reverse("project_create"),
params=dict(
name=project["name"],
description=project["description"],<|fim▁hole|> assert response.status_code == 302
# This code is missing the project id. Need to get somehow to use reverse.
# for slice in project["slices"]:
# response = test_get_and_post_form(
# client, reverse("slice_create"),
# params=dict(
# name=slice["name"],
# description=slice["description"],
# ),
# )
# assert response.status_code == 302
client.logout()<|fim▁end|> | ),
) |
<|file_name|>test_user_keys.js<|end_file_name|><|fim▁begin|>var assert = require('assert');
var keys = require("cmd/common/keys/user.js");
var userKeysNock = require('test/fixtures/user/fixture_user_keys');
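// `userKeysNock` stubs the HTTP API with nock; `done()` in tearDown asserts
// that every mocked request was actually performed.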
module.exports = {
setUp : function(cb){
return cb();
},
'list keys' : function(cb){
keys({ _ : ['list'] }, function(err, list){
assert.equal(err, null, err);
assert.ok(list);
assert.equal(list.list.length, 1);
return cb();
});
},
'create keys' : function(cb){
keys({ _ : ['add'] }, function(err){
assert.ok(!err, err);
keys({ _ : ['add', 'UserKey'] }, function(err, key){
assert.equal(err, null, err);
assert.ok(key.apiKey);
assert.ok(key.apiKey.label);
assert.ok(key.apiKey.key);
return cb();
});
});
},
'revoke keys' : function(cb){
keys.skipPrompt = true;
keys({ _ : ['delete'] }, function(err){
assert.ok(err);
keys({ _ : ['delete', 'UserKey'] }, function(err, key){
assert.equal(err, null, err);
assert.ok(key.apiKey);
assert.ok(key.apiKey.label);
assert.ok(key.apiKey.key);
return cb();
});
});
},
'update keys' : function (cb) {
keys.skipPrompt = true;
keys({ _ : ['update'] }, function(err){
assert.ok(err);
keys({ _ : ['update', 'UserKey', 'UserKey-Updated'] }, function(err, key){
assert.ok(!err, err);
assert.ok(key.apiKey);
assert.ok(key.apiKey.label);
assert.equal('UserKey-Updated', key.apiKey.label);
keys({ _ : ['update', '1239jncjjcd'] }, function(err){
assert.ok(err);
return cb();
});
});
});
},
'target keys' : function(cb){<|fim▁hole|> keys({ _ : ['target'] }, function(err, r){
assert.equal(err, null);
assert.equal(r, key_val);
return cb();
});
});
},
tearDown : function(cb){
userKeysNock.done();
return cb();
}
};<|fim▁end|> | var key_val = "pviryBwt22iZ0iInufMYBuVV";
keys({ _ : ['target', 'UserKey'] }, function(err, r){
assert.equal(err, null, err);
assert.equal(r, key_val); |
<|file_name|>next-empty.worker.js<|end_file_name|><|fim▁begin|>self.addEventListener("message", nextEmpty);
function isAt(points, x, y) {
for (var i = 0, len = points.length; i < len; i += 2) {
if (points[i] == x && points[i + 1] == y)
return true;
}
return false;
}
function nextEmpty(event) {<|fim▁hole|> var y = 0;
while (true) {
x = parseInt(Math.random() * data.width);
y = parseInt(Math.random() * data.height);
if (!isAt(data.points, x, y))
break;
}
self.postMessage({x: x, y: y});
}<|fim▁end|> | var data = event.data;
var x = 0; |
<|file_name|>developer.js<|end_file_name|><|fim▁begin|>/* ###########################################################################
GLOBAL ASSETS RELEASE v6.0.2
BUILD DATE: 20100224
########################################################################### */
// init values
hele = new Array();
newspause = 4; // time to pause news items in seconds
fx = op = ni = 0;
tp = ns = 1;
nop = .1;
nextf = -1;
mout = "mout";
done = false;
newspause = newspause * 1000;
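// featurefade() cross-fades the homepage feature panels; fadein()/fadeout()
// step the opacity, falling back to instant swaps on browsers without
// reliable opacity support (old Opera, IE/Mac, old Mozilla).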
function featurefade(selectedf){
if (is.docom){
if (done){
done = false;
if (selectedf != 0){
hele['subhover'+selectedf].style.visibility = "visible";
hele['mout'].style.visibility = "visible";
if (fx != 0){
hele['feature'+fx].style.zIndex = 10;
hele['subhover'+fx].style.visibility = "hidden";
}
hele['feature'+selectedf].style.zIndex = 20;
hele['feature'+selectedf].style.visibility = "visible";
fc = fx;
fx = selectedf;
setTimeout('fadein();',1);
}else{
hele['mout'].style.visibility = "hidden";
hele['subhover'+fx].style.visibility = "hidden";
fc = fx;
fx = selectedf;
op = 1;
setTimeout('fadeout();',1);
}
}else{
nextf = selectedf;
}
}
}
function fadein(){
if (!is.op && !is.iemac && !is.oldmoz){
setopacity('feature'+fx,.99);
}else{
setopacity('feature'+fx,1);
}
if (fc != 0){
hele['feature'+fc].style.visibility = "hidden";
}
op = 0;
done = true;
if (nextf != -1){
featurefade(nextf);
nextf = -1;
}
}
function fadeout(){
if (!is.op && !is.iemac && !is.oldmoz){
op = op - .5;
if (op <= 0){
op = 0;
hele['feature'+fc].style.visibility = "hidden";
setopacity('feature'+fc,.99);
done = true;
}else{
setopacity('feature'+fc,op);
setTimeout('fadeout();',50);
}
}else{
hele['feature'+fc].style.visibility = "hidden";
done = true;
}
}<|fim▁hole|> nx = ns + 1;
if(nx > tp){
nx = 1;
}
if (!is.safari && !is.oldmoz && !is.ns6 && !is.iemac){
setopacity('newsitem'+nx,.1);
}else{
var nn = 1;
while (hele['newsitem'+nn]){
if (nn != nx){
hele['newsitem'+nn].style.visibility = "hidden";
}else{
if (!is.oldmoz){
setopacity('newsitem'+nn,.99);
}
hele['newsitem'+nn].style.visibility = "visible";
}
nn++;
}
}
hele['newsitem'+nx].style.visibility = "visible";
hele['newsitem'+ns].style.zIndex = 3;
hele['newsitem'+nx].style.zIndex = 5;
}
if (!is.safari && !is.oldmoz && !is.ns6){
nop = nop + .2;
if (nop >= .99){
nop = .1;
hele['newsitem'+ns].style.visibility = "hidden";
setopacity('newsitem'+ns,.99);
ns++;
if(ns > tp){
ns = 1;
}
setTimeout('fadenews();',newspause);
}else{
setopacity('newsitem'+nx,nop);
setTimeout('fadenews();',130);
}
}else{
ns++;
if(ns > tp){
ns = 1;
}
setTimeout('fadenews();',newspause);
}
}
function setopacity(cobj,opac){
if (document.all && !is.op && !is.iemac){ //ie
hele[cobj].filters.alpha.opacity = opac * 100;
}else{
hele[cobj].style.MozOpacity = opac;
hele[cobj].style.opacity = opac;
}
}
var rollNames=["",""];
function prephome(){
if (is.docom && !hele['newsitem1']){
while (document.getElementById('newsitem'+tp)){
hele['newsitem'+tp] = document.getElementById('newsitem'+tp);
hele['newsitem'+tp].style.left='10px';
if (is.oldmoz){
setopacity('newsitem'+tp,1);
hele['newsitem'+tp].style.visibility = "hidden";
}
if (is.iewin){
hele['newsitem'+tp].style.backgroundImage = 'url(/im/bg_home_b3_iewin.gif)';
}
if (tp == 1){
hele['newsitem1'].style.zIndex = 3;
}
if (is.oldmoz && tp == 1){
hele['newsitem'+tp].style.visibility = "visible";
}
tp++;
}
tp--;
// get names for omniture
if(rollNames[0] == "" && document.getElementById('ipfeature')){
rollNames[0] = document.getElementById('ipfeature').src.replace(/.*\/b1_([^\/.]+_d).jpg/,"$1");;
rollNames[1] = document.getElementById('ipsub1').src.replace(/.*\/b1_([^\/.]+)_p1.gif/,"$1_s");;
rollNames[2] = document.getElementById('ipsub2').src.replace(/.*\/b1_([^\/.]+)_p2.gif/,"$1_s");;
rollNames[3] = document.getElementById('ipsub3').src.replace(/.*\/b1_([^\/.]+)_p3.gif/,"$1_s");;
}
var sf = 1;
while (document.getElementById('subhover'+sf)){
hele['subhover'+sf] = document.getElementById('subhover'+sf);
hele['feature'+sf] = document.getElementById('feature'+sf);
if (!is.op && !is.iemac && !is.oldmoz){
setopacity('feature'+sf,1);
}
sf++;
}
hele['mout'] = document.getElementById('mout');
if (tp > 0){
setTimeout('fadenews();',newspause);
}
}
// for old code with new page
if (!document.getElementById('mtopics')){
movin();
}
}
// omniture code
function customlink(thisfeature) {
if(window.s_account){
s_linkType='o';
s_linkName=thisfeature;
s_lnk=s_co(this);
s_gs(s_account);
}
}
// legacy function
var rollCount=[0,0,0,0];
function sendRollData() {}<|fim▁end|> |
function fadenews(){
if (nop == .1){ |
<|file_name|>DP20150401B.py<|end_file_name|><|fim▁begin|>"""
[2015-04-01] Challenge #208 [Intermediate] ASCII Gradient Generator
https://www.reddit.com/r/dailyprogrammer/comments/3104wu/20150401_challenge_208_intermediate_ascii/
# [](#IntermediateIcon) _(Intermediate)_: ASCII Gradient Generator
A linear colour gradient is where an image transitions through a range of colours, [like
this](http://i.imgur.com/IPwnI8X.png). A gradient doesn't need to be directly horizontal or vertical - it can be
[diagonal](http://i.imgur.com/D4trkEk.png) too, or only be [longer or shorter](http://i.imgur.com/8CHx95i.png) than
usual. It can also cycle through [as many colours as you like](http://i.imgur.com/Br3xwXM.png).
A radial colour gradient is a similar concept, except the colours move [radially outwards like
this](http://i.imgur.com/C6SE6m3.png), rather than linearly across. Radial gradients can also be in [different
positions or with different colours](http://i.imgur.com/S19EOu3.png).
To describe a gradient, you need two things - the colours in it, and its location. Describing the location of a radial
gradient is easy: for a radial gradient [like this](http://i.imgur.com/dTvfj7f.png), you only need to know the center
of the gradient (the red dot), and the radius from the center at which the gradient finishes (`r`). To locate a linear
gradient [like this](http://i.imgur.com/kyZTQnK.png), you need to know two points - the start (red) and end (green)
location. The gradient colours run perpendicular to the line joining the start and end points.
Today, we won't be dealing with colours. Instead, we'll be dealing with characters on the screen. You'll accept the
parameters of a gradient, and you'll output the displayed gradient.
# Formal Inputs and Outputs
## Input Description
You will first accept the size of the output display, as a width and height in characters, like this:
40 30
This corresponds to a grid 40 across and 30 down, like this:
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
........................................
The grid follows **screen space**, so the **top-left** corner is position **(0, 0)**.
Next, you will accept the characters that make up the gradient 'colours', from start to finish (or from inside to
outside, for a radial gradient), like this: (note the space at the start)
.,:;xX&@
Any points outside the gradient will have the first/last character, depending on which side of the gradient they're on.
After this, you will accept the parameters of the gradient. This may take one of two forms:
* For a **radial** gradient, the next line will look like this:
`radial x y r`
Where **(`x`, `y`)** is the center of the gradient, and **`r`** is the radius of the gradient, both in pixels.
* For a **linear** gradient, the next line will look like this:
`linear x1 y1 x2 y2`
Where **(`x1`, `y1`)** is the start point of the gradient, and **(`x2`, `y2`)** is the end point of the gradient,
both in pixel measure.
## Output Description
You are to display the given gradient on a grid with the given size, like this:
@@@@@@@@@@@&&&&&XXXXXXXXX&&&&&@@@@@@@@@@
@@@@@@@@@@&&&&XXXXXXXXXXXXX&&&&@@@@@@@@@
@@@@@@@@&&&&XXXXXXxxxxxXXXXXX&&&&@@@@@@@
@@@@@@@&&&&XXXXxxxxxxxxxxxXXXX&&&&@@@@@@
@@@@@@@&&&XXXxxxxxx;;;xxxxxxXXX&&&@@@@@@
@@@@@@&&&XXXxxxx;;;;;;;;;xxxxXXX&&&@@@@@
@@@@@&&&XXXxxx;;;;;;;;;;;;;xxxXXX&&&@@@@
@@@@@&&XXXxxx;;;;:::::::;;;;xxxXXX&&@@@@
@@@@&&&XXxxx;;;:::::::::::;;;xxxXX&&&@@@
@@@@&&XXXxx;;;::::,,,,,::::;;;xxXXX&&@@@
@@@&&&XXxxx;;:::,,,,,,,,,:::;;xxxXX&&&@@
@@@&&XXXxx;;;::,,,,...,,,,::;;;xxXXX&&@@
@@@&&XXXxx;;:::,,.......,,:::;;xxXXX&&@@
@@@&&XXxxx;;::,,,... ...,,,::;;xxxXX&&@@
@@@&&XXxx;;;::,,... ...,,::;;;xxXX&&@@
@@@&&XXxx;;;::,,.. ..,,::;;;xxXX&&@@
@@@&&XXxx;;;::,,... ...,,::;;;xxXX&&@@
@@@&&XXxxx;;::,,,... ...,,,::;;xxxXX&&@@
@@@&&XXXxx;;:::,,.......,,:::;;xxXXX&&@@
@@@&&XXXxx;;;::,,,,...,,,,::;;;xxXXX&&@@
@@@&&&XXxxx;;:::,,,,,,,,,:::;;xxxXX&&&@@
@@@@&&XXXxx;;;::::,,,,,::::;;;xxXXX&&@@@
@@@@&&&XXxxx;;;:::::::::::;;;xxxXX&&&@@@
@@@@@&&XXXxxx;;;;:::::::;;;;xxxXXX&&@@@@
@@@@@&&&XXXxxx;;;;;;;;;;;;;xxxXXX&&&@@@@
@@@@@@&&&XXXxxxx;;;;;;;;;xxxxXXX&&&@@@@@
@@@@@@@&&&XXXxxxxxx;;;xxxxxxXXX&&&@@@@@@
@@@@@@@&&&&XXXXxxxxxxxxxxxXXXX&&&&@@@@@@
@@@@@@@@&&&&XXXXXXxxxxxXXXXXX&&&&@@@@@@@
@@@@@@@@@@&&&&XXXXXXXXXXXXX&&&&@@@@@@@@@
# Sample Inputs and Outputs
## Gradient 1
### Input
40 30
.,:;xX&@
radial 20 15 20
### Output
(shown above, in **Output Description**)
## Gradient 2
Notice how the colours appear in the reverse order, as the end point is to the *left* of the start point.
### Input
60 30
'"^+$
linear 30 30 0 0
### Output
$$$$$$$$$$$++++++++++^^^^^^^^^^""""""""""'''''''''
$$$$$$$$$$++++++++++^^^^^^^^^^""""""""""'''''''''
$$$$$$$$$++++++++++^^^^^^^^^^""""""""""'''''''''
$$$$$$$$++++++++++^^^^^^^^^^""""""""""'''''''''
$$$$$$$++++++++++^^^^^^^^^^""""""""""'''''''''
$$$$$$++++++++++^^^^^^^^^^""""""""""'''''''''
$$$$$++++++++++^^^^^^^^^^""""""""""'''''''''
$$$$++++++++++^^^^^^^^^^""""""""""'''''''''
$$$++++++++++^^^^^^^^^^""""""""""'''''''''
$$++++++++++^^^^^^^^^^""""""""""'''''''''
$++++++++++^^^^^^^^^^""""""""""'''''''''
++++++++++^^^^^^^^^^""""""""""'''''''''
+++++++++^^^^^^^^^^""""""""""'''''''''
++++++++^^^^^^^^^^""""""""""'''''''''
+++++++^^^^^^^^^^""""""""""'''''''''
++++++^^^^^^^^^^""""""""""'''''''''
+++++^^^^^^^^^^""""""""""'''''''''
++++^^^^^^^^^^""""""""""'''''''''
+++^^^^^^^^^^""""""""""'''''''''
++^^^^^^^^^^""""""""""'''''''''
+^^^^^^^^^^""""""""""'''''''''
^^^^^^^^^^""""""""""'''''''''
^^^^^^^^^""""""""""'''''''''
^^^^^^^^""""""""""'''''''''
^^^^^^^""""""""""'''''''''
^^^^^^""""""""""'''''''''
^^^^^""""""""""'''''''''
^^^^""""""""""'''''''''
^^^""""""""""'''''''''
^^""""""""""'''''''''
## Gradient 3
The gradient start/end/centre points don't have to be inside the grid!
### Input
40 40
aaabcccdeeefggg
radial -10 20 60
### Output
ccccccccccdddddeeeeeeeeeeeeeeeffffgggggg
cccccccccccdddddeeeeeeeeeeeeeefffffggggg
ccccccccccccdddddeeeeeeeeeeeeeeffffggggg<|fim▁hole|> cccccccccccccccddddeeeeeeeeeeeeeffffgggg
cccccccccccccccdddddeeeeeeeeeeeeeffffggg
bcccccccccccccccddddeeeeeeeeeeeeeffffggg
bbccccccccccccccdddddeeeeeeeeeeeeffffggg
bbbccccccccccccccddddeeeeeeeeeeeeffffggg
bbbbcccccccccccccddddeeeeeeeeeeeeeffffgg
bbbbcccccccccccccddddeeeeeeeeeeeeeffffgg
bbbbbcccccccccccccddddeeeeeeeeeeeeffffgg
abbbbcccccccccccccddddeeeeeeeeeeeeffffgg
abbbbbccccccccccccddddeeeeeeeeeeeeffffgg
aabbbbccccccccccccddddeeeeeeeeeeeeffffgg
aabbbbccccccccccccddddeeeeeeeeeeeeffffgg
aabbbbccccccccccccddddeeeeeeeeeeeeffffgg
aabbbbccccccccccccddddeeeeeeeeeeeeffffgg
aabbbbccccccccccccddddeeeeeeeeeeeeffffgg
aabbbbccccccccccccddddeeeeeeeeeeeeffffgg
aabbbbccccccccccccddddeeeeeeeeeeeeffffgg
aabbbbccccccccccccddddeeeeeeeeeeeeffffgg
aabbbbccccccccccccddddeeeeeeeeeeeeffffgg
abbbbbccccccccccccddddeeeeeeeeeeeeffffgg
abbbbcccccccccccccddddeeeeeeeeeeeeffffgg
bbbbbcccccccccccccddddeeeeeeeeeeeeffffgg
bbbbcccccccccccccddddeeeeeeeeeeeeeffffgg
bbbbcccccccccccccddddeeeeeeeeeeeeeffffgg
bbbccccccccccccccddddeeeeeeeeeeeeffffggg
bbccccccccccccccdddddeeeeeeeeeeeeffffggg
bcccccccccccccccddddeeeeeeeeeeeeeffffggg
cccccccccccccccdddddeeeeeeeeeeeeeffffggg
cccccccccccccccddddeeeeeeeeeeeeeffffgggg
ccccccccccccccdddddeeeeeeeeeeeeeffffgggg
cccccccccccccdddddeeeeeeeeeeeeefffffgggg
cccccccccccccdddddeeeeeeeeeeeeeffffggggg
ccccccccccccdddddeeeeeeeeeeeeeeffffggggg
cccccccccccdddddeeeeeeeeeeeeeefffffggggg
# Notes
Got any cool challenge ideas? Submit them to /r/DailyProgrammer_Ideas!
"""
def main():
pass
if __name__ == "__main__":
main()<|fim▁end|> | cccccccccccccdddddeeeeeeeeeeeeeffffggggg
cccccccccccccdddddeeeeeeeeeeeeefffffgggg
ccccccccccccccdddddeeeeeeeeeeeeeffffgggg |
<|file_name|>query.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Utilities for querying the layout, as needed by the layout thread.
use app_units::Au;
use construct::ConstructionResult;
use context::LayoutContext;
use euclid::point::Point2D;
use euclid::rect::Rect;
use euclid::size::Size2D;
use flow::{self, Flow};
use fragment::{Fragment, FragmentBorderBoxIterator, SpecificFragmentInfo};
use gfx::display_list::{DisplayItemMetadata, DisplayList, OpaqueNode, ScrollOffsetMap};
use inline::LAST_FRAGMENT_OF_ELEMENT;
use ipc_channel::ipc::IpcSender;
use msg::constellation_msg::PipelineId;
use opaque_node::OpaqueNodeMethods;
use script_layout_interface::PendingImage;
use script_layout_interface::rpc::{ContentBoxResponse, ContentBoxesResponse};
use script_layout_interface::rpc::{HitTestResponse, LayoutRPC};
use script_layout_interface::rpc::{MarginStyleResponse, NodeGeometryResponse};
use script_layout_interface::rpc::{NodeOverflowResponse, OffsetParentResponse};
use script_layout_interface::rpc::{NodeScrollRootIdResponse, ResolvedStyleResponse, TextIndexResponse};
use script_layout_interface::wrapper_traits::{LayoutNode, ThreadSafeLayoutElement, ThreadSafeLayoutNode};
use script_traits::LayoutMsg as ConstellationMsg;
use script_traits::UntrustedNodeAddress;
use sequential;
use std::cmp::{min, max};
use std::mem;
use std::ops::Deref;
use std::sync::{Arc, Mutex};
use style::computed_values;
use style::context::{StyleContext, ThreadLocalStyleContext};
use style::dom::TElement;
use style::logical_geometry::{WritingMode, BlockFlowDirection, InlineBaseDirection};
use style::properties::{style_structs, PropertyId, PropertyDeclarationId, LonghandId};
use style::properties::longhands::{display, position};
use style::selector_parser::PseudoElement;
use style_traits::ToCss;
use style_traits::cursor::Cursor;
use webrender_traits::ClipId;
use wrapper::{LayoutNodeHelpers, LayoutNodeLayoutData};
/// Mutable data belonging to the LayoutThread.
///
/// This needs to be protected by a mutex so we can do fast RPCs.
pub struct LayoutThreadData {
/// The channel on which messages can be sent to the constellation.
pub constellation_chan: IpcSender<ConstellationMsg>,
/// The root stacking context.
pub display_list: Option<Arc<DisplayList>>,
/// A queued response for the union of the content boxes of a node.
pub content_box_response: Option<Rect<Au>>,
/// A queued response for the content boxes of a node.
pub content_boxes_response: Vec<Rect<Au>>,
/// A queued response for the client {top, left, width, height} of a node in pixels.
pub client_rect_response: Rect<i32>,
/// A queued response for the node at a given point
pub hit_test_response: (Option<DisplayItemMetadata>, bool),
/// A queued response for the scroll root id for a given node.
pub scroll_root_id_response: Option<ClipId>,
/// A pair of overflow property in x and y
pub overflow_response: NodeOverflowResponse,
/// A queued response for the scroll {top, left, width, height} of a node in pixels.
pub scroll_area_response: Rect<i32>,
/// A queued response for the resolved style property of an element.
pub resolved_style_response: String,
/// A queued response for the offset parent/rect of a node.
pub offset_parent_response: OffsetParentResponse,
/// A queued response for the offset parent/rect of a node.
pub margin_style_response: MarginStyleResponse,
/// Scroll offsets of stacking contexts. This will only be populated if WebRender is in use.
pub stacking_context_scroll_offsets: ScrollOffsetMap,
    /// Index in a text fragment. We need this to determine the insertion point.
pub text_index_response: TextIndexResponse,
/// A list of images requests that need to be initiated.
pub pending_images: Vec<PendingImage>,
/// A queued response for the list of nodes at a given point.
pub nodes_from_point_response: Vec<UntrustedNodeAddress>,
}
pub struct LayoutRPCImpl(pub Arc<Mutex<LayoutThreadData>>);
// https://drafts.csswg.org/cssom-view/#overflow-directions
fn overflow_direction(writing_mode: &WritingMode) -> OverflowDirection {
match (writing_mode.block_flow_direction(), writing_mode.inline_base_direction()) {
(BlockFlowDirection::TopToBottom, InlineBaseDirection::LeftToRight) |
(BlockFlowDirection::LeftToRight, InlineBaseDirection::LeftToRight) => OverflowDirection::RightAndDown,
(BlockFlowDirection::TopToBottom, InlineBaseDirection::RightToLeft) |
(BlockFlowDirection::RightToLeft, InlineBaseDirection::LeftToRight) => OverflowDirection::LeftAndDown,
(BlockFlowDirection::RightToLeft, InlineBaseDirection::RightToLeft) => OverflowDirection::LeftAndUp,
(BlockFlowDirection::LeftToRight, InlineBaseDirection::RightToLeft) => OverflowDirection::RightAndUp
}
}
impl LayoutRPC for LayoutRPCImpl {
// The neat thing here is that in order to answer the following two queries we only
// need to compare nodes for equality. Thus we can safely work only with `OpaqueNode`.
fn content_box(&self) -> ContentBoxResponse {
let &LayoutRPCImpl(ref rw_data) = self;
let rw_data = rw_data.lock().unwrap();
ContentBoxResponse(rw_data.content_box_response)
}
/// Requests the dimensions of all the content boxes, as in the `getClientRects()` call.
fn content_boxes(&self) -> ContentBoxesResponse {
let &LayoutRPCImpl(ref rw_data) = self;
let rw_data = rw_data.lock().unwrap();
ContentBoxesResponse(rw_data.content_boxes_response.clone())
}
/// Requests the node containing the point of interest.
fn hit_test(&self) -> HitTestResponse {
let &LayoutRPCImpl(ref rw_data) = self;
let rw_data = rw_data.lock().unwrap();
let &(ref result, update_cursor) = &rw_data.hit_test_response;
if update_cursor {
// Compute the new cursor.
let cursor = match *result {
None => Cursor::Default,
Some(dim) => dim.pointing.unwrap(),
};
rw_data.constellation_chan.send(ConstellationMsg::SetCursor(cursor)).unwrap();
}
HitTestResponse {
node_address: result.map(|dim| dim.node.to_untrusted_node_address()),
}
}
fn nodes_from_point_response(&self) -> Vec<UntrustedNodeAddress> {
let &LayoutRPCImpl(ref rw_data) = self;
let rw_data = rw_data.lock().unwrap();
rw_data.nodes_from_point_response.clone()
}
fn node_geometry(&self) -> NodeGeometryResponse {
let &LayoutRPCImpl(ref rw_data) = self;
let rw_data = rw_data.lock().unwrap();
NodeGeometryResponse {
client_rect: rw_data.client_rect_response
}
}
fn node_overflow(&self) -> NodeOverflowResponse {
NodeOverflowResponse(self.0.lock().unwrap().overflow_response.0)
}
fn node_scroll_area(&self) -> NodeGeometryResponse {
NodeGeometryResponse {
client_rect: self.0.lock().unwrap().scroll_area_response
}
}
fn node_scroll_root_id(&self) -> NodeScrollRootIdResponse {
NodeScrollRootIdResponse(self.0.lock()
.unwrap().scroll_root_id_response
.expect("scroll_root_id is not correctly fetched"))
}
/// Retrieves the resolved value for a CSS style property.
fn resolved_style(&self) -> ResolvedStyleResponse {
let &LayoutRPCImpl(ref rw_data) = self;
let rw_data = rw_data.lock().unwrap();
ResolvedStyleResponse(rw_data.resolved_style_response.clone())
}
fn offset_parent(&self) -> OffsetParentResponse {
let &LayoutRPCImpl(ref rw_data) = self;
let rw_data = rw_data.lock().unwrap();
rw_data.offset_parent_response.clone()
}
fn margin_style(&self) -> MarginStyleResponse {
let &LayoutRPCImpl(ref rw_data) = self;
let rw_data = rw_data.lock().unwrap();
rw_data.margin_style_response.clone()
}
fn text_index(&self) -> TextIndexResponse {
let &LayoutRPCImpl(ref rw_data) = self;
let rw_data = rw_data.lock().unwrap();
rw_data.text_index_response.clone()
}
fn pending_images(&self) -> Vec<PendingImage> {
let &LayoutRPCImpl(ref rw_data) = self;
let mut rw_data = rw_data.lock().unwrap();
mem::replace(&mut rw_data.pending_images, vec![])
}
}
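/// Iterator that accumulates the union of the border boxes of every fragment
/// belonging to a given node; used to answer content-box queries.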
struct UnioningFragmentBorderBoxIterator {
node_address: OpaqueNode,
rect: Option<Rect<Au>>,
}
impl UnioningFragmentBorderBoxIterator {
fn new(node_address: OpaqueNode) -> UnioningFragmentBorderBoxIterator {
UnioningFragmentBorderBoxIterator {
node_address: node_address,
rect: None
}
}
}
impl FragmentBorderBoxIterator for UnioningFragmentBorderBoxIterator {
fn process(&mut self, _: &Fragment, _: i32, border_box: &Rect<Au>) {
self.rect = match self.rect {
Some(rect) => {
Some(rect.union(border_box))
}
None => {
Some(*border_box)
}
};
}
fn should_process(&mut self, fragment: &Fragment) -> bool {
fragment.contains_node(self.node_address)
}
}
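/// Iterator that collects the border box of every fragment belonging to a
/// given node; used to answer content-boxes (`getClientRects()`-style) queries.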
struct CollectingFragmentBorderBoxIterator {
node_address: OpaqueNode,
rects: Vec<Rect<Au>>,
}
impl CollectingFragmentBorderBoxIterator {
fn new(node_address: OpaqueNode) -> CollectingFragmentBorderBoxIterator {
CollectingFragmentBorderBoxIterator {
node_address: node_address,
rects: Vec::new(),
}
}
}
impl FragmentBorderBoxIterator for CollectingFragmentBorderBoxIterator {
fn process(&mut self, _: &Fragment, _: i32, border_box: &Rect<Au>) {
self.rects.push(*border_box);
}
fn should_process(&mut self, fragment: &Fragment) -> bool {
fragment.contains_node(self.node_address)
}
}
enum Side {
Left,
Right,
Bottom,
Top
}
enum MarginPadding {
Margin,
Padding
}
enum PositionProperty {
Left,
Right,
Top,
Bottom,
Width,
Height,
}
#[derive(Debug)]<|fim▁hole|> LeftAndDown,
LeftAndUp,
RightAndUp,
}
struct PositionRetrievingFragmentBorderBoxIterator {
node_address: OpaqueNode,
result: Option<Au>,
position: Point2D<Au>,
property: PositionProperty,
}
impl PositionRetrievingFragmentBorderBoxIterator {
fn new(node_address: OpaqueNode,
property: PositionProperty,
position: Point2D<Au>) -> PositionRetrievingFragmentBorderBoxIterator {
PositionRetrievingFragmentBorderBoxIterator {
node_address: node_address,
position: position,
property: property,
result: None,
}
}
}
impl FragmentBorderBoxIterator for PositionRetrievingFragmentBorderBoxIterator {
fn process(&mut self, fragment: &Fragment, _: i32, border_box: &Rect<Au>) {
let border_padding = fragment.border_padding.to_physical(fragment.style.writing_mode);
self.result =
Some(match self.property {
PositionProperty::Left => self.position.x,
PositionProperty::Top => self.position.y,
PositionProperty::Width => border_box.size.width - border_padding.horizontal(),
PositionProperty::Height => border_box.size.height - border_padding.vertical(),
// TODO: the following 2 calculations are completely wrong.
// They should return the difference between the parent's and this
// fragment's border boxes.
PositionProperty::Right => border_box.max_x() + self.position.x,
PositionProperty::Bottom => border_box.max_y() + self.position.y,
});
}
fn should_process(&mut self, fragment: &Fragment) -> bool {
fragment.contains_node(self.node_address)
}
}
struct MarginRetrievingFragmentBorderBoxIterator {
node_address: OpaqueNode,
result: Option<Au>,
writing_mode: WritingMode,
margin_padding: MarginPadding,
side: Side,
}
impl MarginRetrievingFragmentBorderBoxIterator {
fn new(node_address: OpaqueNode, side: Side, margin_padding:
MarginPadding, writing_mode: WritingMode) -> MarginRetrievingFragmentBorderBoxIterator {
MarginRetrievingFragmentBorderBoxIterator {
node_address: node_address,
side: side,
margin_padding: margin_padding,
result: None,
writing_mode: writing_mode,
}
}
}
impl FragmentBorderBoxIterator for MarginRetrievingFragmentBorderBoxIterator {
fn process(&mut self, fragment: &Fragment, _: i32, _: &Rect<Au>) {
let rect = match self.margin_padding {
MarginPadding::Margin => &fragment.margin,
MarginPadding::Padding => &fragment.border_padding
};
self.result = Some(match self.side {
Side::Left => rect.left(self.writing_mode),
Side::Right => rect.right(self.writing_mode),
Side::Bottom => rect.bottom(self.writing_mode),
Side::Top => rect.top(self.writing_mode)
});
}
fn should_process(&mut self, fragment: &Fragment) -> bool {
fragment.contains_node(self.node_address)
}
}
pub fn process_content_box_request<N: LayoutNode>(
requested_node: N, layout_root: &mut Flow) -> Option<Rect<Au>> {
// FIXME(pcwalton): This has not been updated to handle the stacking context relative
// stuff. So the position is wrong in most cases.
let mut iterator = UnioningFragmentBorderBoxIterator::new(requested_node.opaque());
sequential::iterate_through_flow_tree_fragment_border_boxes(layout_root, &mut iterator);
iterator.rect
}
pub fn process_content_boxes_request<N: LayoutNode>(requested_node: N, layout_root: &mut Flow)
-> Vec<Rect<Au>> {
// FIXME(pcwalton): This has not been updated to handle the stacking context relative
// stuff. So the position is wrong in most cases.
let mut iterator = CollectingFragmentBorderBoxIterator::new(requested_node.opaque());
sequential::iterate_through_flow_tree_fragment_border_boxes(layout_root, &mut iterator);
iterator.rects
}
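/// Iterator that records the client rect (border box minus border widths) of
/// the fragment belonging to a given node.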
struct FragmentLocatingFragmentIterator {
node_address: OpaqueNode,
client_rect: Rect<i32>,
}
impl FragmentLocatingFragmentIterator {
fn new(node_address: OpaqueNode) -> FragmentLocatingFragmentIterator {
FragmentLocatingFragmentIterator {
node_address: node_address,
client_rect: Rect::zero()
}
}
}
struct UnioningFragmentScrollAreaIterator {
node_address: OpaqueNode,
union_rect: Rect<i32>,
origin_rect: Rect<i32>,
level: Option<i32>,
is_child: bool,
overflow_direction: OverflowDirection
}
impl UnioningFragmentScrollAreaIterator {
fn new(node_address: OpaqueNode) -> UnioningFragmentScrollAreaIterator {
UnioningFragmentScrollAreaIterator {
node_address: node_address,
union_rect: Rect::zero(),
origin_rect: Rect::zero(),
level: None,
is_child: false,
overflow_direction: OverflowDirection::RightAndDown
}
}
}
struct NodeOffsetBoxInfo {
offset: Point2D<Au>,
rectangle: Rect<Au>,
}
struct ParentBorderBoxInfo {
node_address: OpaqueNode,
origin: Point2D<Au>,
}
struct ParentOffsetBorderBoxIterator {
node_address: OpaqueNode,
has_processed_node: bool,
node_offset_box: Option<NodeOffsetBoxInfo>,
parent_nodes: Vec<Option<ParentBorderBoxInfo>>,
}
impl ParentOffsetBorderBoxIterator {
fn new(node_address: OpaqueNode) -> ParentOffsetBorderBoxIterator {
ParentOffsetBorderBoxIterator {
node_address: node_address,
has_processed_node: false,
node_offset_box: None,
parent_nodes: Vec::new(),
}
}
}
impl FragmentBorderBoxIterator for FragmentLocatingFragmentIterator {
fn process(&mut self, fragment: &Fragment, _: i32, border_box: &Rect<Au>) {
let style_structs::Border {
border_top_width: top_width,
border_right_width: right_width,
border_bottom_width: bottom_width,
border_left_width: left_width,
..
} = *fragment.style.get_border();
self.client_rect.origin.y = top_width.to_px();
self.client_rect.origin.x = left_width.to_px();
self.client_rect.size.width = (border_box.size.width - left_width - right_width).to_px();
self.client_rect.size.height = (border_box.size.height - top_width - bottom_width).to_px();
}
fn should_process(&mut self, fragment: &Fragment) -> bool {
fragment.node == self.node_address
}
}
// https://drafts.csswg.org/cssom-view/#scrolling-area
impl FragmentBorderBoxIterator for UnioningFragmentScrollAreaIterator {
fn process(&mut self, fragment: &Fragment, level: i32, border_box: &Rect<Au>) {
        // When smaller child elements contain less padding than the parent,
        // a union of the two elements' padding rectangles could result in an
        // unwanted increase in size. To work around this, we store the
        // original element's padding rectangle as `origin_rect` and the union
        // of all child elements' padding and margin rectangles as `union_rect`.
let style_structs::Border {
border_top_width: top_border,
border_right_width: right_border,
border_bottom_width: bottom_border,
border_left_width: left_border,
..
} = *fragment.style.get_border();
let right_padding = (border_box.size.width - right_border - left_border).to_px();
let bottom_padding = (border_box.size.height - bottom_border - top_border).to_px();
let top_padding = top_border.to_px();
let left_padding = left_border.to_px();
match self.level {
Some(start_level) if level <= start_level => { self.is_child = false; }
Some(_) => {
let padding = Rect::new(Point2D::new(left_padding, top_padding),
Size2D::new(right_padding, bottom_padding));
let top_margin = fragment.margin.top(fragment.style.writing_mode).to_px();
let left_margin = fragment.margin.left(fragment.style.writing_mode).to_px();
let bottom_margin = fragment.margin.bottom(fragment.style.writing_mode).to_px();
let right_margin = fragment.margin.right(fragment.style.writing_mode).to_px();
let margin = Rect::new(Point2D::new(left_margin, top_margin),
Size2D::new(right_margin, bottom_margin));
self.union_rect = self.union_rect.union(&margin).union(&padding);
}
None => {
self.level = Some(level);
self.is_child = true;
self.overflow_direction = overflow_direction(&fragment.style.writing_mode);
self.origin_rect = Rect::new(Point2D::new(left_padding, top_padding),
Size2D::new(right_padding, bottom_padding));
},
};
}
fn should_process(&mut self, fragment: &Fragment) -> bool {
fragment.contains_node(self.node_address) || self.is_child
}
}
// https://drafts.csswg.org/cssom-view/#extensions-to-the-htmlelement-interface
impl FragmentBorderBoxIterator for ParentOffsetBorderBoxIterator {
fn process(&mut self, fragment: &Fragment, level: i32, border_box: &Rect<Au>) {
if self.node_offset_box.is_none() {
// We haven't found the node yet, so we're still looking
// for its parent. Remove all nodes at this level or
// higher, as they can't be parents of this node.
self.parent_nodes.truncate(level as usize);
assert_eq!(self.parent_nodes.len(), level as usize,
"Skipped at least one level in the flow tree!");
}
if !fragment.is_primary_fragment() {
// This fragment doesn't correspond to anything worth
// taking measurements from.
if self.node_offset_box.is_none() {
// If this is the only fragment in the flow, we need to
// do this to avoid failing the above assertion.
self.parent_nodes.push(None);
}
return;
}
if fragment.node == self.node_address {
// Found the fragment in the flow tree that matches the
// DOM node being looked for.
assert!(self.node_offset_box.is_none(),
"Node was being treated as inline, but it has an associated fragment!");
self.has_processed_node = true;
self.node_offset_box = Some(NodeOffsetBoxInfo {
offset: border_box.origin,
rectangle: *border_box,
});
// offsetParent returns null if the node is fixed.
if fragment.style.get_box().position == computed_values::position::T::fixed {
self.parent_nodes.clear();
}
} else if let Some(node) = fragment.inline_context.as_ref().and_then(|inline_context| {
inline_context.nodes.iter().find(|node| node.address == self.node_address)
}) {
// TODO: Handle cases where the `offsetParent` is an inline
// element. This will likely be impossible until
// https://github.com/servo/servo/issues/13982 is fixed.
// Found a fragment in the flow tree whose inline context
// contains the DOM node we're looking for, i.e. the node
// is inline and contains this fragment.
match self.node_offset_box {
Some(NodeOffsetBoxInfo { ref mut rectangle, .. }) => {
*rectangle = rectangle.union(border_box);
},
None => {
// https://github.com/servo/servo/issues/13982 will
// cause this assertion to fail sometimes, so it's
// commented out for now.
/*assert!(node.flags.contains(FIRST_FRAGMENT_OF_ELEMENT),
"First fragment of inline node found wasn't its first fragment!");*/
self.node_offset_box = Some(NodeOffsetBoxInfo {
offset: border_box.origin,
rectangle: *border_box,
});
},
}
if node.flags.contains(LAST_FRAGMENT_OF_ELEMENT) {
self.has_processed_node = true;
}
} else if self.node_offset_box.is_none() {
// TODO(gw): Is there a less fragile way of checking whether this
// fragment is the body element, rather than just checking that
// it's at level 1 (below the root node)?
let is_body_element = level == 1;
let is_valid_parent = match (is_body_element,
fragment.style.get_box().position,
&fragment.specific) {
// Spec says it's valid if any of these are true:
// 1) Is the body element
// 2) Is static position *and* is a table or table cell
// 3) Is not static position
(true, _, _) |
(false, computed_values::position::T::static_, &SpecificFragmentInfo::Table) |
(false, computed_values::position::T::static_, &SpecificFragmentInfo::TableCell) |
(false, computed_values::position::T::absolute, _) |
(false, computed_values::position::T::relative, _) |
(false, computed_values::position::T::fixed, _) => true,
// Otherwise, it's not a valid parent
(false, computed_values::position::T::static_, _) => false,
};
let parent_info = if is_valid_parent {
let border_width = fragment.border_width().to_physical(fragment.style.writing_mode);
Some(ParentBorderBoxInfo {
node_address: fragment.node,
origin: border_box.origin + Point2D::new(border_width.left, border_width.top),
})
} else {
None
};
self.parent_nodes.push(parent_info);
}
}
fn should_process(&mut self, _: &Fragment) -> bool {
!self.has_processed_node
}
}
pub fn process_node_geometry_request<N: LayoutNode>(requested_node: N, layout_root: &mut Flow)
-> Rect<i32> {
let mut iterator = FragmentLocatingFragmentIterator::new(requested_node.opaque());
sequential::iterate_through_flow_tree_fragment_border_boxes(layout_root, &mut iterator);
iterator.client_rect
}
pub fn process_node_scroll_root_id_request<N: LayoutNode>(id: PipelineId,
requested_node: N)
-> ClipId {
let layout_node = requested_node.to_threadsafe();
layout_node.generate_scroll_root_id(id)
}
pub fn process_node_scroll_area_request<N: LayoutNode>(requested_node: N, layout_root: &mut Flow)
-> Rect<i32> {
let mut iterator = UnioningFragmentScrollAreaIterator::new(requested_node.opaque());
sequential::iterate_through_flow_tree_fragment_border_boxes(layout_root, &mut iterator);
match iterator.overflow_direction {
OverflowDirection::RightAndDown => {
let right = max(iterator.union_rect.size.width, iterator.origin_rect.size.width);
let bottom = max(iterator.union_rect.size.height, iterator.origin_rect.size.height);
Rect::new(iterator.origin_rect.origin, Size2D::new(right, bottom))
},
OverflowDirection::LeftAndDown => {
let bottom = max(iterator.union_rect.size.height, iterator.origin_rect.size.height);
let left = max(iterator.union_rect.origin.x, iterator.origin_rect.origin.x);
Rect::new(Point2D::new(left, iterator.origin_rect.origin.y),
Size2D::new(iterator.origin_rect.size.width, bottom))
},
OverflowDirection::LeftAndUp => {
let top = min(iterator.union_rect.origin.y, iterator.origin_rect.origin.y);
let left = min(iterator.union_rect.origin.x, iterator.origin_rect.origin.x);
Rect::new(Point2D::new(left, top), iterator.origin_rect.size)
},
OverflowDirection::RightAndUp => {
let top = min(iterator.union_rect.origin.y, iterator.origin_rect.origin.y);
let right = max(iterator.union_rect.size.width, iterator.origin_rect.size.width);
Rect::new(Point2D::new(iterator.origin_rect.origin.x, top),
Size2D::new(right, iterator.origin_rect.size.height))
}
}
}
/// Return the resolved value of property for a given (pseudo)element.
/// https://drafts.csswg.org/cssom/#resolved-value
pub fn process_resolved_style_request<'a, N>(context: &LayoutContext,
node: N,
pseudo: &Option<PseudoElement>,
property: &PropertyId,
layout_root: &mut Flow) -> String
where N: LayoutNode,
{
use style::traversal::resolve_style;
let element = node.as_element().unwrap();
// We call process_resolved_style_request after performing a whole-document
// traversal, so in the common case, the element is styled.
if element.get_data().is_some() {
return process_resolved_style_request_internal(node, pseudo, property, layout_root);
}
// However, the element may be in a display:none subtree. The style system
// has a mechanism to give us that within a defined scope (after which point
// it's cleared to maintained style system invariants).
let mut tlc = ThreadLocalStyleContext::new(&context.style_context);
let mut context = StyleContext {
shared: &context.style_context,
thread_local: &mut tlc,
};
let mut result = None;
let ensure = |el: N::ConcreteElement| el.as_node().initialize_data();
let clear = |el: N::ConcreteElement| el.as_node().clear_data();
resolve_style(&mut context, element, &ensure, &clear, |_: &_| {
let s = process_resolved_style_request_internal(node, pseudo, property, layout_root);
result = Some(s);
});
result.unwrap()
}
/// The primary resolution logic, which assumes that the element is styled.
fn process_resolved_style_request_internal<'a, N>(requested_node: N,
pseudo: &Option<PseudoElement>,
property: &PropertyId,
layout_root: &mut Flow) -> String
where N: LayoutNode,
{
let layout_el = requested_node.to_threadsafe().as_element().unwrap();
let layout_el = match *pseudo {
Some(PseudoElement::Before) => layout_el.get_before_pseudo(),
Some(PseudoElement::After) => layout_el.get_after_pseudo(),
Some(PseudoElement::DetailsSummary) |
Some(PseudoElement::DetailsContent) |
Some(PseudoElement::Selection) => None,
_ => Some(layout_el)
};
let layout_el = match layout_el {
None => {
// The pseudo doesn't exist, return nothing. Chrome seems to query
// the element itself in this case, Firefox uses the resolved value.
// https://www.w3.org/Bugs/Public/show_bug.cgi?id=29006
return String::new();
}
Some(layout_el) => layout_el
};
let style = &*layout_el.resolved_style();
let longhand_id = match *property {
PropertyId::Longhand(id) => id,
// Firefox returns blank strings for the computed value of shorthands,
// so this should be web-compatible.
PropertyId::Shorthand(_) => return String::new(),
PropertyId::Custom(ref name) => {
return style.computed_value_to_string(PropertyDeclarationId::Custom(name))
}
};
let positioned = match style.get_box().position {
position::computed_value::T::relative |
/*position::computed_value::T::sticky |*/
position::computed_value::T::fixed |
position::computed_value::T::absolute => true,
_ => false
};
//TODO: determine whether requested property applies to the element.
// eg. width does not apply to non-replaced inline elements.
// Existing browsers disagree about when left/top/right/bottom apply
// (Chrome seems to think they never apply and always returns resolved values).
// There are probably other quirks.
let applies = true;
fn used_value_for_position_property<N: LayoutNode>(
layout_el: <N::ConcreteThreadSafeLayoutNode as ThreadSafeLayoutNode>::ConcreteThreadSafeLayoutElement,
layout_root: &mut Flow,
requested_node: N,
longhand_id: LonghandId) -> String {
let maybe_data = layout_el.borrow_layout_data();
let position = maybe_data.map_or(Point2D::zero(), |data| {
match (*data).flow_construction_result {
ConstructionResult::Flow(ref flow_ref, _) =>
flow::base(flow_ref.deref()).stacking_relative_position,
// TODO(dzbarsky) search parents until we find node with a flow ref.
// https://github.com/servo/servo/issues/8307
_ => Point2D::zero()
}
});
let property = match longhand_id {
LonghandId::Bottom => PositionProperty::Bottom,
LonghandId::Top => PositionProperty::Top,
LonghandId::Left => PositionProperty::Left,
LonghandId::Right => PositionProperty::Right,
LonghandId::Width => PositionProperty::Width,
LonghandId::Height => PositionProperty::Height,
_ => unreachable!()
};
let mut iterator =
PositionRetrievingFragmentBorderBoxIterator::new(requested_node.opaque(),
property,
position);
sequential::iterate_through_flow_tree_fragment_border_boxes(layout_root,
&mut iterator);
iterator.result.map(|r| r.to_css_string()).unwrap_or(String::new())
}
// TODO: we will return neither the computed nor used value for margin and padding.
match longhand_id {
LonghandId::MarginBottom | LonghandId::MarginTop |
LonghandId::MarginLeft | LonghandId::MarginRight |
LonghandId::PaddingBottom | LonghandId::PaddingTop |
LonghandId::PaddingLeft | LonghandId::PaddingRight
if applies && style.get_box().display != display::computed_value::T::none => {
let (margin_padding, side) = match longhand_id {
LonghandId::MarginBottom => (MarginPadding::Margin, Side::Bottom),
LonghandId::MarginTop => (MarginPadding::Margin, Side::Top),
LonghandId::MarginLeft => (MarginPadding::Margin, Side::Left),
LonghandId::MarginRight => (MarginPadding::Margin, Side::Right),
LonghandId::PaddingBottom => (MarginPadding::Padding, Side::Bottom),
LonghandId::PaddingTop => (MarginPadding::Padding, Side::Top),
LonghandId::PaddingLeft => (MarginPadding::Padding, Side::Left),
LonghandId::PaddingRight => (MarginPadding::Padding, Side::Right),
_ => unreachable!()
};
let mut iterator =
MarginRetrievingFragmentBorderBoxIterator::new(requested_node.opaque(),
side,
margin_padding,
style.writing_mode);
sequential::iterate_through_flow_tree_fragment_border_boxes(layout_root,
&mut iterator);
iterator.result.map(|r| r.to_css_string()).unwrap_or(String::new())
},
LonghandId::Bottom | LonghandId::Top | LonghandId::Right | LonghandId::Left
if applies && positioned && style.get_box().display !=
display::computed_value::T::none => {
used_value_for_position_property(layout_el, layout_root, requested_node, longhand_id)
}
LonghandId::Width | LonghandId::Height
if applies && style.get_box().display !=
display::computed_value::T::none => {
used_value_for_position_property(layout_el, layout_root, requested_node, longhand_id)
}
// FIXME: implement used value computation for line-height
_ => {
style.computed_value_to_string(PropertyDeclarationId::Longhand(longhand_id))
}
}
}
pub fn process_offset_parent_query<N: LayoutNode>(requested_node: N, layout_root: &mut Flow)
-> OffsetParentResponse {
let mut iterator = ParentOffsetBorderBoxIterator::new(requested_node.opaque());
sequential::iterate_through_flow_tree_fragment_border_boxes(layout_root, &mut iterator);
let node_offset_box = iterator.node_offset_box;
let parent_info = iterator.parent_nodes.into_iter().rev().filter_map(|info| info).next();
match (node_offset_box, parent_info) {
(Some(node_offset_box), Some(parent_info)) => {
let origin = node_offset_box.offset - parent_info.origin;
let size = node_offset_box.rectangle.size;
OffsetParentResponse {
node_address: Some(parent_info.node_address.to_untrusted_node_address()),
rect: Rect::new(origin, size),
}
}
_ => {
OffsetParentResponse::empty()
}
}
}
pub fn process_node_overflow_request<N: LayoutNode>(requested_node: N) -> NodeOverflowResponse {
let layout_node = requested_node.to_threadsafe();
let style = &*layout_node.as_element().unwrap().resolved_style();
let style_box = style.get_box();
    NodeOverflowResponse(Some(Point2D::new(style_box.overflow_x, style_box.overflow_y)))
}
pub fn process_margin_style_query<N: LayoutNode>(requested_node: N)
-> MarginStyleResponse {
let layout_node = requested_node.to_threadsafe();
let style = &*layout_node.as_element().unwrap().resolved_style();
let margin = style.get_margin();
MarginStyleResponse {
top: margin.margin_top,
right: margin.margin_right,
bottom: margin.margin_bottom,
left: margin.margin_left,
}
}<|fim▁end|> | enum OverflowDirection {
RightAndDown, |
<|file_name|>fastcall-inreg.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Checks if the "fastcall" calling convention marks function arguments
// as "inreg" like the C/C++ compilers for the platforms.
// x86 only.
// ignore-aarch64
// ignore-aarch64_be
// ignore-arm
// ignore-armeb
// ignore-avr
// ignore-bpfel
// ignore-bpfeb<|fim▁hole|>// ignore-msp430
// ignore-powerpc64
// ignore-powerpc64le
// ignore-powerpc
// ignore-r600
// ignore-amdgcn
// ignore-sparc
// ignore-sparc64
// ignore-sparcv9
// ignore-sparcel
// ignore-s390x
// ignore-tce
// ignore-thumb
// ignore-thumbeb
// ignore-x86_64
// ignore-xcore
// ignore-nvptx
// ignore-nvptx64
// ignore-le32
// ignore-le64
// ignore-amdil
// ignore-amdil64
// ignore-hsail
// ignore-hsail64
// ignore-spir
// ignore-spir64
// ignore-kalimba
// ignore-shave
// ignore-wasm32
// ignore-wasm64
// ignore-emscripten
// compile-flags: -C no-prepopulate-passes
#![crate_type = "lib"]
pub mod tests {
// CHECK: @f1(i32 inreg %arg0, i32 inreg %arg1, i32 %arg2)
#[no_mangle]
pub extern "fastcall" fn f1(_: i32, _: i32, _: i32) {}
// CHECK: @f2(i32* inreg %arg0, i32* inreg %arg1, i32* %arg2)
#[no_mangle]
pub extern "fastcall" fn f2(_: *const i32, _: *const i32, _: *const i32) {}
// CHECK: @f3(float %arg0, i32 inreg %arg1, i32 inreg %arg2, i32 %arg3)
#[no_mangle]
pub extern "fastcall" fn f3(_: f32, _: i32, _: i32, _: i32) {}
// CHECK: @f4(i32 inreg %arg0, float %arg1, i32 inreg %arg2, i32 %arg3)
#[no_mangle]
pub extern "fastcall" fn f4(_: i32, _: f32, _: i32, _: i32) {}
// CHECK: @f5(i64 %arg0, i32 %arg1)
#[no_mangle]
pub extern "fastcall" fn f5(_: i64, _: i32) {}
// CHECK: @f6(i1 inreg zeroext %arg0, i32 inreg %arg1, i32 %arg2)
#[no_mangle]
pub extern "fastcall" fn f6(_: bool, _: i32, _: i32) {}
}<|fim▁end|> | // ignore-hexagon
// ignore-mips
// ignore-mips64 |
<|file_name|>pln_distrib.py<|end_file_name|><|fim▁begin|>'''
Implementation of the Double Pareto-Lognormal Distribution in scipy's
rv_continuous form.
'''
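# Example usage (sketch; the shape-parameter values below are arbitrary):
#
#   pln.pdf(2.0, 2.5, 0.0, 1.0)     # density at x=2 for alpha, nu, tau2
#   pln.rvs(2.5, 0.0, 1.0, size=8)  # variates via scipy's generic sampler
#
# ``pln`` is the distribution instance created at the bottom of this module.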
import numpy as np
from scipy.stats import rv_continuous, norm
def _pln_pdf(x, alpha, nu, tau2):
A1 = np.exp(alpha * nu + alpha ** 2 * tau2 / 2)
fofx = alpha * A1 * x ** (-alpha - 1) *\
norm.cdf((np.log(x) - nu - alpha * tau2) / np.sqrt(tau2))
return fofx<|fim▁hole|> A1 = np.exp(alpha * nu + alpha ** 2 * tau2 / 2)
term1 = norm.cdf((np.log(x) - nu) / np.sqrt(tau2))
term2 = x ** (-alpha) * A1 * \
norm.cdf((np.log(x) - nu - alpha * tau2) / np.sqrt(tau2))
return term1 - term2
def _pln_logpdf(x, alpha, nu, tau2):
return np.log(alpha) + alpha * nu + alpha * tau2 / 2 - \
(alpha + 1) * np.log(x) + \
norm.logcdf((np.log(x) - nu - alpha * tau2) / np.sqrt(tau2))
def _pln_rawmoments(r, alpha, nu, tau2):
if alpha > r:
return alpha / (alpha - r) * np.exp(r*nu + r**2.*tau2/2)
else:
return np.NaN
class pln_gen(rv_continuous):
def _pdf(self, x, alpha, nu, tau2):
return _pln_pdf(x, alpha, nu, tau2)
def _logpdf(self, x, alpha, nu, tau2):
return _pln_logpdf(x, alpha, nu, tau2)
def _cdf(self, x, alpha, nu, tau2):
return _pln_cdf(x, alpha, nu, tau2)
pln = pln_gen(name="pln", a=0.0)<|fim▁end|> |
def _pln_cdf(x, alpha, nu, tau2): |
<|file_name|>test_geometric.py<|end_file_name|><|fim▁begin|>import pytest
from .utils import *
import psi4
from qcengine.testing import using
@pytest.mark.parametrize('engine', [
pytest.param('optking'),
pytest.param('geometric', marks=using('geometric')),
]) # yapf: disable
@pytest.mark.parametrize('inp', [
pytest.param({'name': 'hf', 'options': {'scf_type': 'df'}, 'ref_ene' : -76.027032783717, 'ref_nuc': 9.300794299874}, id='rhf(df)'),
pytest.param({'name': 'hf', 'options': {'scf_type': 'pk'}, 'ref_ene' : -76.027053512764, 'ref_nuc': 9.300838770294}, id='rhf(pk)'),
pytest.param({'name': 'mp2', 'options': {'mp2_type': 'df'}, 'ref_ene' : -76.230938589591, 'ref_nuc': 9.133271168193}, id='mp2(df)'),
pytest.param({'name': 'mp2', 'options': {'mp2_type': 'conv'}, 'ref_ene' : -76.230989373502, 'ref_nuc': 9.133125471291}, id='mp2(conv)'),
pytest.param({'name': 'b3lyp', 'options': {'scf_type': 'df'}, 'ref_ene' : -76.420645414834, 'ref_nuc': 9.090397129492}, id='b3lyp'),
]) # yapf: disable
def test_h2o(inp, engine):
"""Optimization of the square water molecule"""
h2o = psi4.geometry("""
O
H 1 1.0
H 1 1.0 2 90.0
""")
psi4.set_options({'basis': 'cc-pvdz',
'g_convergence': 'gau_tight'
})
psi4.set_options(inp['options'])
e, wfn = psi4.optimize(inp['name'], return_wfn=True, engine=engine)
assert compare_values(inp['ref_ene'], e, 6)
assert compare_values(inp['ref_nuc'], h2o.nuclear_repulsion_energy(), 3)
@using('geometric')
@pytest.mark.parametrize('inp', [
pytest.param({'name': 'hf', 'options': {'scf_type': 'df'}, 'ref_ene' : -76.02079629252714, 'ref_nuc': 9.265341708725257}, id='rhf(df)'),
pytest.param({'name': 'hf', 'options': {'scf_type': 'pk'}, 'ref_ene' : -76.02082389228, 'ref_nuc': 9.26528625744628}, id='rhf(pk)'),
pytest.param({'name': 'mp2', 'options': {'mp2_type': 'df'}, 'ref_ene' : -76.22711819393223, 'ref_nuc': 9.09137805747361}, id='mp2(df)'),
pytest.param({'name': 'mp2', 'options': {'mp2_type': 'conv'}, 'ref_ene' : -76.2271678506303, 'ref_nuc': 9.091178486990861}, id='mp2(conv)'),
pytest.param({'name': 'b3lyp', 'options': {'scf_type': 'df'}, 'ref_ene' : -76.41632755714534, 'ref_nuc': 9.04535641436914}, id='b3lyp'),
]) # yapf: disable
def test_h2o_constrained(inp):
"""Constrained optimization of the square water molecule"""
h2o = psi4.geometry("""
O
H 1 1.0<|fim▁hole|> psi4.set_options({'basis': 'cc-pvdz',
'g_convergence': 'gau_tight'
})
psi4.set_options(inp['options'])
# geometric specific options
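    # geomeTRIC keywords: TRIC coordinates plus a 'set' constraint that holds
    # the H-O-H angle (zero-based atom indices 1, 0, 2) at 90 degrees.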
geometric_keywords = {
'coordsys' : 'tric',
'enforce' : 0.0,
'constraints' : {
'set' : [{'type' : 'angle',
'indices' : [1, 0, 2],
'value' : 90.0 }]
}
}
e, wfn = psi4.optimize(inp['name'], return_wfn=True, engine='geometric', optimizer_keywords=geometric_keywords)
assert compare_values(inp['ref_ene'], e, 6)
assert compare_values(inp['ref_nuc'], h2o.nuclear_repulsion_energy(), 3)<|fim▁end|> | H 1 1.0 2 90.0
""")
|
<|file_name|>main-footer.component.ts<|end_file_name|><|fim▁begin|>import { Component } from '@angular/core';<|fim▁hole|>@Component({
selector: 'main-footer',
template: `
<div id="main-footer" class="main-footer">
<div class="container">
<div class="row">
<div class="col-lg-12">
<div class="navbar-menu">
<span class="smiley">☺</span> Made by MATR Participants
</div>
</div>
</div>
</div>
</div>
`,
styles: [`
h1 {
margin: 0;
}
div.main-footer {
background-color:#0066B3;
color: white;
padding: 10px;
border: none;
border-radius: 0px;
position: absolute;
bottom: 0;
width: 100%;
/* Set the fixed height of the footer here */
line-height: 50px;
height: 70px;
}
div.navbar-menu {
text-align:center;
}
ul.navbar-menu {
list-style-type: none;
margin: 0;
padding: 0;
overflow: hidden;
}
li.navbar-menu {
display: block;
text-align: center;
text-decoration: none;
color:black;
}
.btn-main-custom {
text-align: center;
cursor: pointer;
line-height: 60px;
text-decoration: none;
padding: 0px 5px 0px 5px;
border: 0px solid #0066B3;
display: block;
color: #0066B3;
font-size: 14px;
font-weight: 400;
border-radius: 2px;
margin-right:10px;
min-width:50px;
}
.btn-main-custom:after {
content: '';
width: 0;
position: absolute;
-webkit-transition: .2s;
transition: .2s;
background: #0066B3;
left: 50%;
bottom: 0;
height: 5px;
}
.btn-main-custom:hover:after {
width: 100%;
left: 0;
}
a.navbar-logo {
text-decoration: none;
}
a.navbar-menu:hover, a.navbar-menu:visited {
background-color:transparent;
}
a.active {
color:black !important;
}
span.smiley {
font-size: 25px;
}
`]
})
export class MainFooterComponent {
}<|fim▁end|> | |
<|file_name|>GetInboxClinicalDocuments.java<|end_file_name|><|fim▁begin|><|fim▁hole|>import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import org.hl7.v3.CE;
/**
 * <p>Java class for anonymous complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="code" type="{urn:hl7-org:v3}CE" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
"code"
})
@XmlRootElement(name = "getInboxClinicalDocuments")
public class GetInboxClinicalDocuments {
protected CE code;
/**
     * Gets the value of the code property.
*
* @return
* possible object is
* {@link CE }
*
*/
public CE getCode() {
return code;
}
/**
     * Sets the value of the code property.
*
* @param value
* allowed object is
* {@link CE }
*
*/
public void setCode(CE value) {
this.code = value;
}
}<|fim▁end|> | package ch.docbox.ws.cdachservicesv2;
import javax.xml.bind.annotation.XmlAccessType; |
<|file_name|>p051.rs<|end_file_name|><|fim▁begin|>//! [Problem 51](https://projecteuler.net/problem=51) solver.
#![warn(bad_style,
unused, unused_extern_crates, unused_import_braces,
unused_qualifications, unused_results)]
#[macro_use(problem)] extern crate common;
extern crate integer;
extern crate prime;
use integer::Integer;
use prime::PrimeSet;
fn compute(num_value: usize) -> u64 {
let radix = 10;<|fim▁hole|> let ps = PrimeSet::new();
for p in &ps {
let ds = p.into_digits(radix as u64);
let hs = p.into_digit_histogram();
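        // Problem 51: for each digit value that occurs at least twice in p,
        // replace every occurrence with each larger digit and count how many
        // of the resulting numbers are also prime.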
for (d_src, &cnt) in hs.iter().enumerate() {
            // Skip digits that appear fewer than two times.
if cnt <= 1 { continue }
let mut num_prime = 1;
for d_dst in (d_src + 1 .. radix) {
if radix - d_dst < num_value - num_prime { break }
let it = ds.clone().map(|d| if d == (d_src as u64) { d_dst as u64 } else { d });
if ps.contains(Integer::from_digits(it, radix as u64)) {
num_prime += 1;
}
}
if num_prime >= num_value {
return p
}
}
}
unreachable!()
}
fn solve() -> String {
compute(8).to_string()
}
problem!("121313", solve);
#[cfg(test)]
mod tests {
#[test] fn seven() { assert_eq!(56003, super::compute(7)) }
}<|fim▁end|> | |
<|file_name|>Math.cpp<|end_file_name|><|fim▁begin|>//
// File: Math.cpp
// Author: John Barbero Unenge
// Date: 10/11/12.
//
// Copyright (c) 2012 Catch22. All rights reserved.
//<|fim▁hole|>// License: The following code is licensed under the Catch22-License
//
#include "Math.hpp"
#include <math.h>
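// Returns a heap-allocated unit vector in the direction of `vector`.
// A null pointer or the zero vector maps to (0, 0), avoiding a division
// by zero; the caller owns the returned object.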
Vector2d* Math::generateUnitVectorOf(Vector2d* vector)
{
if (vector == 0 || (vector->m_x == 0 && vector->m_y == 0)) {
return new Vector2d(0.0,0.0);
}
float scalar = sqrtf((vector->m_x * vector->m_x) + (vector->m_y * vector->m_y));
Vector2d* returnVector = new Vector2d(vector->m_x/scalar, vector->m_y/scalar);
return returnVector;
}
double Math::abs(double value)
{
return value > 0 ? value : -1 * value;
}<|fim▁end|> | // All code is my own except where credited to others.
// |
<|file_name|>net.rs<|end_file_name|><|fim▁begin|>use color::Colorf32;
use fastup::{self, Node, parse_for_first_node, parse_color};
use stats_once::{NET_DOWNLOAD_SPEED, NET_UPLOAD_SPEED};
use super::error_node;
use std::mem::swap;
#[derive(Debug, Copy, Clone, Default)]
pub struct Widget;
impl super::Widget for Widget {
fn expand(&self, args: Vec<String>) -> Node {
match args.len() {
4 | 6 => node_from_args(args),
8 => node_from_args(args), // debug only
_ => error_node("(ccdd44: net) takes 4 or 6 arguments \\(speed in KiB\\): color-cold\\|color-hot\\|lo-speed\\|hi-speed{\\|lo-speed-upload\\|hi-speed-upload}"),
}
}
}
fn node_from_args(args: Vec<String>) -> Node {
try_node_from_args(args).unwrap_or_else(|e| error_node(&e))
}
fn try_node_from_args(args: Vec<String>) -> Result<Node, String> {
assert!((args.len() == 4 || args.len() == 6) || args.len() == 8);
let cold = parse_color(&args[0]).map_err(escape_fastup)?;
let hot = parse_color(&args[1]).map_err(escape_fastup)?;
let parse_speed = |i| -> Result<Option<u64>, String> {
if args.len() <= i || args[i].trim().len() == 0 {
Ok(None)
} else {
let speed: f64 = args[i].parse().map_err(escape_fastup)?;
if speed < 0.0 {
Err(format!("speed cannot be a negative number, but got {}", speed))
} else {
Ok(Some((speed * 1024.0) as u64))
}
}
};
let lo_speed_down = parse_speed(2)?.unwrap();
let hi_speed_down = parse_speed(3)?.unwrap();
let lo_speed_up = parse_speed(4)?.unwrap_or(lo_speed_down);
let hi_speed_up = parse_speed(5)?.unwrap_or(hi_speed_down);
let speed_down = parse_speed(6)?.unwrap_or(*NET_DOWNLOAD_SPEED);
let speed_up = parse_speed(7)?.unwrap_or(*NET_UPLOAD_SPEED);
Ok(
net_node(
cold.into(),
hot.into(),
lo_speed_down,
hi_speed_down,
lo_speed_up,
hi_speed_up,
speed_down,
speed_up,
)
)
}
fn net_node(
cold: Colorf32,
hot: Colorf32,
lo_speed_down: u64,
hi_speed_down: u64,
lo_speed_up: u64,<|fim▁hole|> hi_speed_up: u64,
speed_down: u64,
speed_up: u64,
) -> Node
{
const BG_LIGHTNESS: f32 = 0.15;
const FG_LIGHTNESS: f32 = 0.4;
const IDLE_LIGHTNESS: f32 = 0.2;
const FAST_LIGHTNESS_BOOST: f32 = 0.3;
const PROGRESS_INDICATORS: &'static str = "▁▂▃▄▅▆▇";
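    // Speeds are mapped to [0, 1] against the configured lo/hi thresholds;
    // that ratio selects both the bar glyph from PROGRESS_INDICATORS and the
    // cold-to-hot color mix computed below.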
let ratio_of = |x, lo, hi| nan_to_zero(unmix(x as f32, (lo as f32, hi as f32)));
let ratio_down = ratio_of(speed_down, lo_speed_down, hi_speed_down);
let ratio_up = ratio_of(speed_up, lo_speed_up, hi_speed_up);
let (fast_speed, fast_ratio, fast_boost, boost_down, boost_up) = {
if speed_down == 0 && speed_up == 0 {
(0, 0.0, 0.0, 0.0, 0.0)
} else if (ratio_up - ratio_down).abs() < 1e-5 {
(speed_down, ratio_down, FAST_LIGHTNESS_BOOST, FAST_LIGHTNESS_BOOST, 0.0)
} else if ratio_up > ratio_down {
(speed_up, ratio_up, FAST_LIGHTNESS_BOOST, 0.0, FAST_LIGHTNESS_BOOST)
} else {
(speed_down, ratio_down, FAST_LIGHTNESS_BOOST, FAST_LIGHTNESS_BOOST, 0.0)
}
};
let fast_ratio = fast_ratio.max(0.0).min(1.0);
let ratio_down = ratio_down.max(0.0).min(1.0);
let ratio_up = ratio_up .max(0.0).min(1.0);
let bg_cold = cold.set_lightness(BG_LIGHTNESS);
let bg_hot = hot.set_lightness(BG_LIGHTNESS);
let bg = bg_cold.mix(bg_hot, fast_ratio).clamp_to_888();
let idle_for = |ratio: f32, boost: f32| {
let idle_cold = cold.set_lightness(IDLE_LIGHTNESS + boost);
let idle_hot = hot.set_lightness(IDLE_LIGHTNESS + boost);
idle_cold.mix(idle_hot, ratio).clamp_to_888()
};
let fg_for = |ratio: f32, boost: f32| {
if ratio == 0.0 {
idle_for(ratio, boost)
} else {
let fg_cold = cold.set_lightness(FG_LIGHTNESS + boost);
let fg_hot = hot.set_lightness(FG_LIGHTNESS + boost);
fg_cold.mix(fg_hot, ratio).clamp_to_888()
}
};
let indicator_for = |ratio: f32, reverse: bool| {
let len = PROGRESS_INDICATORS.chars().count() as f32;
let mut i = (len * ratio).floor();
if reverse { i = len - 1.0 - i }
let i = i.min(len - 1.0).max(0.0) as usize;
PROGRESS_INDICATORS.chars().nth(i).unwrap()
};
let styled_indicator_for = |ratio: f32, boost: f32, upside_down: bool| {
let mut fg = fg_for(ratio, boost);
let mut bg = bg;
if upside_down { swap(&mut fg, &mut bg) }
let indicator = indicator_for(ratio, upside_down);
format!("[{}:({}:{})]", bg, fg, indicator)
};
let indicator_down = styled_indicator_for(ratio_down, boost_down, true);
let indicator_up = styled_indicator_for(ratio_up, boost_up, false);
let fast_text = {
let fg_fast = fg_for(fast_ratio, fast_boost);
let text = format_byte_size(fast_speed);
let text = format!("{:>5}", text);
let text = escape_fastup(text);
format!("({}:{})", fg_fast, text)
};
let node = format!("[{}: \\ {}{}{} ]", bg, indicator_up, indicator_down, fast_text);
parse_for_first_node(&node).unwrap()
}
fn escape_fastup<T>(input: T) -> String
where T: ToString
{
fastup::escape_for_text(&input.to_string())
}
/// Format byte size into short and human readable form.
///
/// Shortness is the most important considering factor here.
/// The largest unit is 'G'.
//
// 0B
// 10B
// 100B
// 999B
// 0.9K
// 9.0K
// 10K
// 100K
// ...
//
// NOTE: There are NO spaces in front of the result.
fn format_byte_size(size: u64) -> String {
let (size, unit) = find_byte_size_unit(size);
if unit == 'B' {
format!("{}{}", size.floor(), unit)
} else {
if size < 10.0 {
format!("{:.1}{}", (size * 10.0).floor() / 10.0, unit)
} else {
format!("{}{}", size.floor(), unit)
}
}
}
fn find_byte_size_unit(size: u64) -> (f64, char) {
const UNITS: &'static str = "BKM";
let mut size = size as f64;
for unit in UNITS.chars() {
if size > 999.9 {
size /= 1024.0;
} else {
return (size, unit);
}
}
(size, 'G')
}
fn unmix(x: f32, range: (f32, f32)) -> f32 {
let (a, b) = range;
(x - a) / (b - a)
}
fn nan_to_zero(x: f32) -> f32 {
if x.is_nan() { 0.0 } else { x }
}<|fim▁end|> | |
<|file_name|>PrefPageOracle.java<|end_file_name|><|fim▁begin|>/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2017 Serge Rider ([email protected])
* Copyright (C) 2011-2012 Eugene Fradkin ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ext.oracle.views;
import org.eclipse.swt.SWT;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.widgets.*;
import org.jkiss.dbeaver.core.DBeaverCore;
import org.jkiss.dbeaver.ext.oracle.model.OracleConstants;
import org.jkiss.dbeaver.model.preferences.DBPPreferenceStore;
import org.jkiss.dbeaver.model.DBPDataSourceContainer;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.dbeaver.ui.preferences.PreferenceStoreDelegate;
import org.jkiss.dbeaver.ui.preferences.TargetPrefPage;
import org.jkiss.dbeaver.utils.PrefUtils;
/**
* PrefPageOracle
*/
public class PrefPageOracle extends TargetPrefPage
{
public static final String PAGE_ID = "org.jkiss.dbeaver.preferences.oracle.general"; //$NON-NLS-1$
<|fim▁hole|> public PrefPageOracle()
{
super();
setPreferenceStore(new PreferenceStoreDelegate(DBeaverCore.getGlobalPreferenceStore()));
}
@Override
protected boolean hasDataSourceSpecificOptions(DBPDataSourceContainer dataSourceDescriptor)
{
DBPPreferenceStore store = dataSourceDescriptor.getPreferenceStore();
return
store.contains(OracleConstants.PREF_EXPLAIN_TABLE_NAME) ||
store.contains(OracleConstants.PREF_SUPPORT_ROWID) ||
store.contains(OracleConstants.PREF_DBMS_OUTPUT)
;
}
@Override
protected boolean supportsDataSourceSpecificOptions()
{
return true;
}
@Override
protected Control createPreferenceContent(Composite parent)
{
Composite composite = UIUtils.createPlaceholder(parent, 1);
{
Group planGroup = UIUtils.createControlGroup(composite, "Execution plan", 2, GridData.FILL_HORIZONTAL, 0);
Label descLabel = new Label(planGroup, SWT.WRAP);
descLabel.setText("By default plan table in current or SYS schema will be used.\nYou may set some particular fully qualified plan table name here.");
GridData gd = new GridData(GridData.HORIZONTAL_ALIGN_BEGINNING);
gd.horizontalSpan = 2;
descLabel.setLayoutData(gd);
explainTableText = UIUtils.createLabelText(planGroup, "Plan table", "", SWT.BORDER, new GridData(GridData.FILL_HORIZONTAL));
}
{
Group planGroup = UIUtils.createControlGroup(composite, "Misc", 2, GridData.FILL_HORIZONTAL, 0);
rowidSupportCheck = UIUtils.createLabelCheckbox(planGroup, "Use ROWID to identify rows", true);
enableDbmsOuputCheck = UIUtils.createLabelCheckbox(planGroup, "Enable DBMS Output", true);
}
return composite;
}
@Override
protected void loadPreferences(DBPPreferenceStore store)
{
explainTableText.setText(store.getString(OracleConstants.PREF_EXPLAIN_TABLE_NAME));
rowidSupportCheck.setSelection(store.getBoolean(OracleConstants.PREF_SUPPORT_ROWID));
enableDbmsOuputCheck.setSelection(store.getBoolean(OracleConstants.PREF_DBMS_OUTPUT));
}
@Override
protected void savePreferences(DBPPreferenceStore store)
{
store.setValue(OracleConstants.PREF_EXPLAIN_TABLE_NAME, explainTableText.getText());
store.setValue(OracleConstants.PREF_SUPPORT_ROWID, rowidSupportCheck.getSelection());
store.setValue(OracleConstants.PREF_DBMS_OUTPUT, enableDbmsOuputCheck.getSelection());
PrefUtils.savePreferenceStore(store);
}
@Override
protected void clearPreferences(DBPPreferenceStore store)
{
store.setToDefault(OracleConstants.PREF_EXPLAIN_TABLE_NAME);
store.setToDefault(OracleConstants.PREF_SUPPORT_ROWID);
store.setToDefault(OracleConstants.PREF_DBMS_OUTPUT);
}
@Override
protected String getPropertyPageID()
{
return PAGE_ID;
}
}<|fim▁end|> | private Text explainTableText;
private Button rowidSupportCheck;
private Button enableDbmsOuputCheck;
|
<|file_name|>BLITSaw_processor.cpp<|end_file_name|><|fim▁begin|>/*
* BLITSaw_processor.cpp
*
* Copyright (c) 2014, fukuroda (https://github.com/fukuroder)
* Released under the MIT license
*/
#include "BLITSaw_processor.h"
#include "BLITSaw_guids.h"
#include "pluginterfaces/vst/ivstparameterchanges.h"
#include <algorithm>
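// Band-limited impulse train (BLIT) sawtooth oscillator: note on/off events
// trigger and release voices, parameter changes adjust the integrator leak,
// and the render loop writes one mono signal to both stereo channels.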
namespace MyVst {
//
BLITSaw_processor::BLITSaw_processor()
{
setControllerClass(BLITSawControllerID);
}
//
FUnknown* BLITSaw_processor::create(void* context)
{
return (IAudioProcessor*)new BLITSaw_processor();
}
//
tresult PLUGIN_API BLITSaw_processor::initialize(FUnknown* context)
{
// base class initialization
tresult result = AudioEffect::initialize(context);
if (result != kResultOk)
{
return result;
}
// set bus
addAudioOutput(STR16("Stereo Out"), SpeakerArr::kStereo);
return kResultOk;
}
//
tresult PLUGIN_API BLITSaw_processor::setBusArrangements(
SpeakerArrangement* inputs,
int32 numIns,
SpeakerArrangement* outputs,
int32 numOuts
){
if (numIns == 0 && numOuts == 1 && outputs[0] == SpeakerArr::kStereo)
{
return AudioEffect::setBusArrangements(inputs, numIns, outputs, numOuts);
}
return kResultFalse;
}
//
tresult PLUGIN_API BLITSaw_processor::process(ProcessData& data)
{
//-------------------
// update parameters
//-------------------
if (data.inputParameterChanges)
{
int32 numParamsChanged = data.inputParameterChanges->getParameterCount();
for (int32 ii = 0; ii < numParamsChanged; ii++)
{
IParamValueQueue* paramQueue = data.inputParameterChanges->getParameterData(ii);
if (paramQueue)
{
int32 offsetSamples;
double value;
//
if (paramQueue->getPoint(paramQueue->getPointCount() - 1, offsetSamples, value) == kResultTrue)
{
ParamID id = paramQueue->getParameterId();
if (id == Leak)
{
                        // map the normalized parameter value [0, 1] onto the leak range [0.99, 1.0]
double Leak = 0.99 + 0.01 * value;
blit.setLeak(Leak);
}
}
}
}
}
//----------------
// process events
//----------------
if (data.inputEvents)
{
int nEventCount = data.inputEvents->getEventCount();
for (int ii = 0; ii < nEventCount; ii++)
{
Event e;
tresult result = data.inputEvents->getEvent(ii, e);
if (result != kResultOk)continue;
if (e.type == Event::kNoteOnEvent)
{
blit.trigger(e.noteOn, e.sampleOffset, processSetup.sampleRate);
}
else if (e.type == Event::kNoteOffEvent)
{
blit.release(e.noteOff);
}
}
}
if (blit.is_silent())
{
return kResultOk;
}
//
if (data.numInputs == 0 && data.numOutputs == 1 && data.outputs[0].numChannels == 2)
{
Sample32** out = data.outputs[0].channelBuffers32;
const int32 sampleFrames = data.numSamples;
for (int ii = 0; ii < sampleFrames; ii++)
{
out[0][ii] = out[1][ii] = blit.render();
blit.next();
<|fim▁hole|> return kResultOk;
}
} // namespace<|fim▁end|> | }
}
|
<|file_name|>quiz_error.rs<|end_file_name|><|fim▁begin|>extern crate diesel;
use self::diesel::result::Error as DatabaseError;
use std::error;
use std::fmt;
use std::convert::From;
#[derive(Debug)]
pub enum QuizError {
DatabaseError(DatabaseError),
JokerUnavailable,
GameAlreadyFinished,
NoGameInProgress,
GameStillInProgress,
StateError,
OutOfResources,
}
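// The Display and Error impls below hook QuizError into std's error
// machinery; From<DatabaseError> lets `?` convert diesel errors directly.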
impl fmt::Display for QuizError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
QuizError::DatabaseError(ref err) => write!(f, "Database error: {}", err),
QuizError::JokerUnavailable => write!(f, "Joker error: Tried to use unavailable Joker"),
QuizError::GameAlreadyFinished => {<|fim▁hole|> QuizError::NoGameInProgress => {
write!(f,
"No game in progress error: Tried to play without starting a game first")
}
QuizError::GameStillInProgress => {
write!(f,
"Game still in progress error: Tried to start game while old one was not finished yet")
}
QuizError::StateError => {
write!(f,
"State error: Found game in a corrupt state, e.g. no available categories")
}
QuizError::OutOfResources => {
write!(f, "Out of resources error: Answered all possible questions")
}
}
}
}
impl error::Error for QuizError {
fn description(&self) -> &str {
match *self {
QuizError::DatabaseError(ref err) => err.description(),
QuizError::JokerUnavailable => "Joker unavailable error",
QuizError::GameAlreadyFinished => "Game already finished error",
QuizError::GameStillInProgress => "Game still in progress error",
QuizError::NoGameInProgress => "No game in progress error",
QuizError::StateError => "State error",
QuizError::OutOfResources => "Out of resources error",
}
}
fn cause(&self) -> Option<&error::Error> {
match *self {
QuizError::DatabaseError(ref err) => Some(err),
_ => None,
}
}
}
impl From<DatabaseError> for QuizError {
fn from(err: DatabaseError) -> Self {
QuizError::DatabaseError(err)
}
}<|fim▁end|> | write!(f,
"Game already finished error: Tried to interact with a game that has already been finished")
} |
<|file_name|>symbol.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! An "interner" is a data structure that associates values with usize tags and
//! allows bidirectional lookup; i.e. given a value, one can easily find its
//! tag, and vice versa.
use hygiene::SyntaxContext;
use std::cell::RefCell;
use std::collections::HashMap;
use std::fmt;
use serde::{Serialize, Serializer, Deserialize, Deserializer};
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct Ident {
pub name: Symbol,
pub ctxt: SyntaxContext,
}
impl Ident {
pub fn with_empty_ctxt(name: Symbol) -> Ident {
Ident { name: name, ctxt: SyntaxContext::empty() }
}
/// Maps a string to an identifier with an empty syntax context.
pub fn from_str(string: &str) -> Ident {
Ident::with_empty_ctxt(Symbol::intern(string))
}
pub fn modern(self) -> Ident {
Ident { name: self.name, ctxt: self.ctxt.modern() }
}
}
impl fmt::Debug for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}{:?}", self.name, self.ctxt)
}
}
impl fmt::Display for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&self.name, f)
}
}
impl Serialize for Ident {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer
{
if self.ctxt.modern() == SyntaxContext::empty() {
serializer.serialize_str(&self.name.as_str())
} else { // FIXME(jseyfried) intercrate hygiene
let mut string = "#".to_owned();
string.push_str(&self.name.as_str());
serializer.serialize_str(&string)
}
}
}
impl<'de> Deserialize<'de> for Ident {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: Deserializer<'de>
{
let string = String::deserialize(deserializer)?;
Ok(if !string.starts_with('#') {
Ident::from_str(&string)
} else { // FIXME(jseyfried) intercrate hygiene
Ident::with_empty_ctxt(Symbol::gensym(&string[1..]))
})
}
}
/// A symbol is an interned or gensymed string.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Symbol(u32);
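// Interned symbols index into the interner's string table from 0 upward;
// gensyms are numbered downward from u32::MAX and resolved via a side table.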
// FIXME syntex
// The interner in thread-local, so `Symbol` shouldn't move between threads.
// impl !Send for Symbol { }
impl Symbol {
/// Maps a string to its interned representation.
pub fn intern(string: &str) -> Self {
with_interner(|interner| interner.intern(string))
}
pub fn interned(self) -> Self {
with_interner(|interner| interner.interned(self))
}
/// gensym's a new usize, using the current interner.
pub fn gensym(string: &str) -> Self {
with_interner(|interner| interner.gensym(string))
}
pub fn gensymed(self) -> Self {
with_interner(|interner| interner.gensymed(self))
}
pub fn as_str(self) -> InternedString {
with_interner(|interner| unsafe {
InternedString {
string: ::std::mem::transmute::<&str, &str>(interner.get(self))
}
})
}
pub fn as_u32(self) -> u32 {
self.0
}
}
impl fmt::Debug for Symbol {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}({})", self, self.0)
}
}
impl fmt::Display for Symbol {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&self.as_str(), f)
}
}
impl Serialize for Symbol {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer
{
serializer.serialize_str(&self.as_str())
}
}
impl<'de> Deserialize<'de> for Symbol {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: Deserializer<'de>
{
String::deserialize(deserializer).map(|s| Symbol::intern(&s))
}
}
impl<T: ::std::ops::Deref<Target=str>> PartialEq<T> for Symbol {
fn eq(&self, other: &T) -> bool {
self.as_str() == other.deref()
}
}
#[derive(Default)]
pub struct Interner {
names: HashMap<Box<str>, Symbol>,
strings: Vec<Box<str>>,
gensyms: Vec<Symbol>,
}
impl Interner {
pub fn new() -> Self {
Interner::default()
}
fn prefill(init: &[&str]) -> Self {
let mut this = Interner::new();
for &string in init {
this.intern(string);
}
this
}
pub fn intern(&mut self, string: &str) -> Symbol {
if let Some(&name) = self.names.get(string) {
return name;
}
let name = Symbol(self.strings.len() as u32);
let string = string.to_string().into_boxed_str();
self.strings.push(string.clone());
self.names.insert(string, name);
name
}
pub fn interned(&self, symbol: Symbol) -> Symbol {
if (symbol.0 as usize) < self.strings.len() {
symbol
} else {
self.interned(self.gensyms[(!0 - symbol.0) as usize])
}
}
fn gensym(&mut self, string: &str) -> Symbol {
let symbol = self.intern(string);
self.gensymed(symbol)
}
fn gensymed(&mut self, symbol: Symbol) -> Symbol {
self.gensyms.push(symbol);
Symbol(!0 - self.gensyms.len() as u32 + 1)
}
pub fn get(&self, symbol: Symbol) -> &str {
match self.strings.get(symbol.0 as usize) {
Some(ref string) => string,
None => self.get(self.gensyms[(!0 - symbol.0) as usize]),
}
}
}
// This macro requires that the numbers assigned to the special identifiers
// increase monotonically by one, starting at 0; the same holds for the keywords,
// except they start from the next number instead of zero.
macro_rules! declare_keywords {(
$( ($index: expr, $konst: ident, $string: expr) )*
) => {
pub mod keywords {
use super::{Symbol, Ident};
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct Keyword {
ident: Ident,
}
impl Keyword {
#[inline] pub fn ident(self) -> Ident { self.ident }
#[inline] pub fn name(self) -> Symbol { self.ident.name }
}
$(
#[allow(non_upper_case_globals)]
pub const $konst: Keyword = Keyword {
ident: Ident {
name: super::Symbol($index),
ctxt: ::NO_EXPANSION,
}
};
)*
}
impl Interner {
fn fresh() -> Self {
Interner::prefill(&[$($string,)*])
}
}
}}
// NB: leaving holes in the ident table is bad! a different ident will get
// interned with the id from the hole, but it will be between the min and max
// of the reserved words, and thus tagged as "reserved".
// After modifying this list adjust `is_strict_keyword`/`is_reserved_keyword`,
// this should be rarely necessary though if the keywords are kept in alphabetic order.
declare_keywords! {
// Invalid identifier
(0, Invalid, "")
// Strict keywords used in the language.
(1, As, "as")
(2, Box, "box")
(3, Break, "break")
(4, Const, "const")
(5, Continue, "continue")
(6, Crate, "crate")
(7, Else, "else")
(8, Enum, "enum")
(9, Extern, "extern")
(10, False, "false")
(11, Fn, "fn")
(12, For, "for")
(13, If, "if")
(14, Impl, "impl")
(15, In, "in")
(16, Let, "let")
(17, Loop, "loop")
(18, Match, "match")
(19, Mod, "mod")
(20, Move, "move")
(21, Mut, "mut")
(22, Pub, "pub")
(23, Ref, "ref")
(24, Return, "return")
(25, SelfValue, "self")
(26, SelfType, "Self")
(27, Static, "static")
(28, Struct, "struct")
(29, Super, "super")
(30, Trait, "trait")
(31, True, "true")
(32, Type, "type")
(33, Unsafe, "unsafe")
(34, Use, "use")
(35, Where, "where")
(36, While, "while")
// Keywords reserved for future use.
(37, Abstract, "abstract")
(38, Alignof, "alignof")
(39, Become, "become")
(40, Do, "do")
(41, Final, "final")
(42, Macro, "macro")
(43, Offsetof, "offsetof")
(44, Override, "override")
(45, Priv, "priv")
(46, Proc, "proc")
(47, Pure, "pure")
(48, Sizeof, "sizeof")
(49, Typeof, "typeof")
(50, Unsized, "unsized")
(51, Virtual, "virtual")
(52, Yield, "yield")
// Weak keywords, have special meaning only in specific contexts.
(53, Default, "default")
(54, StaticLifetime, "'static")
(55, Union, "union")
(56, Catch, "catch")
// A virtual keyword that resolves to the crate root when used in a lexical scope.
(57, CrateRoot, "{{root}}")
}
// If an interner exists in TLS, return it. Otherwise, prepare a fresh one.
fn with_interner<T, F: FnOnce(&mut Interner) -> T>(f: F) -> T {
thread_local!(static INTERNER: RefCell<Interner> = {
RefCell::new(Interner::fresh())
});
INTERNER.with(|interner| f(&mut *interner.borrow_mut()))
}
/// Represents a string stored in the thread-local interner. Because the
/// interner lives for the life of the thread, this can be safely treated as an
/// immortal string, as long as it never crosses between threads.
///
/// FIXME(pcwalton): You must be careful about what you do in the destructors
/// of objects stored in TLS, because they may run after the interner is
/// destroyed. In particular, they must not access string contents. This can
/// be fixed in the future by just leaking all strings until thread death
/// somehow.
#[derive(Clone, Hash, PartialOrd, Eq, Ord)]
pub struct InternedString {
string: &'static str,
}
impl<U: ?Sized> ::std::convert::AsRef<U> for InternedString where str: ::std::convert::AsRef<U> {
fn as_ref(&self) -> &U {
self.string.as_ref()
}
}
impl<T: ::std::ops::Deref<Target = str>> ::std::cmp::PartialEq<T> for InternedString {
fn eq(&self, other: &T) -> bool {
self.string == other.deref()
}
}
impl ::std::cmp::PartialEq<InternedString> for str {
fn eq(&self, other: &InternedString) -> bool {
self == other.string
}
}
impl<'a> ::std::cmp::PartialEq<InternedString> for &'a str {
fn eq(&self, other: &InternedString) -> bool {
*self == other.string
}
}
impl ::std::cmp::PartialEq<InternedString> for String {
fn eq(&self, other: &InternedString) -> bool {
self == other.string
}
}
impl<'a> ::std::cmp::PartialEq<InternedString> for &'a String {
fn eq(&self, other: &InternedString) -> bool {
*self == other.string
}
}
// FIXME syntex
// impl !Send for InternedString { }
impl ::std::ops::Deref for InternedString {
type Target = str;
fn deref(&self) -> &str { self.string }
}
impl fmt::Debug for InternedString {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Debug::fmt(self.string, f)
}
}
impl fmt::Display for InternedString {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(self.string, f)
}
}
impl<'de> Deserialize<'de> for InternedString {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where D: Deserializer<'de>
{
Symbol::deserialize(deserializer).map(Symbol::as_str)
}
}
impl Serialize for InternedString {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer
{
serializer.serialize_str(self.string)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn interner_tests() {
let mut i: Interner = Interner::new();
// first one is zero:
assert_eq!(i.intern("dog"), Symbol(0));
// re-use gets the same entry:<|fim▁hole|> // different string gets a different #:
assert_eq!(i.intern("cat"), Symbol(1));
assert_eq!(i.intern("cat"), Symbol(1));
// dog is still at zero
assert_eq!(i.intern("dog"), Symbol(0));
assert_eq!(i.gensym("zebra"), Symbol(4294967295));
// gensym of same string gets new number :
assert_eq!(i.gensym("zebra"), Symbol(4294967294));
// gensym of *existing* string gets new number:
assert_eq!(i.gensym("dog"), Symbol(4294967293));
}
}<|fim▁end|> | assert_eq!(i.intern ("dog"), Symbol(0)); |
<|file_name|>jj.js<|end_file_name|><|fim▁begin|>/*
Just Journal JavaScript Library
author: Lucas Holt
date created: June 10, 2007
*/
function follow(friend) {
'use strict';
var request = jQuery.ajax({
url: "/api/friend/" + friend,
type: "PUT",
data: {}
});
request.done(function () {
window.alert('Now following ' + friend);
});
request.fail(function (jqXHR, textStatus) {
window.alert("Could not follow friend. Request failed: " + textStatus);
});
}
function unfollow(friend) {
'use strict';
var request = jQuery.ajax({
url: "/api/friend/" + friend,
type: "DELETE",
data: {}
});
request.done(function () {
window.alert('Unfollowing ' + friend);
});
request.fail(function (jqXHR, textStatus) {
window.alert("Could not unfollow friend. Request failed: " + textStatus);
});
}
function addFavorite(entryId) {
'use strict';
var request = jQuery.ajax({
url: "/api/favorite/" + entryId,
type: "POST",
data: {}
});
request.done(function () {
window.alert('Favorite saved.');
});
request.fail(function (jqXHR, textStatus) {
window.alert("Favorite not saved. Request failed: " + textStatus);
});
}
function deleteFavorite(entryId) {
'use strict';
var request = jQuery.ajax({
url: "/api/favorite/" + entryId,
type: "DELETE",
data: {}
});
request.done(function () {
window.alert('Favorite removed.');
});
request.fail(function (jqXHR, textStatus) {
window.alert("Favorite not removed. Request failed: " + textStatus);
});
}
function deleteEntry(entryId) {
'use strict';
if (confirmDelete()) {
var request = jQuery.ajax({
url: "/api/entry/" + entryId,
type: "DELETE",
data: {}
});
request.done(function() {
window.alert('Removed Entry');
});
request.fail(function(jqXHR, textStatus) {
window.alert("Request failed: " + textStatus);
});
}
}
function deleteComment(commentId) {
'use strict';
if (confirmDelete()) {
var request = jQuery.ajax({
url: "/api/comment/" + commentId,
type: "DELETE",
data: {}
});
request.done(function() {
window.alert('Removed Comment');
});
request.fail(function(jqXHR, textStatus) {
window.alert("Request failed: " + textStatus);
});
}
}
function confirmDelete() {
'use strict';
return window.confirm("Are you sure you want to delete this?");
}
function showbox(boxId) {
'use strict';
var box = document.getElementById(boxId);
var parentBox = document.getElementById(boxId + "parent");
box.style.top = getAbsY(parentBox) + "px";
box.style.left = getAbsX(parentBox) + "px";
box.style.visibility='visible';
}
function hidebox(boxId) {
'use strict';
var box = document.getElementById(boxId);
box.style.visibility='hidden';
}
// get the true X offset of anything on NS4, IE4/5 &
// NS6, even if it's in a table!
function getAbsX(elt) {
'use strict';
return (elt.x) ? elt.x : getAbsPos(elt,"Left");
}
// get the true Y offset of anything on NS4, IE4/5 &
// NS6, even if it's in a table!
function getAbsY(elt) {<|fim▁hole|> 'use strict';
return (elt.y) ? elt.y : getAbsPos(elt,"Top");
}
function getAbsPos(elt,which) {
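    // Walk up the offsetParent chain, summing offsetLeft/offsetTop to get
    // the element's absolute position on the page.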
'use strict';
var iPos = 0;
while (elt !== null) {
iPos += elt["offset" + which];
elt = elt.offsetParent;
}
return iPos;
}<|fim▁end|> | |
<|file_name|>MainActivity.java<|end_file_name|><|fim▁begin|>package fr.hnit.babyname;
/*<|fim▁hole|>either version 2 of the License, or (at your option) any
later version.
The babyname app is distributed in the hope that it will be
useful, but WITHOUT ANY WARRANTY; without even the implied
warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more
details.
You should have received a copy of the GNU General
Public License along with the TXM platform. If not, see
http://www.gnu.org/licenses
*/
import android.app.AlertDialog;
import android.content.ClipData;
import android.content.ClipboardManager;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.ContextMenu;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ListView;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.List;
public class MainActivity extends AppCompatActivity {
public static final String UPDATE_EXTRA = "update";
ListView namesListView;
BabyNameAdapter adapter;
public static BabyNameDatabase database = new BabyNameDatabase();
public static ArrayList<BabyNameProject> projects = new ArrayList<>();
Intent editIntent;
Intent findIntent;
Intent settingsIntent;
Intent aboutIntent;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
if (database.size() == 0)
database.initialize();
namesListView = (ListView) findViewById(R.id.listView);
registerForContextMenu(namesListView);
namesListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> adapterView, View view, int i, long l) {
BabyNameProject project = projects.get(i);
if (project != null) {
doFindName(project);
}
}
});
adapter = new BabyNameAdapter(this, projects);
namesListView.setAdapter(adapter);
if (projects.size() == 0) {
initializeProjects();
}
editIntent = new Intent(MainActivity.this, EditActivity.class);
findIntent = new Intent(MainActivity.this, FindActivity.class);
settingsIntent = new Intent(MainActivity.this, SettingsActivity.class);
aboutIntent = new Intent(MainActivity.this, AboutActivity.class);
}
@Override
public void onResume() {
super.onResume(); // Always call the superclass method first
adapter.notifyDataSetChanged();
for (BabyNameProject project : projects) {
if (project.needSaving) {
//Toast.makeText(this, "Saving changes of "+project+"... "+project, Toast.LENGTH_SHORT).show();
if (!BabyNameProject.storeProject(project, this)) {
Toast.makeText(this, "Error: could not save changes to babyname project: "+project, Toast.LENGTH_LONG).show();
}
}
}
}
private void initializeProjects() {
//AppLogger.info("Initializing projects...");
for (String filename : this.fileList()) {
if (filename.endsWith(".baby")) {
//AppLogger.info("Restoring... "+filename);
try {
BabyNameProject project = BabyNameProject.readProject(filename, this);
if (project != null)
projects.add(project);
else
Toast.makeText(MainActivity.this, "Error: could not read baby name project from "+filename, Toast.LENGTH_LONG).show();
} catch (Exception e) {
e.printStackTrace();
}
}
}
}
@Override
public void onCreateContextMenu(ContextMenu menu, View v, ContextMenu.ContextMenuInfo menuInfo) {
super.onCreateContextMenu(menu, v, menuInfo);
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.menu_list, menu);
}
@Override
public boolean onContextItemSelected(MenuItem item) {
AdapterView.AdapterContextMenuInfo info = (AdapterView.AdapterContextMenuInfo) item.getMenuInfo();
if (adapter.getCount() <= info.position) return false;
BabyNameProject project = adapter.getItem(info.position);
if (project == null) return false;
switch (item.getItemId()) {
case R.id.action_reset_baby:
doResetBaby(project);
return true;
case R.id.action_top_baby:
doShowTop10(project);
return true;
case R.id.action_delete_baby:
doDeleteBaby(project);
return true;
default:
return super.onContextItemSelected(item);
}
}
public void doResetBaby(final BabyNameProject project) {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle(R.string.reset_question_title);
builder.setMessage(R.string.reset_question_content);
builder.setPositiveButton(R.string.yes, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
project.reset();
adapter.notifyDataSetChanged();
if (!BabyNameProject.storeProject(project, MainActivity.this)) {
Toast.makeText(MainActivity.this, "Error: could not save reset changes to babyname project: "+project, Toast.LENGTH_LONG).show();
}
dialog.dismiss();
}
});
builder.setNegativeButton(R.string.no, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
                // No action needed on cancel; just dismiss the dialog.
dialog.dismiss();
}
});
AlertDialog alert = builder.create();
alert.show();
}
public void doDeleteBaby(final BabyNameProject project) {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle(R.string.delete_question_title);
builder.setMessage(R.string.delete_question_content);
builder.setPositiveButton(R.string.yes, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
projects.remove(project);
MainActivity.this.deleteFile(project.getID()+".baby");
adapter.notifyDataSetChanged();
dialog.dismiss();
}
});
builder.setNegativeButton(R.string.no, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
});
AlertDialog alert = builder.create();
alert.show();
}
public String projectToString(BabyNameProject p) {
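        // Builds the multi-line project summary shown in the list: gender,
        // origins, name pattern, remaining-name count, and best match so far.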
String l1 = "";
if (p.genders.contains(NameData.F) && p.genders.contains(NameData.M)) {
l1 += getString(R.string.boy_or_girl_name);
} else if (p.genders.contains(NameData.M)) {
l1 += getString(R.string.boy_name);
} else {
l1 +=getString( R.string.girl_name);
}
if (p.origins.size() == 1) {
l1 += "\n\t "+String.format(getString(R.string.origin_is), p.origins.toArray()[0]);
} else if (p.origins.size() > 1) {
l1 += "\n\t "+String.format(getString(R.string.origin_are), p.origins);
} else {
l1 += "\n\t "+getString(R.string.no_origin);
}
if (p.pattern != null) {
if (".*".equals(p.pattern.toString())) {
l1 += "\n\t "+getString(R.string.no_pattern);
} else {
l1 += "\n\t "+String.format(getString(R.string.matches_with), p.pattern);
}
}
if (p.nexts.size() == 1) {
l1 += "\n\t"+getString(R.string.one_remaining_name);
} else if (p.nexts.size() == 0) {
int n = p.scores.size();
if (n > 11) n = n - 10;
l1 += "\n\t"+String.format(getString(R.string.no_remaining_loop), p.loop, n);
} else {
l1 += "\n\t"+String.format(getString(R.string.remaining_names), p.nexts.size());
}
if (p.scores.size() > 0 && p.getBest() != null) {
l1 += "\n\n\t"+String.format(getString(R.string.bact_match_is), p.getBest());
}
return l1;
}
public void doShowTop10(final BabyNameProject project) {
List<Integer> names = project.getTop10();
final StringBuffer buffer = new StringBuffer();
int n = 0;
for (Integer name : names) {
buffer.append("\n"+MainActivity.database.get(name)+": "+project.scores.get(name));
}
if (names.size() == 0) buffer.append(getString(R.string.no_name_rated));
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle(R.string.top_title);
builder.setMessage(buffer.toString());
builder.setPositiveButton(R.string.ok, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
});
if (names.size() > 0)
builder.setNegativeButton(R.string.copy, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
ClipboardManager clipboard = (ClipboardManager)
getSystemService(Context.CLIPBOARD_SERVICE);
ClipData clip = ClipData.newPlainText("baby top10", buffer.toString());
clipboard.setPrimaryClip(clip);
Toast.makeText(MainActivity.this, R.string.text_copied, Toast.LENGTH_LONG).show();
}
});
AlertDialog alert = builder.create();
alert.show();
//Toast.makeText(this, buffer.toString(), Toast.LENGTH_LONG).show();
}
public void doFindName(BabyNameProject project) {
//AppLogger.info("Open FindActivity with "+project+" index="+projects.indexOf(project));
findIntent.putExtra(FindActivity.PROJECT_EXTRA, projects.indexOf(project));
this.startActivityForResult(findIntent, 0);
}
private void openEditActivity(BabyNameProject project) {
//AppLogger.info("Open EditActivity with "+project+" index="+projects.indexOf(project));
editIntent.putExtra(EditActivity.PROJECT_EXTRA, projects.indexOf(project));
this.startActivityForResult(editIntent, 0);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.action_settings:
this.startActivityForResult(settingsIntent, 0);
return true;
case R.id.action_about:
this.startActivityForResult(aboutIntent, 0);
return true;
case R.id.action_new_baby:
doNewBaby();
return true;
default:
return super.onOptionsItemSelected(item);
}
}
public void doNewBaby() {
Toast.makeText(this, R.string.new_baby, Toast.LENGTH_LONG).show();
BabyNameProject project = new BabyNameProject();
projects.add(project);
openEditActivity(project);
}
}<|fim▁end|> | The babyname app is free software: you can redistribute it
and/or modify it under the terms of the GNU General Public
License as published by the Free Software Foundation, |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>extern crate bip_metainfo;
extern crate bip_disk;
extern crate bip_util;
extern crate bytes;
extern crate futures;
extern crate tokio_core;
extern crate rand;
use std::collections::HashMap;
use std::io::{self};
use std::path::{Path, PathBuf};
use std::sync::{Mutex, Arc};
use std::cmp;
use std::time::Duration;
use bip_disk::{FileSystem, IDiskMessage, BlockMetadata, BlockMut};
use bip_metainfo::{IntoAccessor, Accessor, PieceAccess};
use bip_util::bt::InfoHash;
use bytes::BytesMut;
use rand::Rng;
use tokio_core::reactor::{Core, Timeout};
use futures::future::{self, Loop, Future};
use futures::stream::Stream;
use futures::sink::{Sink, Wait};
mod add_torrent;
mod disk_manager_send_backpressure;
mod complete_torrent;
mod load_block;
mod process_block;
mod remove_torrent;
mod resume_torrent;
/// Generate buffer of size random bytes.
fn random_buffer(size: usize) -> Vec<u8> {
let mut buffer = vec![0u8; size];
let mut rng = rand::weak_rng();
for i in 0..size {
buffer[i] = rng.gen();
}
buffer
}
/// Initiate a core loop with the given timeout, state, and closure.
///
/// Returns R or panics if an error occurred in the loop (including a timeout).
fn core_loop_with_timeout<I, S, F, R>(core: &mut Core, timeout_ms: u64, state: (I, S), call: F) -> R
where F: FnMut(I, S, S::Item) -> Loop<R, (I, S)>,
S: Stream {
let timeout = Timeout::new(Duration::from_millis(timeout_ms), &core.handle())
.unwrap()
.then(|_| Err(()));
// Have to stick the call in our init state so that we transfer ownership between loops
core.run(
future::loop_fn((call, state), |(mut call, (init, stream))| {
stream.into_future()
.map(|(opt_msg, stream)| {
let msg = opt_msg
.unwrap_or_else(|| panic!("End Of Stream Reached"));
match call(init, stream, msg) {
Loop::Continue((init, stream)) => Loop::Continue((call, (init, stream))),
Loop::Break(ret) => Loop::Break(ret)
}
})
})
.map_err(|_| ())
.select(timeout)
.map(|(item, _)| item)
).unwrap_or_else(|_| panic!("Core Loop Timed Out"))
}
/// Send block with the given metadata and entire data given.
fn send_block<S, M>(blocking_send: &mut Wait<S>, data: &[u8], hash: InfoHash, piece_index: u64, block_offset: u64, block_len: usize, modify: M)
where S: Sink<SinkItem=IDiskMessage>, M: Fn(&mut [u8]) {
let mut bytes = BytesMut::new();
bytes.extend_from_slice(data);
let mut block = BlockMut::new(BlockMetadata::new(hash, piece_index, block_offset, block_len), bytes);
modify(&mut block[..]);
blocking_send.send(IDiskMessage::ProcessBlock(block.into())).unwrap_or_else(|_| panic!("Failed To Send Process Block Message"));
}
//----------------------------------------------------------------------------//
/// Allow us to mock out multi file torrents.
struct MultiFileDirectAccessor {
dir: PathBuf,
files: Vec<(Vec<u8>, PathBuf)>
}
impl MultiFileDirectAccessor {
pub fn new(dir: PathBuf, files: Vec<(Vec<u8>, PathBuf)>) -> MultiFileDirectAccessor {
MultiFileDirectAccessor{ dir: dir, files: files }
}
}
// TODO: Ugh, once specialization lands, we can see about having a default impl for IntoAccessor
impl IntoAccessor for MultiFileDirectAccessor {
type Accessor = MultiFileDirectAccessor;
fn into_accessor(self) -> io::Result<MultiFileDirectAccessor> {
Ok(self)
}
}
impl Accessor for MultiFileDirectAccessor {
fn access_directory(&self) -> Option<&Path> {
// Do not just return the option here, unwrap it and put it in
// another Option (since we know this is a multi file torrent)
Some(self.dir.as_ref())
}
fn access_metadata<C>(&self, mut callback: C) -> io::Result<()>
where C: FnMut(u64, &Path) {
for &(ref buffer, ref path) in self.files.iter() {
callback(buffer.len() as u64, &*path)
}
Ok(())
}
fn access_pieces<C>(&self, mut callback: C) -> io::Result<()>
where C: for<'a> FnMut(PieceAccess<'a>) -> io::Result<()> {
for &(ref buffer, _) in self.files.iter() {
try!(callback(PieceAccess::Compute(&mut &buffer[..])))
}
Ok(())
}
}
//----------------------------------------------------------------------------//
/// Allow us to mock out the file system.
#[derive(Clone)]
struct InMemoryFileSystem {
files: Arc<Mutex<HashMap<PathBuf, Vec<u8>>>>
}
impl InMemoryFileSystem {
pub fn new() -> InMemoryFileSystem {
InMemoryFileSystem{ files: Arc::new(Mutex::new(HashMap::new())) }
}
pub fn run_with_lock<C, R>(&self, call: C) -> R
where C: FnOnce(&mut HashMap<PathBuf, Vec<u8>>) -> R {
let mut lock_files = self.files.lock().unwrap();
call(&mut *lock_files)
}
}
struct InMemoryFile {
path: PathBuf
}
impl FileSystem for InMemoryFileSystem {
type File = InMemoryFile;
fn open_file<P>(&self, path: P) -> io::Result<Self::File>
where P: AsRef<Path> + Send + 'static {
let file_path = path.as_ref().to_path_buf();
<|fim▁hole|> });
Ok(InMemoryFile{ path: file_path })
}
fn sync_file<P>(&self, _path: P) -> io::Result<()>
where P: AsRef<Path> + Send + 'static {
Ok(())
}
fn file_size(&self, file: &Self::File) -> io::Result<u64> {
self.run_with_lock(|files| {
files.get(&file.path)
.map(|file| file.len() as u64)
.ok_or(io::Error::new(io::ErrorKind::NotFound, "File Not Found"))
})
}
fn read_file(&self, file: &mut Self::File, offset: u64, buffer: &mut [u8]) -> io::Result<usize> {
self.run_with_lock(|files| {
files.get(&file.path)
.map(|file_buffer| {
let cast_offset = offset as usize;
let bytes_to_copy = cmp::min(file_buffer.len() - cast_offset, buffer.len());
let bytes = &file_buffer[cast_offset..(bytes_to_copy + cast_offset)];
buffer.clone_from_slice(bytes);
bytes_to_copy
})
.ok_or(io::Error::new(io::ErrorKind::NotFound, "File Not Found"))
})
}
fn write_file(&self, file: &mut Self::File, offset: u64, buffer: &[u8]) -> io::Result<usize> {
self.run_with_lock(|files| {
files.get_mut(&file.path)
.map(|file_buffer| {
let cast_offset = offset as usize;
let last_byte_pos = cast_offset + buffer.len();
if last_byte_pos > file_buffer.len() {
file_buffer.resize(last_byte_pos, 0);
}
let bytes_to_copy = cmp::min(file_buffer.len() - cast_offset, buffer.len());
if bytes_to_copy != 0 {
file_buffer[cast_offset..(cast_offset + bytes_to_copy)].clone_from_slice(buffer);
}
// TODO: If the file is full, this will return zero, we should also simulate io::ErrorKind::WriteZero
bytes_to_copy
})
.ok_or(io::Error::new(io::ErrorKind::NotFound, "File Not Found"))
})
}
}<|fim▁end|> | self.run_with_lock(|files| {
if !files.contains_key(&file_path) {
files.insert(file_path.clone(), Vec::new());
} |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(plugin_registrar, rustc_private)]
extern crate syntax;
extern crate rustc;
use syntax::ast::{MetaItem, Expr};
use syntax::ast;
use syntax::codemap::Span;
use syntax::ext::base::{ExtCtxt, Annotatable};
use syntax::ext::build::AstBuilder;
use syntax::ext::deriving::generic::*;
use syntax::ext::deriving::generic::ty::*;
use syntax::parse::token::InternedString;
use syntax::ptr::P;
use syntax::ext::base::MultiDecorator;
use syntax::parse::token;
use rustc::plugin::Registry;
macro_rules! pathvec {
($($x:ident)::+) => (
vec![ $( stringify!($x) ),+ ]
)
}
macro_rules! path {
($($x:tt)*) => (
::syntax::ext::deriving::generic::ty::Path::new( pathvec!( $($x)* ) )
)
}
macro_rules! path_local {
($x:ident) => (
::syntax::ext::deriving::generic::ty::Path::new_local(stringify!($x))
)
}
macro_rules! pathvec_std {
($cx:expr, $first:ident :: $($rest:ident)::+) => (
if $cx.use_std {
pathvec!(std :: $($rest)::+)
} else {
pathvec!($first :: $($rest)::+)
}
)
}
macro_rules! path_std {
($($x:tt)*) => (
::syntax::ext::deriving::generic::ty::Path::new( pathvec_std!( $($x)* ) )
)
}
pub fn expand_deriving_from_primitive(cx: &mut ExtCtxt,
span: Span,
mitem: &MetaItem,
item: &Annotatable,
push: &mut FnMut(Annotatable))
{
let inline = cx.meta_word(span, InternedString::new("inline"));
let attrs = vec!(cx.attribute(span, inline));
let trait_def = TraitDef {
span: span,
attributes: Vec::new(),
path: path!(num::FromPrimitive),
additional_bounds: Vec::new(),
generics: LifetimeBounds::empty(),
methods: vec!(
MethodDef {
name: "from_i64",
is_unsafe: false,
generics: LifetimeBounds::empty(),
explicit_self: None,
args: vec!(Literal(path_local!(i64))),
ret_ty: Literal(Path::new_(pathvec_std!(cx, core::option::Option),
None,
vec!(Box::new(Self_)),
true)),
// #[inline] liable to cause code-bloat
attributes: attrs.clone(),
combine_substructure: combine_substructure(Box::new(|c, s, sub| {
cs_from("i64", c, s, sub)
})),
},
MethodDef {
name: "from_u64",
is_unsafe: false,
generics: LifetimeBounds::empty(),<|fim▁hole|> vec!(Box::new(Self_)),
true)),
// #[inline] liable to cause code-bloat
attributes: attrs,
combine_substructure: combine_substructure(Box::new(|c, s, sub| {
cs_from("u64", c, s, sub)
})),
}
),
associated_types: Vec::new(),
};
trait_def.expand(cx, mitem, &item, push)
}
fn cs_from(name: &str, cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> P<Expr> {
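    // Generates the body of from_i64/from_u64: one guarded arm per C-like
    // enum variant (`_ if $n == $variant as $name => Some($variant)`), plus
    // a trailing `_ => None`.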
if substr.nonself_args.len() != 1 {
cx.span_bug(trait_span, "incorrect number of arguments in `derive(FromPrimitive)`")
}
let n = &substr.nonself_args[0];
match *substr.fields {
StaticStruct(..) => {
cx.span_err(trait_span, "`FromPrimitive` cannot be derived for structs");
return cx.expr_fail(trait_span, InternedString::new(""));
}
StaticEnum(enum_def, _) => {
if enum_def.variants.is_empty() {
cx.span_err(trait_span,
"`FromPrimitive` cannot be derived for enums with no variants");
return cx.expr_fail(trait_span, InternedString::new(""));
}
let mut arms = Vec::new();
for variant in &enum_def.variants {
match variant.node.kind {
ast::TupleVariantKind(ref args) => {
if !args.is_empty() {
cx.span_err(trait_span,
"`FromPrimitive` cannot be derived for \
enum variants with arguments");
return cx.expr_fail(trait_span,
InternedString::new(""));
}
let span = variant.span;
// expr for `$n == $variant as $name`
let path = cx.path(span, vec![substr.type_ident, variant.node.name]);
let variant = cx.expr_path(path);
let ty = cx.ty_ident(span, cx.ident_of(name));
let cast = cx.expr_cast(span, variant.clone(), ty);
let guard = cx.expr_binary(span, ast::BiEq, n.clone(), cast);
// expr for `Some($variant)`
let body = cx.expr_some(span, variant);
// arm for `_ if $guard => $body`
let arm = ast::Arm {
attrs: vec!(),
pats: vec!(cx.pat_wild(span)),
guard: Some(guard),
body: body,
};
arms.push(arm);
}
ast::StructVariantKind(_) => {
cx.span_err(trait_span,
"`FromPrimitive` cannot be derived for enums \
with struct variants");
return cx.expr_fail(trait_span,
InternedString::new(""));
}
}
}
// arm for `_ => None`
let arm = ast::Arm {
attrs: vec!(),
pats: vec!(cx.pat_wild(trait_span)),
guard: None,
body: cx.expr_none(trait_span),
};
arms.push(arm);
cx.expr_match(trait_span, n.clone(), arms)
}
_ => cx.span_bug(trait_span, "expected StaticEnum in derive(FromPrimitive)")
}
}
#[plugin_registrar]
#[doc(hidden)]
pub fn plugin_registrar(reg: &mut Registry) {
reg.register_syntax_extension(
token::intern("derive_NumFromPrimitive"),
MultiDecorator(Box::new(expand_deriving_from_primitive)));
}<|fim▁end|> | explicit_self: None,
args: vec!(Literal(path_local!(u64))),
ret_ty: Literal(Path::new_(pathvec_std!(cx, core::option::Option),
None, |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use std::ops::Add;
use std::ops::Mul;
fn main() {
    // Rust allows limited operator overloading
    // There are special traits for overloading operators
#[derive(Debug)]
struct Point { x: i32, y: i32, }
impl Add for Point {
type Output = Point;
fn add(self, other: Point) -> Point {
Point { x: self.x + other.x, y: self.y + other.y }
}
}
let p1 = Point { x: 1, y: 0 };
let p2 = Point { x: 2, y: 3 };
let p3 = p1 + p2;
println!("{:?}", p3);
    // Note: the std::ops traits take `self` by value, so operands are moved
    // Operator-overload traits can also serve as bounds on generic structs
trait HasArea<T> { fn area(&self) -> T; }
struct Square<T> { _x: T, _y: T, side: T, }
<|fim▁hole|> where T: Mul<Output=T> + Copy {
fn area(&self) -> T {
self.side * self.side // Copy is important here, Rust would try to move self.side to return value then
}
}
let s = Square { _x: 0.0f64, _y: 0.0f64, side: 12.0f64 };
println!("square (side = {}) area is {}", s.side, s.area());
}<|fim▁end|> | impl<T> HasArea<T> for Square<T> |