prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>EnvironmentAuthenticationResponse.java<|end_file_name|><|fim▁begin|>package com.sequenceiq.environment.api.v1.environment.model.response;
import java.io.Serializable;
import com.sequenceiq.environment.api.doc.environment.EnvironmentModelDescription;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
@ApiModel("EnvironmentAuthenticationV1Response")
public class EnvironmentAuthenticationResponse implements Serializable {
@ApiModelProperty(EnvironmentModelDescription.PUBLIC_KEY)
private String publicKey;
@ApiModelProperty(EnvironmentModelDescription.PUBLIC_KEY_ID)
private String publicKeyId;
@ApiModelProperty(EnvironmentModelDescription.LOGIN_USER_NAME)
private String loginUserName;
public String getPublicKey() {
return publicKey;
}
public void setPublicKey(String publicKey) {
this.publicKey = publicKey;
}
public String getPublicKeyId() {
return publicKeyId;
}
public void setPublicKeyId(String publicKeyId) {
this.publicKeyId = publicKeyId;
}
public String getLoginUserName() {
return loginUserName;
}
public void setLoginUserName(String loginUserName) {
this.loginUserName = loginUserName;
}
public static Builder builder() {
return new Builder();
}
@Override
public String toString() {
return "EnvironmentAuthenticationResponse{" +
"publicKey='" + publicKey + '\'' +
", publicKeyId='" + publicKeyId + '\'' +
", loginUserName='" + loginUserName + '\'' +
'}';
}
public static class Builder {
private String publicKey;
private String publicKeyId;
private String loginUserName;
private Builder() {
}
public Builder withPublicKey(String publicKey) {
this.publicKey = publicKey;
return this;
}
public Builder withPublicKeyId(String publicKeyId) {
this.publicKeyId = publicKeyId;
return this;
}
public Builder withLoginUserName(String loginUserName) {
this.loginUserName = loginUserName;
return this;
}
public EnvironmentAuthenticationResponse build() {
EnvironmentAuthenticationResponse response = new EnvironmentAuthenticationResponse();
response.setLoginUserName(loginUserName);<|fim▁hole|> response.setPublicKey(publicKey);
response.setPublicKeyId(publicKeyId);
return response;
}
}
}<|fim▁end|> | |
<|file_name|>BlockedComForce.hpp<|end_file_name|><|fim▁begin|>int ObservableBlockedComForce::actual_calculate(PartCfg & partCfg) {
double* A = last_value;
unsigned int i;
unsigned int block;
unsigned int n_blocks;
unsigned int blocksize;
unsigned int id;
IntList* ids;
if (!sortPartCfg()) {
runtimeErrorMsg() <<"could not sort partCfg";
return -1;
}
ids=(IntList*) container;
n_blocks=n/3;
blocksize=ids->n/n_blocks;
for ( block = 0; block < n_blocks; block++ ) {
for ( i = 0; i < blocksize; i++ ) {<|fim▁hole|> return 1;
A[3*block+0] += partCfg[id].f.f[0]/time_step/time_step*2;
A[3*block+1] += partCfg[id].f.f[1]/time_step/time_step*2;
A[3*block+2] += partCfg[id].f.f[2]/time_step/time_step*2;
}
}
return 0;
}<|fim▁end|> | id = ids->e[block*blocksize+i];
if (ids->e[i] >= n_part) |
<|file_name|>SignTest.java<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2013 Rohan Padhye
*
* This library is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, either version 2.1 of the
* License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package vasco.soot.examples;
import java.util.Map;
import org.junit.Test;
import soot.Local;
import soot.PackManager;
import soot.SceneTransformer;
import soot.SootMethod;
import soot.Transform;
import soot.Unit;
import vasco.DataFlowSolution;
import vasco.soot.examples.SignAnalysis.Sign;
/**
* A Soot {@link SceneTransformer} for performing {@link SignAnalysis}.
*
* @author Rohan Padhye
*/
public class SignTest extends SceneTransformer {
private SignAnalysis analysis;
@Override
protected void internalTransform(String arg0, @SuppressWarnings("rawtypes") Map arg1) {
analysis = new SignAnalysis();
analysis.doAnalysis();
DataFlowSolution<Unit,Map<Local,Sign>> solution = analysis.getMeetOverValidPathsSolution();
System.out.println("----------------------------------------------------------------");
for (SootMethod sootMethod : analysis.getMethods()) {
System.out.println(sootMethod);
for (Unit unit : sootMethod.getActiveBody().getUnits()) {
System.out.println("----------------------------------------------------------------");
System.out.println(unit);
System.out.println("IN: " + formatConstants(solution.getValueBefore(unit)));
System.out.println("OUT: " + formatConstants(solution.getValueAfter(unit)));
}
System.out.println("----------------------------------------------------------------");
}
}
public static String formatConstants(Map<Local, Sign> value) {
if (value == null) {
return "";
}
StringBuffer sb = new StringBuffer();
for (Map.Entry<Local,Sign> entry : value.entrySet()) {
Local local = entry.getKey();
Sign sign = entry.getValue();
if (sign != null) {
sb.append("(").append(local).append(": ").append(sign.toString()).append(") ");
}
}
return sb.toString();
}
public SignAnalysis getAnalysis() {
return analysis;
}
public static void main(String args[]) {
String classPath = System.getProperty("java.class.path");
String mainClass = null;
/* ------------------- OPTIONS ---------------------- */
try {
int i=0;
while(true){
if (args[i].equals("-cp")) {
classPath = args[i+1];
i += 2;
} else {
mainClass = args[i];
i++;
break;
}
}
if (i != args.length || mainClass == null)
throw new Exception();
} catch (Exception e) {
System.err.println("Usage: java SignTest [-cp CLASSPATH] MAIN_CLASS");
System.exit(1);
}
String[] sootArgs = {
"-cp", classPath, "-pp",
"-w", "-app",
"-keep-line-number",
"-keep-bytecode-offset",
"-p", "jb", "use-original-names",
"-p", "cg", "implicit-entry:false",
"-p", "cg.spark", "enabled",<|fim▁hole|> "-p", "cg", "safe-newinstance",
"-main-class", mainClass,
"-f", "none", mainClass
};
SignTest sgn = new SignTest();
PackManager.v().getPack("wjtp").add(new Transform("wjtp.sgn", sgn));
soot.Main.main(sootArgs);
}
@Test
public void testSignAnalysis() {
// TODO: Compare output with an ideal (expected) output
SignTest.main(new String[]{"vasco.tests.SignTestCase"});
}
}<|fim▁end|> | "-p", "cg.spark", "simulate-natives",
"-p", "cg", "safe-forname", |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod stdthunk_x64;<|fim▁hole|>pub mod stdthunk_x86;
pub mod thunk_pool;
#[cfg(target_arch = "x86_64")]
pub use self::stdthunk_x64::imp::Thunk;
#[cfg(target_arch = "x86")]
pub use self::stdthunk_x86::imp::Thunk;
pub use self::thunk_pool::{get_thunk, put_back, set_this, get_this, drop_pool};<|fim▁end|> | |
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main
import (<|fim▁hole|> "encoding/gob"
"encoding/json"
"log"
"os"
"sort"
"github.com/tsenart/vegeta/lib"
)
func main() {
var res vegeta.Results
if err := gob.NewDecoder(os.Stdin).Decode(&res); err != nil {
log.Fatal(err)
}
sort.Sort(res)
if err := json.NewEncoder(os.Stdout).Encode(res); err != nil {
log.Fatal(err)
}
}<|fim▁end|> | |
<|file_name|>AgencyRegistrationProcessDao.java<|end_file_name|><|fim▁begin|>package com.abm.mainet.agency.dao;
import com.abm.mainet.agency.dto.TPAgencyReqDTO;
import com.abm.mainet.agency.dto.TPAgencyResDTO;
import com.abm.mainet.common.domain.Employee;
/**
* @author Arun.Chavda
*
*/
public interface AgencyRegistrationProcessDao {
<|fim▁hole|>}<|fim▁end|> | Employee saveAgnEmployeeDetails(Employee employee);
TPAgencyResDTO getAuthStatus(TPAgencyReqDTO requestDTO);
void updatedAuthStatus(Long empId, Long orgId, String flag);
|
<|file_name|>bip9-softforks.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) 2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, NetworkThread
from test_framework.blocktools import create_coinbase, create_block
from test_framework.comptool import TestInstance, TestManager
from test_framework.script import CScript, OP_1NEGATE, OP_CHECKSEQUENCEVERIFY, OP_DROP
from io import BytesIO
import time
import itertools
'''
This test is meant to exercise BIP forks<|fim▁hole|>mine 2 block and save coinbases for later use
mine 141 blocks to transition from DEFINED to STARTED
mine 100 blocks signalling readiness and 44 not in order to fail to change state this period
mine 108 blocks signalling readiness and 36 blocks not signalling readiness (STARTED->LOCKED_IN)
mine a further 143 blocks (LOCKED_IN)
test that enforcement has not triggered (which triggers ACTIVE)
test that enforcement has triggered
'''
class BIP9SoftForksTest(ComparisonTestFramework):
def __init__(self):
self.num_nodes = 1
def setup_network(self):
self.nodes = start_nodes(1, self.options.tmpdir,
extra_args=[['-debug', '-whitelist=127.0.0.1']],
binary=[self.options.testbinary])
def run_test(self):
self.test = TestManager(self, self.options.tmpdir)
self.test.add_all_connections(self.nodes)
NetworkThread().start() # Start up network handling in another thread
self.test.run()
def create_transaction(self, node, coinbase, to_address, amount):
from_txid = node.getblock(coinbase)['tx'][0]
inputs = [{ "txid" : from_txid, "vout" : 0}]
outputs = { to_address : amount }
rawtx = node.createrawtransaction(inputs, outputs)
tx = CTransaction()
f = BytesIO(hex_str_to_bytes(rawtx))
tx.deserialize(f)
tx.nVersion = 2
return tx
def sign_transaction(self, node, tx):
signresult = node.signrawtransaction(bytes_to_hex_str(tx.serialize()))
tx = CTransaction()
f = BytesIO(hex_str_to_bytes(signresult['hex']))
tx.deserialize(f)
return tx
def generate_blocks(self, number, version, test_blocks = []):
for i in range(number):
block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
block.nVersion = version
block.rehash()
block.solve()
test_blocks.append([block, True])
self.last_block_time += 1
self.tip = block.sha256
self.height += 1
return test_blocks
def get_bip9_status(self, key):
info = self.nodes[0].getblockchaininfo()
for row in info['bip9_softforks']:
if row['id'] == key:
return row
raise IndexError ('key:"%s" not found' % key)
def test_BIP(self, bipName, activated_version, invalidate, invalidatePostSignature, bitno):
# generate some coins for later
self.coinbase_blocks = self.nodes[0].generate(2)
self.height = 3 # height of the next block to build
self.tip = int("0x" + self.nodes[0].getbestblockhash(), 0)
self.nodeaddress = self.nodes[0].getnewaddress()
self.last_block_time = int(time.time())
assert_equal(self.get_bip9_status(bipName)['status'], 'defined')
tmpl = self.nodes[0].getblocktemplate({})
assert(bipName not in tmpl['rules'])
assert(bipName not in tmpl['vbavailable'])
assert_equal(tmpl['vbrequired'], 0)
assert_equal(tmpl['version'], 0x20000000)
# Test 1
# Advance from DEFINED to STARTED
test_blocks = self.generate_blocks(141, 4)
yield TestInstance(test_blocks, sync_every_block=False)
assert_equal(self.get_bip9_status(bipName)['status'], 'started')
tmpl = self.nodes[0].getblocktemplate({})
assert(bipName not in tmpl['rules'])
assert_equal(tmpl['vbavailable'][bipName], bitno)
assert_equal(tmpl['vbrequired'], 0)
assert(tmpl['version'] & activated_version)
# Test 2
# Fail to achieve LOCKED_IN 100 out of 144 signal bit 1
# using a variety of bits to simulate multiple parallel softforks
test_blocks = self.generate_blocks(50, activated_version) # 0x20000001 (signalling ready)
test_blocks = self.generate_blocks(20, 4, test_blocks) # 0x00000004 (signalling not)
test_blocks = self.generate_blocks(50, activated_version, test_blocks) # 0x20000101 (signalling ready)
test_blocks = self.generate_blocks(24, 4, test_blocks) # 0x20010000 (signalling not)
yield TestInstance(test_blocks, sync_every_block=False)
assert_equal(self.get_bip9_status(bipName)['status'], 'started')
tmpl = self.nodes[0].getblocktemplate({})
assert(bipName not in tmpl['rules'])
assert_equal(tmpl['vbavailable'][bipName], bitno)
assert_equal(tmpl['vbrequired'], 0)
assert(tmpl['version'] & activated_version)
# Test 3
# 108 out of 144 signal bit 1 to achieve LOCKED_IN
# using a variety of bits to simulate multiple parallel softforks
test_blocks = self.generate_blocks(58, activated_version) # 0x20000001 (signalling ready)
test_blocks = self.generate_blocks(26, 4, test_blocks) # 0x00000004 (signalling not)
test_blocks = self.generate_blocks(50, activated_version, test_blocks) # 0x20000101 (signalling ready)
test_blocks = self.generate_blocks(10, 4, test_blocks) # 0x20010000 (signalling not)
yield TestInstance(test_blocks, sync_every_block=False)
assert_equal(self.get_bip9_status(bipName)['status'], 'locked_in')
tmpl = self.nodes[0].getblocktemplate({})
assert(bipName not in tmpl['rules'])
# Test 4
# 143 more version 536870913 blocks (waiting period-1)
test_blocks = self.generate_blocks(143, 4)
yield TestInstance(test_blocks, sync_every_block=False)
assert_equal(self.get_bip9_status(bipName)['status'], 'locked_in')
tmpl = self.nodes[0].getblocktemplate({})
assert(bipName not in tmpl['rules'])
# Test 5
# Check that the new rule is enforced
spendtx = self.create_transaction(self.nodes[0],
self.coinbase_blocks[0], self.nodeaddress, 1.0)
invalidate(spendtx)
spendtx = self.sign_transaction(self.nodes[0], spendtx)
spendtx.rehash()
invalidatePostSignature(spendtx)
spendtx.rehash()
block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
block.nVersion = activated_version
block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
self.last_block_time += 1
self.tip = block.sha256
self.height += 1
yield TestInstance([[block, True]])
assert_equal(self.get_bip9_status(bipName)['status'], 'active')
tmpl = self.nodes[0].getblocktemplate({})
assert(bipName in tmpl['rules'])
assert(bipName not in tmpl['vbavailable'])
assert_equal(tmpl['vbrequired'], 0)
assert(not (tmpl['version'] & (1 << bitno)))
# Test 6
# Check that the new sequence lock rules are enforced
spendtx = self.create_transaction(self.nodes[0],
self.coinbase_blocks[1], self.nodeaddress, 1.0)
invalidate(spendtx)
spendtx = self.sign_transaction(self.nodes[0], spendtx)
spendtx.rehash()
invalidatePostSignature(spendtx)
spendtx.rehash()
block = create_block(self.tip, create_coinbase(self.height), self.last_block_time + 1)
block.nVersion = 5
block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
self.last_block_time += 1
yield TestInstance([[block, False]])
# Restart all
stop_nodes(self.nodes)
wait_electrumds()
shutil.rmtree(self.options.tmpdir)
self.setup_chain()
self.setup_network()
self.test.clear_all_connections()
self.test.add_all_connections(self.nodes)
NetworkThread().start() # Start up network handling in another thread
def get_tests(self):
for test in itertools.chain(
self.test_BIP('csv', 0x20000001, self.sequence_lock_invalidate, self.donothing, 0),
self.test_BIP('csv', 0x20000001, self.mtp_invalidate, self.donothing, 0),
self.test_BIP('csv', 0x20000001, self.donothing, self.csv_invalidate, 0)
):
yield test
def donothing(self, tx):
return
def csv_invalidate(self, tx):
'''Modify the signature in vin 0 of the tx to fail CSV
Prepends -1 CSV DROP in the scriptSig itself.
'''
tx.vin[0].scriptSig = CScript([OP_1NEGATE, OP_CHECKSEQUENCEVERIFY, OP_DROP] +
list(CScript(tx.vin[0].scriptSig)))
def sequence_lock_invalidate(self, tx):
'''Modify the nSequence to make it fails once sequence lock rule is activated (high timespan)
'''
tx.vin[0].nSequence = 0x00FFFFFF
tx.nLockTime = 0
def mtp_invalidate(self, tx):
'''Modify the nLockTime to make it fails once MTP rule is activated
'''
# Disable Sequence lock, Activate nLockTime
tx.vin[0].nSequence = 0x90FFFFFF
tx.nLockTime = self.last_block_time
if __name__ == '__main__':
BIP9SoftForksTest().main()<|fim▁end|> | Connect to a single node.
regtest lock-in with 108/144 block signalling
activation after a further 144 blocks |
<|file_name|>HeadsetTester.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from Headset import Headset
import logging
import time
puerto = 'COM3'
headset = Headset(logging.INFO)
try:
headset.connect(puerto, 115200)<|fim▁hole|>print "-----------------------------------------"
headset.startReading(persist_data=True)
time.sleep(5)
headset.stopReading()
headset.closePort()
print "-----------------------------------------"
print "Is conected? " + str(headset.isConnected())
print headset.getStatus()<|fim▁end|> | except Exception, e:
raise e
print "Is conected? " + str(headset.isConnected()) |
<|file_name|>crypto.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::fmt;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use serde::ser::SerializeStruct;
use serde::de::{Visitor, MapVisitor, Error};
use super::{Cipher, CipherSer, CipherSerParams, Kdf, KdfSer, KdfSerParams, H256, Bytes};
pub type CipherText = Bytes;
#[derive(Debug, PartialEq)]
pub struct Crypto {
pub cipher: Cipher,
pub ciphertext: CipherText,
pub kdf: Kdf,
pub mac: H256,
}
enum CryptoField {
Cipher,
CipherParams,
CipherText,
Kdf,
KdfParams,
Mac,
}
impl Deserialize for CryptoField {
fn deserialize<D>(deserializer: D) -> Result<CryptoField, D::Error>
where D: Deserializer
{
deserializer.deserialize(CryptoFieldVisitor)
}
}
struct CryptoFieldVisitor;
impl Visitor for CryptoFieldVisitor {
type Value = CryptoField;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "a valid crypto struct description")
}
fn visit_str<E>(self, value: &str) -> Result<Self::Value, E><|fim▁hole|> {
match value {
"cipher" => Ok(CryptoField::Cipher),
"cipherparams" => Ok(CryptoField::CipherParams),
"ciphertext" => Ok(CryptoField::CipherText),
"kdf" => Ok(CryptoField::Kdf),
"kdfparams" => Ok(CryptoField::KdfParams),
"mac" => Ok(CryptoField::Mac),
_ => Err(Error::custom(format!("Unknown field: '{}'", value))),
}
}
}
impl Deserialize for Crypto {
fn deserialize<D>(deserializer: D) -> Result<Crypto, D::Error>
where D: Deserializer
{
static FIELDS: &'static [&'static str] = &["id", "version", "crypto", "Crypto", "address"];
deserializer.deserialize_struct("Crypto", FIELDS, CryptoVisitor)
}
}
struct CryptoVisitor;
impl Visitor for CryptoVisitor {
type Value = Crypto;
fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "a valid vault crypto object")
}
fn visit_map<V>(self, mut visitor: V) -> Result<Self::Value, V::Error>
where V: MapVisitor
{
let mut cipher = None;
let mut cipherparams = None;
let mut ciphertext = None;
let mut kdf = None;
let mut kdfparams = None;
let mut mac = None;
loop {
match visitor.visit_key()? {
Some(CryptoField::Cipher) => { cipher = Some(visitor.visit_value()?); }
Some(CryptoField::CipherParams) => { cipherparams = Some(visitor.visit_value()?); }
Some(CryptoField::CipherText) => { ciphertext = Some(visitor.visit_value()?); }
Some(CryptoField::Kdf) => { kdf = Some(visitor.visit_value()?); }
Some(CryptoField::KdfParams) => { kdfparams = Some(visitor.visit_value()?); }
Some(CryptoField::Mac) => { mac = Some(visitor.visit_value()?); }
None => { break; }
}
}
let cipher = match (cipher, cipherparams) {
(Some(CipherSer::Aes128Ctr), Some(CipherSerParams::Aes128Ctr(params))) => Cipher::Aes128Ctr(params),
(None, _) => return Err(V::Error::missing_field("cipher")),
(Some(_), None) => return Err(V::Error::missing_field("cipherparams")),
};
let ciphertext = match ciphertext {
Some(ciphertext) => ciphertext,
None => return Err(V::Error::missing_field("ciphertext")),
};
let kdf = match (kdf, kdfparams) {
(Some(KdfSer::Pbkdf2), Some(KdfSerParams::Pbkdf2(params))) => Kdf::Pbkdf2(params),
(Some(KdfSer::Scrypt), Some(KdfSerParams::Scrypt(params))) => Kdf::Scrypt(params),
(Some(_), Some(_)) => return Err(V::Error::custom("Invalid cipherparams")),
(None, _) => return Err(V::Error::missing_field("kdf")),
(Some(_), None) => return Err(V::Error::missing_field("kdfparams")),
};
let mac = match mac {
Some(mac) => mac,
None => return Err(V::Error::missing_field("mac")),
};
let result = Crypto {
cipher: cipher,
ciphertext: ciphertext,
kdf: kdf,
mac: mac,
};
Ok(result)
}
}
impl Serialize for Crypto {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where S: Serializer
{
let mut crypto = serializer.serialize_struct("Crypto", 6)?;
match self.cipher {
Cipher::Aes128Ctr(ref params) => {
crypto.serialize_field("cipher", &CipherSer::Aes128Ctr)?;
crypto.serialize_field("cipherparams", params)?;
},
}
crypto.serialize_field("ciphertext", &self.ciphertext)?;
match self.kdf {
Kdf::Pbkdf2(ref params) => {
crypto.serialize_field("kdf", &KdfSer::Pbkdf2)?;
crypto.serialize_field("kdfparams", params)?;
},
Kdf::Scrypt(ref params) => {
crypto.serialize_field("kdf", &KdfSer::Scrypt)?;
crypto.serialize_field("kdfparams", params)?;
},
}
crypto.serialize_field("mac", &self.mac)?;
crypto.end()
}
}<|fim▁end|> | where E: Error |
<|file_name|>moveconnectionlabel.py<|end_file_name|><|fim▁begin|>from umlfri2.application.commands.base import Command
from umlfri2.application.events.diagram import ConnectionMovedEvent
class MoveConnectionLabelCommand(Command):
def __init__(self, connection_label, delta):
self.__diagram_name = connection_label.connection.diagram.get_display_name()
self.__connection_label = connection_label
self.__delta = delta
self.__label_position = None
<|fim▁hole|> def _do(self, ruler):
self.__label_position = self.__connection_label.get_position(ruler)
self._redo(ruler)
def _redo(self, ruler):
self.__connection_label.move(ruler, self.__label_position + self.__delta)
def _undo(self, ruler):
self.__connection_label.move(ruler, self.__label_position)
def get_updates(self):
yield ConnectionMovedEvent(self.__connection_label.connection)<|fim▁end|> | @property
def description(self):
return "Moved label on connection in diagram {0}".format(self.__diagram_name)
|
<|file_name|>slice_hound.py<|end_file_name|><|fim▁begin|>import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Vector
def addTemplate(core):
mobileTemplate = MobileTemplate()
mobileTemplate.setCreatureName('slice_hound')
mobileTemplate.setLevel(28)
mobileTemplate.setDifficulty(Difficulty.NORMAL)
mobileTemplate.setMinSpawnDistance(4)
mobileTemplate.setMaxSpawnDistance(8)<|fim▁hole|> mobileTemplate.setHideType("Bristly Hide")
mobileTemplate.setHideAmount(35)
mobileTemplate.setBoneType("Animal Bones")
mobileTemplate.setBoneAmount(30)
mobileTemplate.setSocialGroup("slice hound")
mobileTemplate.setAssistRange(2)
mobileTemplate.setStalker(False)
mobileTemplate.setOptionsBitmask(Options.ATTACKABLE)
templates = Vector()
templates.add('object/mobile/shared_corellian_slice_hound.iff')
mobileTemplate.setTemplates(templates)
weaponTemplates = Vector()
weapontemplate = WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic')
weaponTemplates.add(weapontemplate)
mobileTemplate.setWeaponTemplateVector(weaponTemplates)
attacks = Vector()
attacks.add('bm_bite_2')
attacks.add('bm_hamstring_2')
mobileTemplate.setDefaultAttack('creatureMeleeAttack')
mobileTemplate.setAttacks(attacks)
core.spawnService.addMobileTemplate('slice_hound', mobileTemplate)
return<|fim▁end|> | mobileTemplate.setDeathblow(False)
mobileTemplate.setScale(1)
mobileTemplate.setMeatType("Carnivore Meat")
mobileTemplate.setMeatAmount(65) |
<|file_name|>automated_params.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
module that specified how we choose paramaters based on current search database
properties
"""
from __future__ import absolute_import, division, print_function
#import six
import utool as ut
#import numpy as np
#import vtool as vt
#from ibeis.algo.hots import hstypes
#from ibeis.algo.hots import match_chips4 as mc4
#from ibeis.algo.hots import distinctiveness_normalizer
#from six.moves import filter
print, print_, printDBG, rrr, profile = ut.inject(__name__, '[autoparams]')
@profile
def choose_vsmany_K(num_names, qaids, daids):
"""
TODO: Should also scale up the number of checks as well
method for choosing K in the initial vsmany queries
Ignore:
>>> # DISABLE_DOCTEST
>>> # Shows plot for K vs number of names
>>> from ibeis.algo.hots.automated_params import * # NOQA
>>> import ibeis
>>> from ibeis import constants as const
>>> ibs = ibeis.opendb(defaultdb='testdb1')
>>> valid_aids = ibs.get_valid_aids(species=const.TEST_SPECIES.ZEB_PLAIN)
>>> num_names = np.arange(0, 1000)
>>> num_names_slope = .1
>>> K_max = 10
>>> K_min = 1
>>> K_list = np.floor(num_names_slope * num_names)
>>> K_list[K_list > K_max] = K_max
>>> K_list[K_list < K_min] = K_min
>>> clip_index_list = np.where(K_list >= K_max)[0]
>>> clip_index = clip_index_list[min(len(clip_index_list) - 1, 10)]
>>> K_list = K_list[0:clip_index]
>>> num_names = num_names[0:clip_index]
>>> pt.plot2(num_names, K_list, x_label='num_names', y_label='K',
... equal_aspect=False, marker='g-', pad=1, dark=True)
>>> pt.update()
"""
#K = ibs.cfg.query_cfg.nn_cfg.K
# TODO: paramaterize in config
num_names_slope = .1 # increase K every fifty names
K_max = 10
K_min = 1
num_names_lower = K_min / num_names_slope
num_names_upper = K_max / num_names_slope
if num_names < num_names_lower:
K = K_min
elif num_names < num_names_upper:<|fim▁hole|> with ut.embed_on_exception_context:
if len(ut.intersect_ordered(qaids, daids)) > 0:
# if self is in query bump k
K += 1
return K
if __name__ == '__main__':
"""
CommandLine:
python -m ibeis.algo.hots.automated_params
python -m ibeis.algo.hots.automated_params --allexamples
python -m ibeis.algo.hots.automated_params --allexamples --noface --nosrc
"""
import multiprocessing
multiprocessing.freeze_support() # for win32
import utool as ut # NOQA
ut.doctest_funcs()<|fim▁end|> | K = num_names_slope * num_names
else:
K = K_max
|
<|file_name|>node_netbsd.go<|end_file_name|><|fim▁begin|>package restic
import "syscall"
func (node Node) restoreSymlinkTimestamps(path string, utimes [2]syscall.Timespec) error {
return nil
}
func (node Node) device() int {
return int(node.Device)
}
func (s statUnix) atim() syscall.Timespec { return s.Atimespec }
func (s statUnix) mtim() syscall.Timespec { return s.Mtimespec }
func (s statUnix) ctim() syscall.Timespec { return s.Ctimespec }
// Getxattr retrieves extended attribute data associated with path.
func Getxattr(path, name string) ([]byte, error) {
return nil, nil
}<|fim▁hole|>
// Listxattr retrieves a list of names of extended attributes associated with the
// given path in the file system.
func Listxattr(path string) ([]string, error) {
return nil, nil
}
// Setxattr associates name and data together as an attribute of path.
func Setxattr(path, name string, data []byte) error {
return nil
}<|fim▁end|> | |
<|file_name|>reader.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A wrapper around any Reader to treat it as an RNG.
use option::{Some, None};
use rt::io::Reader;
use rt::io::ReaderByteConversions;
use rand::Rng;
/// An RNG that reads random bytes straight from a `Reader`. This will
/// work best with an infinite reader, but this is not required.
///
/// It will fail if it there is insufficient data to fulfill a request.
///
/// # Example
///
/// ```rust
/// use std::rand::{reader, Rng};
/// use std::rt::io::mem;
///
/// fn main() {
/// let mut rng = reader::ReaderRng::new(mem::MemReader::new(~[1,2,3,4,5,6,7,8]));
/// println!("{:x}", rng.gen::<uint>());
/// }
/// ```
pub struct ReaderRng<R> {
priv reader: R
}
impl<R: Reader> ReaderRng<R> {
/// Create a new `ReaderRng` from a `Reader`.
pub fn new(r: R) -> ReaderRng<R> {
ReaderRng {
reader: r
}
}
}
impl<R: Reader> Rng for ReaderRng<R> {
fn next_u32(&mut self) -> u32 {
// This is designed for speed: reading a LE integer on a LE
// platform just involves blitting the bytes into the memory
// of the u32, similarly for BE on BE; avoiding byteswapping.
if cfg!(target_endian="little") {
self.reader.read_le_u32_()
} else {
self.reader.read_be_u32_()
}
}
fn next_u64(&mut self) -> u64 {
// see above for explanation.
if cfg!(target_endian="little") {
self.reader.read_le_u64_()
} else {
self.reader.read_be_u64_()
}
}
fn fill_bytes(&mut self, v: &mut [u8]) {
if v.len() == 0 { return }
match self.reader.read(v) {
Some(n) if n == v.len() => return,
Some(n) => fail2!("ReaderRng.fill_bytes could not fill buffer: \
read {} out of {} bytes.", n, v.len()),
None => fail2!("ReaderRng.fill_bytes reached eof.")
}
}<|fim▁hole|>
#[cfg(test)]
mod test {
use super::*;
use rt::io::mem::MemReader;
use cast;
#[test]
fn test_reader_rng_u64() {
// transmute from the target to avoid endianness concerns.
let v = ~[1u64, 2u64, 3u64];
let bytes: ~[u8] = unsafe {cast::transmute(v)};
let mut rng = ReaderRng::new(MemReader::new(bytes));
assert_eq!(rng.next_u64(), 1);
assert_eq!(rng.next_u64(), 2);
assert_eq!(rng.next_u64(), 3);
}
#[test]
fn test_reader_rng_u32() {
// transmute from the target to avoid endianness concerns.
let v = ~[1u32, 2u32, 3u32];
let bytes: ~[u8] = unsafe {cast::transmute(v)};
let mut rng = ReaderRng::new(MemReader::new(bytes));
assert_eq!(rng.next_u32(), 1);
assert_eq!(rng.next_u32(), 2);
assert_eq!(rng.next_u32(), 3);
}
#[test]
fn test_reader_rng_fill_bytes() {
let v = [1u8, 2, 3, 4, 5, 6, 7, 8];
let mut w = [0u8, .. 8];
let mut rng = ReaderRng::new(MemReader::new(v.to_owned()));
rng.fill_bytes(w);
assert_eq!(v, w);
}
#[test]
#[should_fail]
fn test_reader_rng_insufficient_bytes() {
let mut rng = ReaderRng::new(MemReader::new(~[]));
let mut v = [0u8, .. 3];
rng.fill_bytes(v);
}
}<|fim▁end|> | } |
<|file_name|>jss-plugin-props-sort.js<|end_file_name|><|fim▁begin|>(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
typeof define === 'function' && define.amd ? define(['exports'], factory) :
(global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.jssPluginPropsSort = {}));
}(this, (function (exports) { 'use strict';
/**
* Sort props by length.
*/
function jssPropsSort() {
var sort = function sort(prop0, prop1) {
if (prop0.length === prop1.length) {
return prop0 > prop1 ? 1 : -1;
}
return prop0.length - prop1.length;
};
return {
onProcessStyle: function onProcessStyle(style, rule) {
if (rule.type !== 'style') return style;
var newStyle = {};
var props = Object.keys(style).sort(sort);
for (var i = 0; i < props.length; i++) {
newStyle[props[i]] = style[props[i]];
}
return newStyle;
}
};
}
<|fim▁hole|> Object.defineProperty(exports, '__esModule', { value: true });
})));
//# sourceMappingURL=jss-plugin-props-sort.js.map<|fim▁end|> | exports.default = jssPropsSort;
|
<|file_name|>E0608.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at<|fim▁hole|>// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Compile-fail UI test: indexing into a scalar (`u8`) must be rejected with
// E0608 ("cannot index into a value of this type"). The `//~ ERROR`
// annotation is matched by the compiletest harness — do not remove it.
fn main() {
    0u8[2]; //~ ERROR E0608
}
// |
<|file_name|>length_parameter.py<|end_file_name|><|fim▁begin|>import numpy as np
import bpy
from bpy.props import FloatProperty, EnumProperty, IntProperty, BoolProperty
from sverchok.node_tree import SverchCustomTreeNode
from sverchok.data_structure import updateNode, zip_long_repeat, ensure_nesting_level
from sverchok.utils.curve import SvCurveLengthSolver, SvCurve
from sverchok.utils.nodes_mixins.draft_mode import DraftMode
class SvCurveLengthParameterNode(DraftMode, bpy.types.Node, SverchCustomTreeNode):
"""
Triggers: Curve Length Parameter
Tooltip: Solve curve length (natural) parameter
"""
bl_idname = 'SvExCurveLengthParameterNode'
bl_label = 'Curve Length Parameter'
bl_icon = 'OUTLINER_OB_EMPTY'
sv_icon = 'SV_CURVE_LENGTH_P'
resolution : IntProperty(
name = 'Resolution',
min = 1,
default = 50,
update = updateNode)
length : FloatProperty(
name = "Length",
min = 0.0,
default = 0.5,
update = updateNode)
length_draft : FloatProperty(
name = "[D] Length",
min = 0.0,
default = 0.5,
update = updateNode)
modes = [('SPL', 'Cubic', "Cubic Spline", 0),
('LIN', 'Linear', "Linear Interpolation", 1)]
mode: EnumProperty(name='Interpolation mode', default="SPL", items=modes, update=updateNode)
def update_sockets(self, context):
self.inputs['Length'].hide_safe = self.eval_mode != 'MANUAL'
self.inputs['Samples'].hide_safe = self.eval_mode != 'AUTO'
updateNode(self, context)
eval_modes = [
('AUTO', "Automatic", "Evaluate the curve at evenly spaced points", 0),
('MANUAL', "Manual", "Evaluate the curve at specified points", 1)<|fim▁hole|>
eval_mode : EnumProperty(
name = "Mode",
items = eval_modes,
default = 'AUTO',
update = update_sockets)
sample_size : IntProperty(
name = "Samples",
default = 50,
min = 4,
update = updateNode)
specify_accuracy : BoolProperty(
name = "Specify accuracy",
default = False,
update = updateNode)
accuracy : IntProperty(
name = "Accuracy",
default = 3,
min = 0,
update = updateNode)
accuracy_draft : IntProperty(
name = "[D] Accuracy",
default = 1,
min = 0,
update = updateNode)
draft_properties_mapping = dict(length = 'length_draft', accuracy = 'accuracy_draft')
def sv_init(self, context):
self.inputs.new('SvCurveSocket', "Curve")
self.inputs.new('SvStringsSocket', "Resolution").prop_name = 'resolution'
self.inputs.new('SvStringsSocket', "Length").prop_name = 'length'
self.inputs.new('SvStringsSocket', "Samples").prop_name = 'sample_size'
self.outputs.new('SvStringsSocket', "T")
self.outputs.new('SvVerticesSocket', "Vertices")
self.update_sockets(context)
def draw_buttons(self, context, layout):
layout.prop(self, 'eval_mode', expand=True)
layout.prop(self, 'specify_accuracy')
if self.specify_accuracy:
if self.id_data.sv_draft:
layout.prop(self, 'accuracy_draft')
else:
layout.prop(self, 'accuracy')
def draw_buttons_ext(self, context, layout):
self.draw_buttons(context, layout)
layout.prop(self, 'mode', expand=True)
def does_support_draft_mode(self):
return True
def draw_label(self):
label = self.label or self.name
if self.id_data.sv_draft:
label = "[D] " + label
return label
def process(self):
if not any((s.is_linked for s in self.outputs)):
return
need_eval = self.outputs['Vertices'].is_linked
curves_s = self.inputs['Curve'].sv_get()
resolution_s = self.inputs['Resolution'].sv_get()
length_s = self.inputs['Length'].sv_get()
samples_s = self.inputs['Samples'].sv_get(default=[[]])
length_s = ensure_nesting_level(length_s, 3)
resolution_s = ensure_nesting_level(resolution_s, 2)
samples_s = ensure_nesting_level(samples_s, 2)
curves_s = ensure_nesting_level(curves_s, 2, data_types=(SvCurve,))
ts_out = []
verts_out = []
for curves, resolutions, input_lengths_i, samples_i in zip_long_repeat(curves_s, resolution_s, length_s, samples_s):
for curve, resolution, input_lengths, samples in zip_long_repeat(curves, resolutions, input_lengths_i, samples_i):
mode = self.mode
accuracy = self.accuracy
if self.id_data.sv_draft:
mode = 'LIN'
accuracy = self.accuracy_draft
if self.specify_accuracy:
tolerance = 10 ** (-accuracy)
else:
tolerance = None
solver = SvCurveLengthSolver(curve)
solver.prepare(mode, resolution, tolerance=tolerance)
if self.eval_mode == 'AUTO':
total_length = solver.get_total_length()
input_lengths = np.linspace(0.0, total_length, num = samples)
else:
input_lengths = np.array(input_lengths)
ts = solver.solve(input_lengths)
ts_out.append(ts.tolist())
if need_eval:
verts = curve.evaluate_array(ts).tolist()
verts_out.append(verts)
self.outputs['T'].sv_set(ts_out)
self.outputs['Vertices'].sv_set(verts_out)
def register():
    # Called by Sverchok/Blender when the add-on loads; exposes the node class.
    bpy.utils.register_class(SvCurveLengthParameterNode)
def unregister():
    # Called on add-on unload; must mirror register() exactly.
    bpy.utils.unregister_class(SvCurveLengthParameterNode)
<|file_name|>scan.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# coding=utf-8
# code by [email protected]
from threading import Thread
from telnetlib import Telnet
import requests
import sqlite3
import queue
import time
import sys
import os
def ip2num(ip):
    """Pack a dotted-quad IPv4 string into its 32-bit integer value."""
    value = 0
    for octet in ip.split('.'):
        value = (value << 8) | int(octet)
    return value
def num2ip(num):
    """Format a 32-bit integer as a dotted-quad IPv4 string (inverse of ip2num)."""
    octets = [(num >> shift) & 0xff for shift in (24, 16, 8, 0)]
    return '.'.join(str(o) for o in octets)
def ip_range(start, end):
    """Return every address from start to end inclusive, skipping x.y.z.0
    network addresses (low octet == 0)."""
    result = []
    for num in range(ip2num(start), ip2num(end) + 1):
        if num & 0xff:  # low octet zero => network address, not a host
            result.append(num2ip(num))
    return result
class Database:
    """Thin wrapper around the local SQLite store of scan results."""
    # DB file lives next to the running script.
    db = sys.path[0] + "/TPLINK_KEY.db"
    # NOTE(review): `charset` is never read anywhere in this file; sqlite3
    # always stores TEXT as UTF-8. Kept to preserve the class interface.
    charset = 'utf8'
    def __init__(self):
        self.connection = sqlite3.connect(self.db)
        # Return str for TEXT columns instead of the default decoding.
        self.connection.text_factory = str
        self.cursor = self.connection.cursor()
    def insert(self, query, params):
        # Parameterised INSERT: commit on success, print + rollback on error.
        try:
            self.cursor.execute(query, params)
            self.connection.commit()
        except Exception as e:
            print(e)
            self.connection.rollback()
    def update(self, query, params):
        # Identical to insert(); kept separate for call-site readability.
        try:
            self.cursor.execute(query, params)
            self.connection.commit()
        except Exception as e:
            print(e)
            self.connection.rollback()
    def query(self, query, params):
        # Uses a fresh cursor so reads don't disturb the shared write cursor.
        cursor = self.connection.cursor()
        cursor.execute(query, params)
        return cursor.fetchall()
    def __del__(self):
        # Best-effort close when the wrapper is garbage-collected.
        self.connection.close()
def b_thread(ip_address_list):
thread_list = []
queue_list = queue.Queue()
hosts = ip_address_list
for host in hosts:
queue_list.put(host)
for x in range(0, int(sys.argv[1])):
thread_list.append(tThread(queue_list))
for t in thread_list:
try:
t.daemon = True
t.start()
except Exception as e:
print(e)
for t in thread_list:<|fim▁hole|> t.join()
class tThread(Thread):
    """Worker thread: pops hosts off the shared queue and probes each one."""
    def __init__(self, queue_obj):
        Thread.__init__(self)
        self.queue = queue_obj
    def run(self):
        # Drain the queue until empty. NOTE(review): empty() is racy across
        # workers — a thread may exit while another still holds items; for a
        # best-effort scanner this is acceptable.
        while not self.queue.empty():
            host = self.queue.get()
            try:
                get_info(host)
            except Exception as e:
                print(e)
                continue
def get_position_by_ip(host):
    """Geolocate `host` via the ip-api.com JSON endpoint.

    Always returns a 4-element list [country, region, city, isp] so callers
    can index the result safely; the elements are None when the lookup fails.
    (The original returned None on failure, which crashed the caller's
    `position_data[0]` access.)
    """
    ip_url = "http://ip-api.com/json/{ip}?lang=zh-CN".format(ip=host)
    header = {"User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:45.0) Gecko/20100101 Firefox/45.0"}
    try:
        json_data = requests.get(url=ip_url, headers=header, timeout=10).json()
        return [json_data.get("country"), json_data.get('regionName'),
                json_data.get('city'), json_data.get('isp')]
    except Exception as e:
        print(e)
        # Degrade gracefully instead of implicitly returning None.
        return [None, None, None, None]
def get_info(host):
    """Try the factory-default admin/admin telnet login on `host`, scrape the
    wifi SSID/key and LAN MAC from the router CLI, and insert previously
    unseen routers into the SQLite scanlog.

    NOTE(review): telnetlib in Python 3 expects bytes for read_until()/write();
    this code passes str (Python-2 style) — confirm the target interpreter.
    """
    username = "admin"
    password = "admin"
    telnet_timeout = 15
    cmd_timeout = 5
    try:
        t = Telnet(host, timeout=telnet_timeout)
        # Log in with default credentials.
        t.read_until("username:", cmd_timeout)
        t.write(username + "\n")
        t.read_until("password:", cmd_timeout)
        t.write(password + "\n")
        # Dump the wireless config, then the LAN info.
        t.write("wlctl show\n")
        t.read_until("SSID", cmd_timeout)
        wifi_str = t.read_very_eager()
        t.write("lan show info\n")
        t.read_until("MACAddress", cmd_timeout)
        lan_str = t.read_very_eager()
        t.close()
        if len(wifi_str) > 0:
            # clear extra space
            wifi_str = "".join(wifi_str.split())
            wifi_str = wifi_str  # NOTE(review): no-op self-assignment.
            # get SID KEY MAC
            # The slices below assume the firmware's exact output layout
            # (fields delimited by 'QSS', 'Key=', 'cmd', '__') — TODO confirm
            # against a real TP-LINK/MERCURY banner.
            wifi_ssid = wifi_str[1:wifi_str.find('QSS')]
            wifi_key = wifi_str[wifi_str.find('Key=') + 4:wifi_str.find('cmd')] if wifi_str.find('Key=') != -1 else '无密码'
            router_mac = lan_str[1:lan_str.find('__')].replace('\r\n', '')
            current_time = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
            try:
                my_sqlite_db = Database()
                # Skip routers already recorded with identical SSID/key/MAC.
                query_info = """select * from scanlog where ssid=? and key=? and mac=?"""
                query_result = my_sqlite_db.query(query_info, [wifi_ssid, wifi_key, router_mac])
                if len(query_result) < 1:
                    position_data = get_position_by_ip(host)
                    country = position_data[0]
                    province = position_data[1]
                    city = position_data[2]
                    isp = position_data[3]
                    insert_info = """INSERT INTO scanlog (`host`,`mac`,`ssid`,`wifikey`,`country`,`province`,`city`,`isp`) VALUES (?,?,?,?,?,?,?,?)"""
                    my_sqlite_db.insert(insert_info, [host, router_mac, wifi_ssid, wifi_key, country, province, city, isp])
                    print('[√] [%s] Info %s %s %s => Inserted!' % (current_time, host, wifi_ssid, wifi_key))
                else:
                    print('[x] [%s] Found %s %s %s in DB, do nothing!' % (current_time, host, wifi_ssid, wifi_key))
            except Exception as e:
                print(e)
    except Exception as e:
        # Unreachable/filtered/non-telnet hosts are expected; stay quiet.
        pass
if __name__ == '__main__':
    print('==========================================')
    print(' Scan TPLINK(MERCURY) wifi key by telnet')
    print(' Author 92ez.com')
    print('==========================================')
    # Usage: scan.py <thread_count> <startIP-endIP>
    begin_ip = sys.argv[2].split('-')[0]
    end_ip = sys.argv[2].split('-')[1]
    ip_list = ip_range(begin_ip, end_ip)
    current_pid = os.getpid()
    print('\n[*] Total %d IP...' % len(ip_list))
    print('\n================ Running =================')
    try:
        b_thread(ip_list)
    except KeyboardInterrupt:
        # Hard-kill the whole process so daemon worker threads die with it.
        print('\n[*] Kill all thread.')
        os.kill(current_pid, 9)
<|file_name|>test-hyperscript.js<|end_file_name|><|fim▁begin|>"use strict"
var o = require("ospec")
var m = require("../../render/hyperscript")
o.spec("hyperscript", function() {
o.spec("selector", function() {
o("throws on null selector", function(done) {
try {m(null)} catch(e) {done()}
})
o("throws on non-string selector w/o a view property", function(done) {
try {m({})} catch(e) {done()}
})
o("handles tag in selector", function() {
var vnode = m("a")
o(vnode.tag).equals("a")
})
o("class and className normalization", function(){
o(m("a", {
class: null
}).attrs).deepEquals({
class: null
})
o(m("a", {
class: undefined
}).attrs).deepEquals({
class: null
})
o(m("a", {
class: false
}).attrs).deepEquals({
class: null,
className: false
})
o(m("a", {
class: true
}).attrs).deepEquals({
class: null,
className: true
})
o(m("a.x", {
class: null
}).attrs).deepEquals({
class: null,
className: "x"
})
o(m("a.x", {
class: undefined
}).attrs).deepEquals({
class: null,
className: "x"
})
o(m("a.x", {
class: false
}).attrs).deepEquals({
class: null,
className: "x false"
})
o(m("a.x", {
class: true
}).attrs).deepEquals({
class: null,
className: "x true"
})
o(m("a", {
className: null
}).attrs).deepEquals({
className: null
})
o(m("a", {
className: undefined
}).attrs).deepEquals({
className: undefined
})
o(m("a", {
className: false
}).attrs).deepEquals({
className: false
})
o(m("a", {
className: true
}).attrs).deepEquals({
className: true
})
o(m("a.x", {
className: null
}).attrs).deepEquals({
className: "x"
})
o(m("a.x", {
className: undefined
}).attrs).deepEquals({
className: "x"
})
o(m("a.x", {
className: false
}).attrs).deepEquals({
className: "x false"
})
o(m("a.x", {
className: true
}).attrs).deepEquals({
className: "x true"
})
})
o("handles class in selector", function() {
var vnode = m(".a")
o(vnode.tag).equals("div")
o(vnode.attrs.className).equals("a")
})
o("handles many classes in selector", function() {
var vnode = m(".a.b.c")
o(vnode.tag).equals("div")
o(vnode.attrs.className).equals("a b c")
})
o("handles id in selector", function() {
var vnode = m("#a")
o(vnode.tag).equals("div")
o(vnode.attrs.id).equals("a")
})
o("handles attr in selector", function() {
var vnode = m("[a=b]")
o(vnode.tag).equals("div")
o(vnode.attrs.a).equals("b")
})
o("handles many attrs in selector", function() {
var vnode = m("[a=b][c=d]")
o(vnode.tag).equals("div")
o(vnode.attrs.a).equals("b")
o(vnode.attrs.c).equals("d")
})
o("handles attr w/ spaces in selector", function() {
var vnode = m("[a = b]")
o(vnode.tag).equals("div")
o(vnode.attrs.a).equals("b")
})
o("handles attr w/ quotes in selector", function() {
var vnode = m("[a='b']")
o(vnode.tag).equals("div")
o(vnode.attrs.a).equals("b")
})
o("handles attr w/ quoted square bracket", function() {
var vnode = m("[x][a='[b]'].c")
o(vnode.tag).equals("div")
o(vnode.attrs.x).equals(true)
o(vnode.attrs.a).equals("[b]")
o(vnode.attrs.className).equals("c")
})
o("handles attr w/ unmatched square bracket", function() {
var vnode = m("[a=']'].c")
o(vnode.tag).equals("div")
o(vnode.attrs.a).equals("]")
o(vnode.attrs.className).equals("c")
})
o("handles attr w/ quoted square bracket and quote", function() {
var vnode = m("[a='[b\"\\']'].c") // `[a='[b"\']']`
o(vnode.tag).equals("div")
o(vnode.attrs.a).equals("[b\"']") // `[b"']`
o(vnode.attrs.className).equals("c")
})
o("handles attr w/ quoted square containing escaped square bracket", function() {
var vnode = m("[a='[\\]]'].c") // `[a='[\]]']`
o(vnode.tag).equals("div")
o(vnode.attrs.a).equals("[\\]]") // `[\]]`
o(vnode.attrs.className).equals("c")
})
o("handles attr w/ backslashes", function() {
var vnode = m("[a='\\\\'].c") // `[a='\\']`
o(vnode.tag).equals("div")
o(vnode.attrs.a).equals("\\")
o(vnode.attrs.className).equals("c")
})
o("handles attr w/ quotes and spaces in selector", function() {
var vnode = m("[a = 'b']")
o(vnode.tag).equals("div")
o(vnode.attrs.a).equals("b")
})
o("handles many attr w/ quotes and spaces in selector", function() {
var vnode = m("[a = 'b'][c = 'd']")
o(vnode.tag).equals("div")
o(vnode.attrs.a).equals("b")
o(vnode.attrs.c).equals("d")
})
o("handles tag, class, attrs in selector", function() {
var vnode = m("a.b[c = 'd']")
o(vnode.tag).equals("a")
o(vnode.attrs.className).equals("b")
o(vnode.attrs.c).equals("d")
})
o("handles tag, mixed classes, attrs in selector", function() {
var vnode = m("a.b[c = 'd'].e[f = 'g']")
o(vnode.tag).equals("a")
o(vnode.attrs.className).equals("b e")
o(vnode.attrs.c).equals("d")
o(vnode.attrs.f).equals("g")
})
o("handles attr without value", function() {
var vnode = m("[a]")
o(vnode.tag).equals("div")
o(vnode.attrs.a).equals(true)
})
o("handles explicit empty string value for input", function() {
var vnode = m('input[value=""]')
o(vnode.tag).equals("input")
o(vnode.attrs.value).equals("")
})
o("handles explicit empty string value for option", function() {
var vnode = m('option[value=""]')
o(vnode.tag).equals("option")
o(vnode.attrs.value).equals("")
})
})
// Plain-element attribute handling: primitive values are stored verbatim,
// `key` is lifted off attrs onto the vnode, and selector classes merge with
// the `class` / `className` properties.
o.spec("attrs", function() {
	o("handles string attr", function() {
		var vnode = m("div", {a: "b"})
		o(vnode.tag).equals("div")
		o(vnode.attrs.a).equals("b")
	})
	o("handles falsy string attr", function() {
		var vnode = m("div", {a: ""})
		o(vnode.tag).equals("div")
		o(vnode.attrs.a).equals("")
	})
	o("handles number attr", function() {
		var vnode = m("div", {a: 1})
		o(vnode.tag).equals("div")
		o(vnode.attrs.a).equals(1)
	})
	o("handles falsy number attr", function() {
		var vnode = m("div", {a: 0})
		o(vnode.tag).equals("div")
		o(vnode.attrs.a).equals(0)
	})
	o("handles boolean attr", function() {
		var vnode = m("div", {a: true})
		o(vnode.tag).equals("div")
		o(vnode.attrs.a).equals(true)
	})
	o("handles falsy boolean attr", function() {
		var vnode = m("div", {a: false})
		o(vnode.tag).equals("div")
		o(vnode.attrs.a).equals(false)
	})
	// `key` is special-cased: moved to vnode.key, leaving attrs empty.
	o("handles only key in attrs", function() {
		var vnode = m("div", {key:"a"})
		o(vnode.tag).equals("div")
		o(vnode.attrs).deepEquals({})
		o(vnode.key).equals("a")
	})
	o("handles many attrs", function() {
		var vnode = m("div", {a: "b", c: "d"})
		o(vnode.tag).equals("div")
		o(vnode.attrs.a).equals("b")
		o(vnode.attrs.c).equals("d")
	})
	o("handles className attrs property", function() {
		var vnode = m("div", {className: "a"})
		o(vnode.attrs.className).equals("a")
	})
	// `[class=a]` in the selector is normalised to className.
	o("handles 'class' as a verbose attribute declaration", function() {
		var vnode = m("[class=a]")
		o(vnode.attrs.className).equals("a")
	})
	o("handles merging classes w/ class property", function() {
		var vnode = m(".a", {class: "b"})
		o(vnode.attrs.className).equals("a b")
	})
	o("handles merging classes w/ className property", function() {
		var vnode = m(".a", {className: "b"})
		o(vnode.attrs.className).equals("a b")
	})
})
// Same attribute-handling matrix as the "attrs" spec, but for custom
// elements (hyphenated tag names), which take a different code path.
o.spec("custom element attrs", function() {
	o("handles string attr", function() {
		var vnode = m("custom-element", {a: "b"})
		o(vnode.tag).equals("custom-element")
		o(vnode.attrs.a).equals("b")
	})
	o("handles falsy string attr", function() {
		var vnode = m("custom-element", {a: ""})
		o(vnode.tag).equals("custom-element")
		o(vnode.attrs.a).equals("")
	})
	o("handles number attr", function() {
		var vnode = m("custom-element", {a: 1})
		o(vnode.tag).equals("custom-element")
		o(vnode.attrs.a).equals(1)
	})
	o("handles falsy number attr", function() {
		var vnode = m("custom-element", {a: 0})
		o(vnode.tag).equals("custom-element")
		o(vnode.attrs.a).equals(0)
	})
	o("handles boolean attr", function() {
		var vnode = m("custom-element", {a: true})
		o(vnode.tag).equals("custom-element")
		o(vnode.attrs.a).equals(true)
	})
	o("handles falsy boolean attr", function() {
		var vnode = m("custom-element", {a: false})
		o(vnode.tag).equals("custom-element")
		o(vnode.attrs.a).equals(false)
	})
	o("handles only key in attrs", function() {
		var vnode = m("custom-element", {key:"a"})
		o(vnode.tag).equals("custom-element")
		o(vnode.attrs).deepEquals({})
		o(vnode.key).equals("a")
	})
	o("handles many attrs", function() {
		var vnode = m("custom-element", {a: "b", c: "d"})
		o(vnode.tag).equals("custom-element")
		o(vnode.attrs.a).equals("b")
		o(vnode.attrs.c).equals("d")
	})
	o("handles className attrs property", function() {
		var vnode = m("custom-element", {className: "a"})
		o(vnode.attrs.className).equals("a")
	})
	// String concatenation primes valueOf() (-> ".valueOf" appended to the
	// selector), while the className attr is cast via toString(); the merged
	// class list is therefore "valueOf toString", mirroring browser casting.
	o("casts className using toString like browsers", function() {
		const className = {
			valueOf: () => ".valueOf",
			toString: () => "toString"
		}
		var vnode = m("custom-element" + className, {className: className})
		o(vnode.attrs.className).equals("valueOf toString")
	})
})
o.spec("children", function() {
o("handles string single child", function() {
var vnode = m("div", {}, ["a"])
o(vnode.children[0].children).equals("a")
})
o("handles falsy string single child", function() {
var vnode = m("div", {}, [""])
o(vnode.children[0].children).equals("")
})
o("handles number single child", function() {
var vnode = m("div", {}, [1])
o(vnode.children[0].children).equals("1")
})
o("handles falsy number single child", function() {
var vnode = m("div", {}, [0])
o(vnode.children[0].children).equals("0")
})
o("handles boolean single child", function() {
var vnode = m("div", {}, [true])
o(vnode.children).deepEquals([null])
})
o("handles falsy boolean single child", function() {
var vnode = m("div", {}, [false])
o(vnode.children).deepEquals([null])
})
o("handles null single child", function() {
var vnode = m("div", {}, [null])
<|fim▁hole|> var vnode = m("div", {}, [undefined])
o(vnode.children).deepEquals([null])
})
o("handles multiple string children", function() {
var vnode = m("div", {}, ["", "a"])
o(vnode.children[0].tag).equals("#")
o(vnode.children[0].children).equals("")
o(vnode.children[1].tag).equals("#")
o(vnode.children[1].children).equals("a")
})
o("handles multiple number children", function() {
var vnode = m("div", {}, [0, 1])
o(vnode.children[0].tag).equals("#")
o(vnode.children[0].children).equals("0")
o(vnode.children[1].tag).equals("#")
o(vnode.children[1].children).equals("1")
})
o("handles multiple boolean children", function() {
var vnode = m("div", {}, [false, true])
o(vnode.children).deepEquals([null, null])
})
o("handles multiple null/undefined child", function() {
var vnode = m("div", {}, [null, undefined])
o(vnode.children).deepEquals([null, null])
})
o("handles falsy number single child without attrs", function() {
var vnode = m("div", 0)
o(vnode.children[0].children).equals("0")
})
})
// Every legal permutation of m(selector, attrs?, ...children): attrs may be
// null/omitted, children may be arrays, varargs, text, fragments, or nested
// arrays. Text children become "#" vnodes; booleans/null/undefined -> null.
o.spec("permutations", function() {
	o("handles null attr and children", function() {
		var vnode = m("div", null, [m("a"), m("b")])
		o(vnode.children.length).equals(2)
		o(vnode.children[0].tag).equals("a")
		o(vnode.children[1].tag).equals("b")
	})
	o("handles null attr and child unwrapped", function() {
		var vnode = m("div", null, m("a"))
		o(vnode.children.length).equals(1)
		o(vnode.children[0].tag).equals("a")
	})
	o("handles null attr and children unwrapped", function() {
		var vnode = m("div", null, m("a"), m("b"))
		o(vnode.children.length).equals(2)
		o(vnode.children[0].tag).equals("a")
		o(vnode.children[1].tag).equals("b")
	})
	o("handles attr and children", function() {
		var vnode = m("div", {a: "b"}, [m("i"), m("s")])
		o(vnode.attrs.a).equals("b")
		o(vnode.children[0].tag).equals("i")
		o(vnode.children[1].tag).equals("s")
	})
	o("handles attr and child unwrapped", function() {
		var vnode = m("div", {a: "b"}, m("i"))
		o(vnode.attrs.a).equals("b")
		o(vnode.children[0].tag).equals("i")
	})
	o("handles attr and children unwrapped", function() {
		var vnode = m("div", {a: "b"}, m("i"), m("s"))
		o(vnode.attrs.a).equals("b")
		o(vnode.children[0].tag).equals("i")
		o(vnode.children[1].tag).equals("s")
	})
	o("handles attr and text children", function() {
		var vnode = m("div", {a: "b"}, ["c", "d"])
		o(vnode.attrs.a).equals("b")
		o(vnode.children[0].tag).equals("#")
		o(vnode.children[0].children).equals("c")
		o(vnode.children[1].tag).equals("#")
		o(vnode.children[1].children).equals("d")
	})
	o("handles attr and single string text child", function() {
		var vnode = m("div", {a: "b"}, ["c"])
		o(vnode.attrs.a).equals("b")
		o(vnode.children[0].children).equals("c")
	})
	o("handles attr and single falsy string text child", function() {
		var vnode = m("div", {a: "b"}, [""])
		o(vnode.attrs.a).equals("b")
		o(vnode.children[0].children).equals("")
	})
	o("handles attr and single number text child", function() {
		var vnode = m("div", {a: "b"}, [1])
		o(vnode.attrs.a).equals("b")
		o(vnode.children[0].children).equals("1")
	})
	o("handles attr and single falsy number text child", function() {
		var vnode = m("div", {a: "b"}, [0])
		o(vnode.attrs.a).equals("b")
		o(vnode.children[0].children).equals("0")
	})
	o("handles attr and single boolean text child", function() {
		var vnode = m("div", {a: "b"}, [true])
		o(vnode.attrs.a).equals("b")
		o(vnode.children).deepEquals([null])
	})
	// NOTE(review): despite the name, this passes [0] (a falsy number, same
	// as the test two above), not a boolean; the false case is the next test.
	o("handles attr and single falsy boolean text child", function() {
		var vnode = m("div", {a: "b"}, [0])
		o(vnode.attrs.a).equals("b")
		o(vnode.children[0].children).equals("0")
	})
	o("handles attr and single false boolean text child", function() {
		var vnode = m("div", {a: "b"}, [false])
		o(vnode.attrs.a).equals("b")
		o(vnode.children).deepEquals([null])
	})
	o("handles attr and single text child unwrapped", function() {
		var vnode = m("div", {a: "b"}, "c")
		o(vnode.attrs.a).equals("b")
		o(vnode.children[0].children).equals("c")
	})
	o("handles attr and text children unwrapped", function() {
		var vnode = m("div", {a: "b"}, "c", "d")
		o(vnode.attrs.a).equals("b")
		o(vnode.children[0].tag).equals("#")
		o(vnode.children[0].children).equals("c")
		o(vnode.children[1].tag).equals("#")
		o(vnode.children[1].children).equals("d")
	})
	o("handles children without attr", function() {
		var vnode = m("div", [m("i"), m("s")])
		o(vnode.attrs).deepEquals({})
		o(vnode.children[0].tag).equals("i")
		o(vnode.children[1].tag).equals("s")
	})
	o("handles child without attr unwrapped", function() {
		var vnode = m("div", m("i"))
		o(vnode.attrs).deepEquals({})
		o(vnode.children[0].tag).equals("i")
	})
	o("handles children without attr unwrapped", function() {
		var vnode = m("div", m("i"), m("s"))
		o(vnode.attrs).deepEquals({})
		o(vnode.children[0].tag).equals("i")
		o(vnode.children[1].tag).equals("s")
	})
	// One attrs object shared between two calls must not leak state between
	// them or be mutated by the selector-class merge.
	o("handles shared attrs", function() {
		var attrs = {a: "b"}
		var nodeA = m(".a", attrs)
		var nodeB = m(".b", attrs)
		o(nodeA.attrs.className).equals("a")
		o(nodeA.attrs.a).equals("b")
		o(nodeB.attrs.className).equals("b")
		o(nodeB.attrs.a).equals("b")
	})
	o("doesnt modify passed attributes object", function() {
		var attrs = {a: "b"}
		m(".a", attrs)
		o(attrs).deepEquals({a: "b"})
	})
	// Explicit attrs always win over selector attrs — even null/undefined.
	o("non-nullish attr takes precedence over selector", function() {
		o(m("[a=b]", {a: "c"}).attrs).deepEquals({a: "c"})
	})
	o("null attr takes precedence over selector", function() {
		o(m("[a=b]", {a: null}).attrs).deepEquals({a: null})
	})
	o("undefined attr takes precedence over selector", function() {
		o(m("[a=b]", {a: undefined}).attrs).deepEquals({a: undefined})
	})
	// Array children become fragment ("[") vnodes, at any nesting depth.
	o("handles fragment children without attr unwrapped", function() {
		var vnode = m("div", [m("i")], [m("s")])
		o(vnode.children[0].tag).equals("[")
		o(vnode.children[0].children[0].tag).equals("i")
		o(vnode.children[1].tag).equals("[")
		o(vnode.children[1].children[0].tag).equals("s")
	})
	o("handles children with nested array", function() {
		var vnode = m("div", [[m("i"), m("s")]])
		o(vnode.children[0].tag).equals("[")
		o(vnode.children[0].children[0].tag).equals("i")
		o(vnode.children[0].children[1].tag).equals("s")
	})
	o("handles children with deeply nested array", function() {
		var vnode = m("div", [[[m("i"), m("s")]]])
		o(vnode.children[0].tag).equals("[")
		o(vnode.children[0].children[0].tag).equals("[")
		o(vnode.children[0].children[0].children[0].tag).equals("i")
		o(vnode.children[0].children[0].children[1].tag).equals("s")
	})
})
// Component vnodes: the component itself becomes vnode.tag, and the view
// must NOT be invoked at hyperscript time (spy callCount stays 0).
o.spec("components", function() {
	o("works with POJOs", function() {
		var component = {
			view: function() {}
		}
		var vnode = m(component, {id: "a"}, "b")
		o(vnode.tag).equals(component)
		o(vnode.attrs.id).equals("a")
		o(vnode.children.length).equals(1)
		o(vnode.children[0]).equals("b")
	})
	o("works with constructibles", function() {
		var component = o.spy()
		component.prototype.view = function() {}
		var vnode = m(component, {id: "a"}, "b")
		o(component.callCount).equals(0)
		o(vnode.tag).equals(component)
		o(vnode.attrs.id).equals("a")
		o(vnode.children.length).equals(1)
		o(vnode.children[0]).equals("b")
	})
	o("works with closures", function () {
		var component = o.spy()
		var vnode = m(component, {id: "a"}, "b")
		o(component.callCount).equals(0)
		o(vnode.tag).equals(component)
		o(vnode.attrs.id).equals("a")
		o(vnode.children.length).equals(1)
		o(vnode.children[0]).equals("b")
	})
})
})<|fim▁end|> | o(vnode.children).deepEquals([null])
})
o("handles undefined single child", function() { |
<|file_name|>small_blurry_image.cc<|end_file_name|><|fim▁begin|>// Copyright 2008 Isis Innovation Limited
#include "ptam/construct/small_blurry_image.h"
#include <cvd/utility.h>
#include <cvd/convolution.h>
#include <cvd/vision.h>
#include <TooN/se2.h>
#include <TooN/Cholesky.h>
#include <TooN/wls.h>
//using namespace CVD;
using namespace std;
namespace ptam {
// Shared SBI size for all instances; set lazily from the first KeyFrame seen
// (half the level-3 pyramid image). (-1,-1) marks "not yet initialised".
CVD::ImageRef SmallBlurryImage::mirSize(-1,-1);
// Construct directly from a keyframe, blurring the template by dBlur.
SmallBlurryImage::SmallBlurryImage(KeyFrame &kf, double dBlur) {
  mbMadeJacs = false;
  MakeFromKF(kf, dBlur);
}
// Empty SBI; caller must invoke MakeFromKF() before use.
SmallBlurryImage::SmallBlurryImage() {
  mbMadeJacs = false;
}
// Make a SmallBlurryImage from a KeyFrame. This fills in the mimSmall
// image (which is just a small un-blurred version of the KF) and
// mimTemplate (which is a floating-point, zero-mean blurred version
// of the above).
void SmallBlurryImage::MakeFromKF(KeyFrame &kf, double dBlur) {
  // Lazily fix the shared SBI size from the first keyframe: half of the
  // coarsest (level-3) pyramid image.
  if(mirSize[0] == -1)
    mirSize = kf.aLevels[3].im.size() / 2;
  mbMadeJacs = false;
  mimSmall.resize(mirSize);
  mimTemplate.resize(mirSize);
  mbMadeJacs = false;  // NOTE(review): redundant — already cleared above.
  // CVD 2x downsample of the level-3 image into mimSmall.
  halfSample(kf.aLevels[3].im, mimSmall);
  // First pass: accumulate the mean intensity of the small image.
  CVD::ImageRef ir;
  unsigned int nSum = 0;
  do
    nSum += mimSmall[ir];
  while(ir.next(mirSize));
  float fMean = ((float) nSum) / mirSize.area();
  // Second pass: subtract the mean so the template is zero-mean.
  ir.home();
  do
    mimTemplate[ir] = mimSmall[ir] - fMean;
  while(ir.next(mirSize));
  // Finally blur the zero-mean template in place.
  convolveGaussian(mimTemplate, dBlur);
}
// Make the jacobians (actually, no more than a gradient image)
// of the blurred template: central differences of mimTemplate, with the
// one-pixel border left at zero (no valid neighbours there).
void SmallBlurryImage::MakeJacs() {
  mimImageJacs.resize(mirSize);
  // Fill in the gradient image
  CVD::ImageRef ir;
  do {
    TooN::Vector<2> &v2Grad = mimImageJacs[ir];
    if (mimTemplate.in_image_with_border(ir,1)) {
      // Central differences in x and y.
      v2Grad[0] = mimTemplate[ir + CVD::ImageRef(1,0)] -
          mimTemplate[ir - CVD::ImageRef(1,0)];
      v2Grad[1] = mimTemplate[ir + CVD::ImageRef(0,1)] -
          mimTemplate[ir - CVD::ImageRef(0,1)];
      // N.b. missing 0.5 factor in above, this will be added later.
    } else {
      v2Grad = TooN::Zeros;
    }
  } while (ir.next(mirSize));
  mbMadeJacs = true;
}
// Calculate the zero-mean SSD between one image and the next.
// Since both are zero mean already, just calculate the SSD...
// Both SBIs are assumed to share the static mirSize (always true once the
// first keyframe has fixed it).
double SmallBlurryImage::ZMSSD(SmallBlurryImage &other) {
  double dSSD = 0.0;
  CVD::ImageRef ir;
  do {
    double dDiff = mimTemplate[ir] - other.mimTemplate[ir];
    dSSD += dDiff * dDiff;
  } while(ir.next(mirSize));
  return dSSD;
}
// Find an SE2 which best aligns an SBI to a target
// Do this by ESM-tracking a la Benhimane & Malis
std::pair<TooN::SE2<>,double> SmallBlurryImage::IteratePosRelToTarget(
SmallBlurryImage &other, int nIterations) {
TooN::SE2<> se2CtoC;
TooN::SE2<> se2WfromC;
CVD::ImageRef irCenter = mirSize / 2;
se2WfromC.get_translation() = vec(irCenter);
std::pair<TooN::SE2<>, double> result_pair;
if (!other.mbMadeJacs) {
std::cerr << "You spanner, you didn't make the jacs for the target." << std::endl;
assert(other.mbMadeJacs);
};
double dMeanOffset = 0.0;
TooN::Vector<4> v4Accum;
TooN::Vector<10> v10Triangle;
CVD::Image<float> imWarped(mirSize);
double dFinalScore = 0.0;
for (int it = 0; it < nIterations; it++) {
dFinalScore = 0.0;
v4Accum = TooN::Zeros;
v10Triangle = TooN::Zeros; // Holds the bottom-left triangle of JTJ
TooN::Vector<4> v4Jac;
v4Jac[3] = 1.0;
TooN::SE2<> se2XForm = se2WfromC * se2CtoC * se2WfromC.inverse();
// Make the warped current image template:
TooN::Vector<2> v2Zero = TooN::Zeros;
CVD::transform(mimTemplate, imWarped, se2XForm.get_rotation().get_matrix(),
se2XForm.get_translation(), v2Zero, -9e20f);
// Now compare images, calc differences, and current image jacobian:
CVD::ImageRef ir;
do {
if (!imWarped.in_image_with_border(ir, 1))
continue;
float l,r,u,d,here;
l = imWarped[ir - CVD::ImageRef(1, 0)];
r = imWarped[ir + CVD::ImageRef(1, 0)];
u = imWarped[ir - CVD::ImageRef(0, 1)];
d = imWarped[ir + CVD::ImageRef(0, 1)];
here = imWarped[ir];
if (l + r + u + d + here < -9999.9) // This means it's out of the image; c.f. the -9e20f param to transform.
continue;
TooN::Vector<2> v2CurrentGrad;
v2CurrentGrad[0] = r - l; // Missing 0.5 factor
v2CurrentGrad[1] = d - u;<|fim▁hole|> // the fact we average two gradients, the other from
// each gradient missing a 0.5 factor.
v4Jac[0] = v2SumGrad[0];
v4Jac[1] = v2SumGrad[1];
v4Jac[2] = -(ir.y - irCenter.y) * v2SumGrad[0] + (ir.x - irCenter.x) * v2SumGrad[1];
// v4Jac[3] = 1.0;
double dDiff = imWarped[ir] - other.mimTemplate[ir] + dMeanOffset;
dFinalScore += dDiff * dDiff;
v4Accum += dDiff * v4Jac;
// Speedy fill of the LL triangle of JTJ:
double *p = &v10Triangle[0];
*p++ += v4Jac[0] * v4Jac[0];
*p++ += v4Jac[1] * v4Jac[0];
*p++ += v4Jac[1] * v4Jac[1];
*p++ += v4Jac[2] * v4Jac[0];
*p++ += v4Jac[2] * v4Jac[1];
*p++ += v4Jac[2] * v4Jac[2];
*p++ += v4Jac[0];
*p++ += v4Jac[1];
*p++ += v4Jac[2];
*p++ += 1.0;
} while(ir.next(mirSize));
TooN::Vector<4> v4Update;
// Solve for JTJ-1JTv;
{
TooN::Matrix<4> m4;
int v=0;
for(int j=0; j<4; j++)
for(int i=0; i<=j; i++)
m4[j][i] = m4[i][j] = v10Triangle[v++];
TooN::Cholesky<4> chol(m4);
v4Update = chol.backsub(v4Accum);
}
TooN::SE2<> se2Update;
se2Update.get_translation() = -v4Update.slice<0,2>();
se2Update.get_rotation() = TooN::SO2<>::exp(-v4Update[2]);
se2CtoC = se2CtoC * se2Update;
dMeanOffset -= v4Update[3];
}
result_pair.first = se2CtoC;
result_pair.second = dFinalScore;
return result_pair;
}
// What is the 3D camera rotation (zero trans) SE3<> which causes an
// input image SO2 rotation? Solved by Gauss-Newton on SO3: project two
// reference points, compare against their SE2-warped pixel positions,
// and iterate (3 fixed iterations; converges fast for small rotations).
TooN::SE3<> SmallBlurryImage::SE3fromSE2(TooN::SE2<> se2, ATANCamera camera) {
  // Do this by projecting two points, and then iterating the SE3<> (SO3
  // actually) until convergence. It might seem stupid doing this so
  // precisely when the whole SE2-finding is one big hack, but hey.
  camera.SetImageSize(mirSize);
  TooN::Vector<2> av2Turned[2]; // Our two warped points in pixels
  // Targets: points +/-5px from centre, moved by the recovered SE2.
  av2Turned[0] = CVD::vec(mirSize / 2) + se2 * CVD::vec(CVD::ImageRef(5,0));
  av2Turned[1] = CVD::vec(mirSize / 2) + se2 * CVD::vec(CVD::ImageRef(-5,0));
  TooN::Vector<3> av3OrigPoints[2]; // 3D versions of these points.
  av3OrigPoints[0] = unproject(camera.UnProject(CVD::vec(mirSize / 2) +
                                                CVD::vec(CVD::ImageRef(5,0))));
  av3OrigPoints[1] = unproject(camera.UnProject(CVD::vec(mirSize / 2) +
                                                CVD::vec(CVD::ImageRef(-5,0))));
  TooN::SO3<> so3;
  for (int it = 0; it<3; it++) {
    TooN::WLS<3> wls; // lazy; no need for the 'W'
    wls.add_prior(10.0);  // regularise towards zero update
    for (int i = 0; i < 2; i++) {
      // Project into the image to find error
      TooN::Vector<3> v3Cam = so3 * av3OrigPoints[i];
      TooN::Vector<2> v2Implane = project(v3Cam);
      TooN::Vector<2> v2Pixels = camera.Project(v2Implane);
      TooN::Vector<2> v2Error = av2Turned[i] - v2Pixels;
      TooN::Matrix<2> m2CamDerivs = camera.GetProjectionDerivs();
      TooN::Matrix<2,3> m23Jacobian;
      double dOneOverCameraZ = 1.0 / v3Cam[2];
      // d(pixel)/d(so3 generator m): motion field of each SO3 generator,
      // mapped through the projective division and the camera derivatives.
      for (int m = 0; m < 3; m++) {
        const TooN::Vector<3> v3Motion = TooN::SO3<>::generator_field(m, v3Cam);
        TooN::Vector<2> v2CamFrameMotion;
        v2CamFrameMotion[0] = (v3Motion[0] - v3Cam[0] * v3Motion[2] * dOneOverCameraZ) * dOneOverCameraZ;
        v2CamFrameMotion[1] = (v3Motion[1] - v3Cam[1] * v3Motion[2] * dOneOverCameraZ) * dOneOverCameraZ;
        m23Jacobian.T()[m] = m2CamDerivs * v2CamFrameMotion;
      };
      wls.add_mJ(v2Error[0], m23Jacobian[0], 1.0);
      wls.add_mJ(v2Error[1], m23Jacobian[1], 1.0);
    };
    wls.compute();
    TooN::Vector<3> v3Res = wls.get_mu();
    // Left-multiply the exponential of the update onto the running estimate.
    so3 = TooN::SO3<>::exp(v3Res) * so3;
  };
  TooN::SE3<> se3Result;
  se3Result.get_rotation() = so3;  // translation stays zero
  return se3Result;
}
} // namespace ptam<|fim▁end|> |
TooN::Vector<2> v2SumGrad = 0.25 * (v2CurrentGrad + other.mimImageJacs[ir]);
// Why 0.25? This is from missing 0.5 factors: One for |
<|file_name|>hr_time_conversion.py<|end_file_name|><|fim▁begin|># hackerrank - Algorithms: Time Conversion
# Written by James Andreou, University of Waterloo
#
# hackerrank - Algorithms: Time Conversion
# Convert a 12-hour AM/PM time string ("hh:mm:ssAM"/"hh:mm:ssPM")
# to its 24-hour equivalent ("HH:MM:SS").


def to_military_time(s):
    """Convert a 12-hour clock string such as "07:05:45PM" to 24-hour form.

    :param s: time formatted as "hh:mm:ssAM" or "hh:mm:ssPM"
    :return: the same instant formatted as "HH:MM:SS"
    """
    meridiem = s[-2]  # 'A' for AM, 'P' for PM
    if s[:2] == "12":
        # 12 AM is midnight (hour 00); 12 PM stays 12.
        return ("00" + s[2:-2]) if meridiem == "A" else s[:-2]
    if meridiem == "P":
        # Any other PM hour is shifted forward by 12.
        return str(int(s[:2]) + 12) + s[2:-2]
    # Plain AM hour: just strip the AM suffix.
    return s[:-2]


if __name__ == "__main__":
    try:
        read_line = raw_input  # Python 2 (original target)
    except NameError:
        read_line = input  # Python 3
    print(to_military_time(read_line()))
<|file_name|>textblock.cpp<|end_file_name|><|fim▁begin|>/***************************************************************************
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
* copyright (C) 2003 Brian Thomas <[email protected]> *
* copyright (C) 2004-2014 *
* Umbrello UML Modeller Authors <[email protected]> *
***************************************************************************/
// own header
#include "textblock.h"
// local includes
#include "codedocument.h"
#include "codegenerationpolicy.h"
#include "debug_utils.h"
#include "uml.h"
// qt includes
#include <QRegExp>
#include <QTextStream>
/**
* Constructor.
*/
TextBlock::TextBlock(CodeDocument * parent, const QString & text)
: m_text(QString()),
m_tag(QString()),
m_canDelete(true),
m_writeOutText(true),
m_indentationLevel(0),
m_parentDocument(parent)
{
setText(text);
}
/**
* Destructor.
*/
TextBlock::~TextBlock()
{
}
/**
* Set the attribute m_canDelete.
* @param canDelete the new value to set
*/
void TextBlock::setCanDelete(bool canDelete)
{
m_canDelete = canDelete;
}
/**
* Determine if its OK to delete this textblock from the document.
* Used by the text editor to know if deletion could cause a crash of
* the program.
* @return the value of m_canDelete
*/
bool TextBlock::canDelete() const
{
return m_canDelete;
}
/**
* Get the value of m_parentDoc
* @return the value of m_parentDoc
*/
CodeDocument * TextBlock::getParentDocument() const
{
return m_parentDocument;
}
/**
* Set the value of m_text
* The actual text of this code block.
* @param text the new value of m_text
*/
void TextBlock::setText(const QString & text)
{
m_text = text;
}
/**
* Add text to this object.
* @param text the text to add
*/
void TextBlock::appendText(const QString & text)
{
m_text = m_text + text;
}
/**
* Get the value of m_text
* The actual text of this code block.
* @return the value of m_text
*/
QString TextBlock::getText() const
{
return m_text;
}
/**
* Get the tag of this text block. This tag
* may be used to find this text block in the code document
* to which it belongs.
* @return the tag
*/
QString TextBlock::getTag() const
{
return m_tag;
}
/**
* Set the tag of this text block. This tag
* may be used to find this text block in the code document
* to which it belongs.
* @param value the new value for the tag
*/
void TextBlock::setTag(const QString & value)
{
m_tag = value;
}
/**
* Set the value of m_writeOutText
* Whether or not to include the text of this TextBlock into a file.
* @param write the new value of m_writeOutText
*/
void TextBlock::setWriteOutText(bool write)
{
m_writeOutText = write;
}
/**
* Get the value of m_writeOutText
* Whether or not to include the text of this TextBlock into a file.
* @return the value of m_writeOutText
*/
bool TextBlock::getWriteOutText() const
{
return m_writeOutText;
}
/**
* Set how many times to indent this text block.
* The amount of each indenatation is determined from the parent
* codedocument codegeneration policy.
* @param level the new value for the indentation level
*/
void TextBlock::setIndentationLevel(int level)
{
m_indentationLevel = level;
}
/**
* Get how many times to indent this text block.
* The amount of each indenatation is determined from the parent
* codedocument codegeneration policy.
* @return the indentation level
*/
int TextBlock::getIndentationLevel() const
{
return m_indentationLevel;
}
/**
* Get the new line chars which ends the line.
* @return the ending chars for new line
*/
QString TextBlock::getNewLineEndingChars()
{
CodeGenerationPolicy* policy = UMLApp::app()->commonPolicy();
return policy->getNewLineEndingChars();
}
/**
* Get how much a single "level" of indentation will actually indent.
* @return the unit of indentation (for one level)
*/
QString TextBlock::getIndentation()
{
CodeGenerationPolicy* policy = UMLApp::app()->commonPolicy();
return policy->getIndentation();
}
/**
 * Build the concrete indentation prefix for the requested level.
 * A level of 0 means "use this block's own indentation level".
 * @param level the level of interest (0 selects m_indentationLevel)
 * @return the per-level indentation unit repeated that many times
 */
QString TextBlock::getIndentationString(int level) const
{
    const int effectiveLevel = level ? level : m_indentationLevel;
    const QString unit = getIndentation();
    QString result;
    for (int remaining = effectiveLevel; remaining > 0; --remaining) {
        result += unit;
    }
    return result;
}
/**
* TODO: Ush. These are terrifically bad and must one day go away.
* Both methods indicate the range of lines in this textblock
* which may be edited by the codeeditor (assuming that any are
* actually editable). The default case is no lines are editable.
* The line numbering starts with '0' and a '-1' means no line
* qualifies.
* @return line number
*/
int TextBlock::firstEditableLine()
{
return 0;
}
/**
* @see firstEditableLine
*/
int TextBlock::lastEditableLine()
{
return 0;
}
/**
* Used by the CodeEditor. It provides it with an appropriate
* starting string for a new line of text within the given textblock
* (for example a string with the proper indentation).
* If the indentation amount is '0' the current indentation string will
* be used.
* <p>
* TODO: Can be refactored away and replaced with
* <a href="#getIndentationString">getIndentationString</a>.
* @param amount the number of indent steps to use
* @return the new line
*/
QString TextBlock::getNewEditorLine(int amount)
{
return getIndentationString(amount);
}
/**
 * UnFormat a long text string. Typically, this means removing
 * the indentation (linePrefix) and/or newline chars from each line.
 * If an indentation is not specified, then the current indentation is used.
 * NOTE(review): the pattern is anchored with '^' only, so with a
 * non-multiline QRegExp this strips the indent at the start of the
 * whole string rather than on every line -- confirm intended behavior.
 * NOTE(review): myIndent is inserted into the regex unescaped; fine for
 * whitespace indents, but regex meta-characters would change the pattern.
 * @param text the original text for unformatting
 * @param indent the indentation
 * @return the unformatted text
 */
QString TextBlock::unformatText(const QString & text, const QString & indent)
{
QString output = text;
QString myIndent = indent;
if (myIndent.isEmpty()) {
// fall back to this block's own indentation string
myIndent = getIndentationString();
}
if (!output.isEmpty()) {
// remove indentation from this text block.
output.remove(QRegExp(QLatin1Char('^') + myIndent));
}
return output;
}
/**
* Causes the text block to release all of its connections
* and any other text blocks that it 'owns'.
* Needed to be called prior to deletion of the textblock.
* TODO: Does nothing.
*/
void TextBlock::release()
{
}
/**
* Format a long text string to be more readable.
* @param work the original text for formatting
* @param linePrefix a line prefix
* @param breakStr a break string
* @param addBreak control to add always a break string
* @param lastLineHasBreak control to add a break string to the last line
* @return the new formatted text
*/
QString TextBlock::formatMultiLineText(const QString & work, const QString & linePrefix,
const QString & breakStr, bool addBreak, bool lastLineHasBreak)
{
QString output;
QString text = work;
QString endLine = getNewLineEndingChars();<|fim▁hole|> // we have to tack on extra match
if (!text.contains(QRegExp(breakStr + QLatin1String("\\$"))))
matches++;
for (int i=0; i < matches; ++i) {
QString line = text.section(QRegExp(breakStr), i, i);
output += linePrefix + line;
if ((i != matches-1) || lastLineHasBreak)
output += endLine; // add break to line
}
} else {
output = linePrefix + text;
if (addBreak)
output += breakStr;
}
return output;
}
/**
 * Set attributes of the node that represents this class
 * in the XMI document. Only non-default values are written:
 * indent level 0, empty text, writeOutText==true and canDelete==true
 * are the defaults and are omitted from the element.
 * @param doc the xmi document (unused; kept for interface compatibility)
 * @param blockElement the xmi element holding the attributes
 */
void TextBlock::setAttributesOnNode(QDomDocument & doc, QDomElement & blockElement)
{
    Q_UNUSED(doc);
    QString endLine = UMLApp::app()->commonPolicy()->getNewLineEndingChars();
    blockElement.setAttribute(QLatin1String("tag"), getTag());
    // only write these if different from defaults
    if (getIndentationLevel())
        blockElement.setAttribute(QLatin1String("indentLevel"), QString::number(getIndentationLevel()));
    if (!m_text.isEmpty())
        blockElement.setAttribute(QLatin1String("text"), encodeText(m_text, endLine));
    // Inside these guards the flag is known to be false, so the former
    // conditional expressions (flag ? "true" : "false") were dead code
    // that could only ever produce "false"; write it directly.
    if (!getWriteOutText())
        blockElement.setAttribute(QLatin1String("writeOutText"), QLatin1String("false"));
    if (!canDelete())
        blockElement.setAttribute(QLatin1String("canDelete"), QLatin1String("false"));
}
/**
 * Set the class attributes from a passed object.
 * Copies the indentation level, text, write-out flag and delete
 * permission. The tag is deliberately NOT copied: it identifies this
 * block within its own parent document and must stay unique.
 * @param obj text block from which the attributes are taken
 */
void TextBlock::setAttributesFromObject(TextBlock * obj)
{
// DON'T set tag here.
setIndentationLevel(obj->getIndentationLevel());
setText(obj->getText());
setWriteOutText(obj->getWriteOutText());
m_canDelete = obj->canDelete();
}
/**
 * Set the class attributes of this object from
 * the passed element node. Attributes missing from the node fall back
 * to their defaults: indent level 0, writeOutText and canDelete true.
 * The stored text is decoded back from its XML newline-entity form.
 * @param root the xmi element from which to load
 */
void TextBlock::setAttributesFromNode(QDomElement & root)
{
QString endLine = UMLApp::app()->commonPolicy()->getNewLineEndingChars();
setIndentationLevel(root.attribute(QLatin1String("indentLevel"), QLatin1String("0")).toInt());
setTag(root.attribute(QLatin1String("tag")));
setText(decodeText(root.attribute(QLatin1String("text")), endLine));
const QString trueStr = QLatin1String("true");
setWriteOutText(root.attribute(QLatin1String("writeOutText"), trueStr) == trueStr);
m_canDelete = root.attribute(QLatin1String("canDelete"), trueStr) == trueStr;
}
/**
* Encode text for XML storage.
 * We simply convert all types of newLines to the "\n" or &#010; entity.
* @param text the not yet encoded text
* @param endLine the chars at the end of each line
* @return the encoded text
*/
QString TextBlock::encodeText(const QString & text, const QString & endLine)
{
QString encoded = text;
encoded.replace(QRegExp(endLine), QLatin1String("
"));
return encoded;
}
/**
* Decode text from XML storage.
 * We simply convert all newLine entities &#010; to the chosen line ending.
* @param text the not yet decoded text
* @param endLine the chars at the end of each line
* @return the decoded text
*/
QString TextBlock::decodeText(const QString & text, const QString & endLine)
{
QString decoded = text;
decoded.replace(QRegExp(QLatin1String("
")), endLine);
return decoded;
}
/**
 * Return the text in the right format. Returned string is empty
 * if m_writeOutText is false.
 * @return QString
 */
QString TextBlock::toString() const
{
    // Guard clauses: nothing to emit when writing is disabled or
    // there is no text at all.
    if (!m_writeOutText || m_text.isEmpty())
        return QString();
    const QString endLine = UMLApp::app()->commonPolicy()->getNewLineEndingChars();
    return formatMultiLineText(m_text, getIndentationString(), endLine);
}
/**
 * Operator '<<' for TextBlock.
 * Streams a one-line debug summary of all attributes; the parent
 * document is shown by ID, or "null" when the block is detached.
 */
QDebug operator<<(QDebug os, const TextBlock& obj)
{
os.nospace() << "TextBlock: tag=" << obj.getTag()
<< ", writeOutText=" << (obj.getWriteOutText() ? "true" : "false")
<< ", canDelete=" << (obj.canDelete() ? "true" : "false")
<< ", indentationLevel=" << obj.getIndentationLevel()
<< ", parentDocument id=" << (obj.getParentDocument() ? obj.getParentDocument()->ID()
: QLatin1String("null"))
<< ", text=" << obj.getText();
return os.space();
}
if (matches >= 0) {
// check that last part of string matches, if not, then |
<|file_name|>parser_test.ts<|end_file_name|><|fim▁begin|>import {Datum} from "../ast/datum";
import {
ASSIGNMENT,
CONDITIONAL,
DEFINITION,
FORMALS,
LAMBDA_EXPRESSION,
MACRO_BLOCK,
Nonterminal,
PATTERN,
PATTERN_DATUM,
PATTERN_IDENTIFIER,
PROCEDURE_CALL,
PROGRAM,
QUASIQUOTATION,
QUOTATION,
SELF_EVALUATING,
SPLICING_UNQUOTATION,
TEMPLATE,
TRANSFORMER_SPEC,
VARIABLE
} from "../parse/nonterminals";
import {ParserImpl} from "../parse/parser_impl";
import {newReader} from "../read/reader";
import {newTokenStream} from "../scan/token_stream";
describe("parser", () => {
beforeEach(() => {
jasmine.addMatchers({toParseAs});
});
describe("variables", () => {
it("should accept well-formed variables", () => {
[
'...',
'+',
'-',
'x'
].forEach(text => expect(text).toParseAs(VARIABLE));
});
it("should reject malformed variables", () => {
expect('(').not.toParseAs(VARIABLE);
});
});
describe("quotations", () => {
it("should accept well-formed quotations", () => {
[
"'1",
"''1",
'(quote quote)',
"'quote"
].forEach(text => expect(text).toParseAs(QUOTATION));
});
it("should reject malformed quotations", () => {
[
'quote',
"''"
].forEach(text => expect(text).not.toParseAs(QUOTATION));
});
});
describe("self-evaluating", () => {
it("should accept well-formed self-evaluating forms", () => {
[
'#t',
'1',
'#\\a',
'#\\space',
'3.14159',
'"hello, world"',
'"(define foo x y)"'
].forEach(text => expect(text).toParseAs(SELF_EVALUATING));
});
it("should reject forms that are not self-evaluating", () => {
[
'(define foo (+ 1 2))',
'+'
].forEach(text => expect(text).not.toParseAs(SELF_EVALUATING));
});
});
describe("procedure calls", () => {
it("should accept well-formed procedure calls", () => {
[
'(+)',
'(define x)',
'((foo) (foo))',
'((define) foo)',
'((lambda () +) 1 2)'
].forEach(text => expect(text).toParseAs(PROCEDURE_CALL));
});
it("should reject malformed procedure calls", () => {
[
'(foo x',
'foo x)',
'()',
'(foo x y . z)'
// TODO bl parses as a macro use '((define) define)'
].forEach(text => expect(text).not.toParseAs(PROCEDURE_CALL));
});
});
describe("lambda expressions", () => {
it("should accept well-formed lambda expressions", () => {
[
'(lambda () 1)',
'(lambda x 1)',
'(lambda (x) y z)',
'(lambda (x y) (x y))',
'(lambda (x . y) z)',
'(lambda () (define x 1) (define y 2) x)',
'(lambda () (define x 1) (define y 2) x y)'
].forEach(text => expect(text).toParseAs(LAMBDA_EXPRESSION));
});
it("should reject malformed lambda expressions", () => {
[
'(lambda (x y))',
'(lambda x . y z)',
'(lambda lambda)',
'(lambda () (define x 1) (define y 2))'
].forEach(text => expect(text).not.toParseAs(LAMBDA_EXPRESSION));
});
});
describe("formals", () => {
it("should accept well-formed formals", () => {
[
'(x y z)',
'x',
'(x . z)'
].forEach(text => expect(text).toParseAs(FORMALS));
});
it("should reject malformed formals", () => {
[
'( . x)',
'(x . y . z)'
].forEach(text => expect(text).not.toParseAs(FORMALS));
});
});
describe("definitions", () => {
it("should accept well-formed definitions", () => {
[
'(define x x)',
'(define (foo x y) (foo x y))',
'(begin (define x x) (define y y))',
'(define (x . y) 1)',
'(begin)',
'(define (x) (define y 1) x)',
'(begin (define x 1) (define y 2))'
].forEach(text => expect(text).toParseAs(DEFINITION));
});
it("should reject malformed definitions", () => {
[
'define',
'(define x)',
'(begin 1)',
'(begin ())'
].forEach(text => expect(text).not.toParseAs(DEFINITION));
});
});
describe("conditionals", () => {
it("should accept well-formed conditionals", () => {
[
'(if x y z)',
'(if x y)',
'(if x (define x 1))'
].forEach(text => expect(text).toParseAs(CONDITIONAL));
});
it("should reject malformed conditionals", () => {
[
'(if x)',
'(if)'
].forEach(text => expect(text).not.toParseAs(CONDITIONAL));
});
});
describe("assignments", () => {
it("should accept well-formed assignments", () => {
[
'(set! let! met!)'
].forEach(text => expect(text).toParseAs(ASSIGNMENT));
});
it("should reject malformed assignments", () => {
[
'(set!)',
'(set! set!)',
'(set! x)'
].forEach(text => expect(text).not.toParseAs(ASSIGNMENT));
});
});
describe("transformer spec", () => {
it("should accept well-formed transformer specs", () => {
expect('(syntax-rules ())').toParseAs(TRANSFORMER_SPEC);
});
it("should reject malformed transformer specs", () => {
expect('(syntax-rules)').not.toParseAs(TRANSFORMER_SPEC);
});
});
describe("pattern identifier", () => {
it("should accept well-formed pattern identifiers", () => {
expect('define').toParseAs(PATTERN_IDENTIFIER);
expect('x').toParseAs(PATTERN_IDENTIFIER);
});
it("should reject malformed pattern identifiers", () => {
expect('...').not.toParseAs(PATTERN_IDENTIFIER);
});
});
describe("patterns", () => {
it("should accept well-formed patterns", () => {
[
'()',
'(define)',
'(define ...)',
'(define . define)',
'#()',
'#(define ...)'
].forEach(text => expect(text).toParseAs(PATTERN));
});
it("should reject malformed patterns", () => {
[
'(define . ...)',
'(...)'
].forEach(text => expect(text).not.toParseAs(PATTERN));
});
});
describe("pattern datums", () => {
it("should accept well-formed pattern datums", () => {
expect('"x"').toParseAs(PATTERN_DATUM);
});
it("should reject malformed pattern datums", () => {
expect('x').not.toParseAs(PATTERN_DATUM);
expect("'x").not.toParseAs(PATTERN_DATUM);
});
});
describe("templates", () => {
it("should accept well-formed templates", () => {
[
'()',
'#()',
'(x...)',
'(x... . x)',
'(x... y...)'
].forEach(text => expect(text).toParseAs(TEMPLATE));
});
});
describe("quasiquotation", () => {
it("should accept well-formed quasiquotations", () => {
[
'`(list ,(+ 1 2) 4)',
"`(a ,(+ 1 2) ,@(map abs '(4 -5 6)) b)",
"`((foo ,(- 10 3)) ,@(cdr '(c)) . ,(car '(cons)))",
"`#(10 5 ,(sqrt 4) ,@(map sqrt '(16 9)) 8)",
'`(a `(b ,(+ 1 2) ,(foo ,(+ 1 3) d) e) f)'
].forEach(text => expect(text).toParseAs(QUASIQUOTATION));
});
it("should reject malformed quasiquotations", () => {
expect("(a ,(+ 1 2) ,@(map abs '(4 -5 6)) b)")
.not
.toParseAs(QUASIQUOTATION);
});
});
describe("splicing unquotation", () => {
it("should accept well-formed splicing unquotations", () => {
[
",@(cdr '(c))",
"(unquote-splicing (cdr '(c)))"
].forEach(text => expect(text).toParseAs(SPLICING_UNQUOTATION));
});
it("should reject malformed splicing unquotations", () => {
[
',@',
'unquote-splicing'
].forEach(text => expect(text).not.toParseAs(SPLICING_UNQUOTATION));
});
});
describe("macro blocks", () => {
it("should accept well-formed macro blocks", () => {
[
'(let-syntax () 1)',
'(letrec-syntax () 1)',
"(let-syntax ((foo (syntax-rules () ((foo x) 'x)))) 1)",
"(letrec-syntax ((foo (syntax-rules (x) ((foo x) 'x)))) (foo))",
'(let-syntax ((foo (syntax-rules () ((foo) (+ 1 2 3))))) (define x 12) x)'
].forEach(text => expect(text).toParseAs(MACRO_BLOCK));
});
it("should reject malformed macro blocks", () => {
[
'(let-syntax ())',
'(letrec-syntax ())'
].forEach(text => expect(text).not.toParseAs(MACRO_BLOCK));
});
});
describe("programs", () => {
it("should accept well-formed programs", () => {
[
'',
' ',
' ',
' ',
'\n',
'\t',
'\n \t \n\n \t \n'
].forEach(text => expect(text).toParseAs(PROGRAM));
});
it("should reject malformed programs", () => {
[
'(',
')',
'((',
'()',
')(',
'))',
'(((',
'(()',
'()(',
'())',
')((',
')()',
'))(',
')))',
'((((',
'((()',
'(()(',
'(())',
'()((',
'()()',
'())(',<|fim▁hole|> ')(((',
')(()',
')()(',
')())',
'))((',
'))()',
')))(',
'))))'
].forEach(text => expect(text).not.toParseAs(PROGRAM));
});
});
});
/**
 * Jasmine custom matcher factory backing `expect(text).toParseAs(nt)`.
 *
 * Reads the source text into a datum tree, then attempts to parse it as
 * the expected nonterminal. The matcher passes only when parsing
 * succeeds and peeking the parse yields exactly the requested
 * nonterminal. (util/customEqualityTesters are unused, but required by
 * jasmine's CustomMatcherFactory signature.)
 */
function toParseAs(util: jasmine.MatchersUtil, customEqualityTesters: jasmine.CustomEqualityTester[]): jasmine.CustomMatcher {
    return {
        compare(actual: any, expected: Nonterminal): jasmine.CustomMatcherResult {
            let datumRoot;
            try {
                // Tokenize and read; a reader error means the text is not
                // even well-formed enough to attempt a parse.
                datumRoot = newReader(newTokenStream(actual)).read();
            } catch (e) {
                return {
                    pass: false,
                    message: e.message,
                };
            }
            const actualResult = (datumRoot instanceof Datum)
                && new ParserImpl(datumRoot).parse(expected);
            // peekParse reports which nonterminal the parse actually produced.
            let actualType: Nonterminal | null = null;
            if (actualResult && actualResult.peekParse) {
                actualType = actualResult.peekParse();
            }
            return {
                pass: actualType === expected,
                message: `expected ${actual} to parse as ${expected}, got ${actualType}`,
            };
        }
    };
}
<|file_name|>0018_auto_20161212_0725.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-12-12 13:25
from __future__ import unicode_literals
from django.db import migrations
from django.utils.text import slugify<|fim▁hole|>
def create_slugs(apps, schema_editor):
    """Backfill ``slug`` on every AttributeChoiceValue from its display name.

    Data-migration helper run via RunPython; uses the historical model
    (``apps.get_model``) so it remains valid under later model changes.
    """
    Value = apps.get_model("product", "AttributeChoiceValue")
    for value in Value.objects.all():
        value.slug = slugify(value.display)
        value.save()
class Migration(migrations.Migration):
    # Forward: populate slugs from display names.
    # Backward: no-op (the slug column is simply left untouched).
    dependencies = [("product", "0017_attributechoicevalue_slug")]

    operations = [migrations.RunPython(create_slugs, migrations.RunPython.noop)]
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.views.generic import ListView, TemplateView<|fim▁hole|>
class IndexView(TemplateView):
    """Render the static landing page from the index.html template."""

    template_name = 'index.html'
<|file_name|>main.py<|end_file_name|><|fim▁begin|>import npyscreen
import os
import re
import sys
import time
from docker.errors import DockerException
from npyscreen import notify_confirm
from threading import Thread
from vent.api.actions import Action
from vent.api.menu_helpers import MenuHelper
from vent.helpers.meta import Containers
from vent.helpers.meta import Cpu
from vent.helpers.meta import DropLocation
from vent.helpers.meta import Gpu
from vent.helpers.meta import Images
from vent.helpers.meta import Jobs
from vent.helpers.meta import Timestamp
from vent.helpers.meta import Uptime
from vent.helpers.logs import Logger
from vent.helpers.paths import PathDirs
from vent.menus.add import AddForm
from vent.menus.ntap import CreateNTap
from vent.menus.ntap import DeleteNTap
from vent.menus.ntap import ListNTap
from vent.menus.ntap import NICsNTap
from vent.menus.ntap import StartNTap
from vent.menus.ntap import StopNTap
from vent.menus.backup import BackupForm
from vent.menus.editor import EditorForm
from vent.menus.inventory_forms import InventoryCoreToolsForm
from vent.menus.inventory_forms import InventoryToolsForm
from vent.menus.logs import LogsForm
from vent.menus.services import ServicesForm
from vent.menus.tools import ToolForm
class MainForm(npyscreen.FormBaseNewWithMenus):
""" Main information landing form for the Vent CLI """
    @staticmethod
    def exit(*args, **kwargs):
        """Restore the terminal to a sane state and terminate the process."""
        os.system('reset')
        os.system('stty sane')
        try:
            sys.exit(0)
        # sys.exit can be swallowed by the surrounding UI loop, so force
        # an immediate exit as a fallback.
        except SystemExit:  # pragma: no cover
            os._exit(0)
@staticmethod
def t_status(core):
""" Get status of tools for either plugins or core """
m_helper = MenuHelper()
repos, tools = m_helper.tools_status(core)
installed = 0
custom_installed = 0
built = 0
custom_built = 0
running = 0
custom_running = 0
normal = str(len(tools['normal']))
# determine how many extra instances should be shown for running
norm = set(tools['normal'])
inst = set(tools['installed'])
run_str = str(len(tools['normal']) + len(inst - norm))
for tool in tools['running']:
# check for multi instances too for running
if tool in tools['normal']:
running += 1
elif re.sub(r'\d+$', '', tool) in tools['normal']:
running += 1
else:
custom_running += 1
for tool in tools['built']:
if tool in tools['normal']:
built += 1
else:
custom_built += 1
for tool in tools['installed']:
if tool in tools['normal']:
installed += 1
elif re.sub(r'\d+$', '', tool) not in tools['normal']:
custom_installed += 1
tools_str = str(running + custom_running) + "/" + run_str + " running"
if custom_running > 0:
tools_str += " (" + str(custom_running) + " custom)"
tools_str += ", " + str(built + custom_built) + "/" + normal + " built"
if custom_built > 0:
tools_str += " (" + str(custom_built) + " custom)"
tools_str += ", " + str(installed + custom_installed) + "/" + normal
tools_str += " installed"
if custom_built > 0:
tools_str += " (" + str(custom_installed) + " custom)"
return tools_str, (running, custom_running, normal, repos)
    def while_waiting(self):
        """ Update fields periodically if nothing is happening """
        # give a little extra time for file descriptors to close
        time.sleep(0.1)
        # Refresh timestamp, uptime and running-container count fields.
        self.addfield.value = Timestamp()
        self.addfield.display()
        self.addfield2.value = Uptime()
        self.addfield2.display()
        self.addfield3.value = str(len(Containers()))+" running"
        if len(Containers()) > 0:
            self.addfield3.labelColor = "GOOD"
        else:
            self.addfield3.labelColor = "DEFAULT"
        self.addfield3.display()
        # update core tool status
        # values = (running, custom_running, normal_count_str, repos)
        self.addfield5.value, values = MainForm.t_status(True)
        if values[0] + values[1] == 0:
            color = "DANGER"
            self.addfield4.labelColor = "CAUTION"
            self.addfield4.value = "Idle"
        elif values[0] >= int(values[2]):
            # all expected core tools running
            color = "GOOD"
            self.addfield4.labelColor = color
            self.addfield4.value = "Ready to start jobs"
        else:
            # some, but not all, core tools running
            color = "CAUTION"
            self.addfield4.labelColor = color
            self.addfield4.value = "Ready to start jobs"
        self.addfield5.labelColor = color
        # update plugin tool status
        plugin_str, values = MainForm.t_status(False)
        plugin_str += ", " + str(values[3]) + " plugin(s) installed"
        self.addfield6.value = plugin_str
        # get jobs
        jobs = Jobs()
        # number of jobs, number of tool containers
        self.addfield7.value = str(jobs[0]) + " jobs running (" + str(jobs[1])
        self.addfield7.value += " tool containers), " + str(jobs[2])
        self.addfield7.value += " completed jobs"
        if jobs[0] > 0:
            # active jobs override the idle/ready status shown above
            self.addfield4.labelColor = "GOOD"
            self.addfield4.value = "Processing jobs"
            self.addfield7.labelColor = "GOOD"
        else:
            self.addfield7.labelColor = "DEFAULT"
        self.addfield4.display()
        self.addfield5.display()
        self.addfield6.display()
        self.addfield7.display()
        # if file drop location changes deal with it
        logger = Logger(__name__)
        status = (False, None)
        if self.file_drop.value != DropLocation()[1]:
            logger.info("Starting: file drop restart")
            try:
                self.file_drop.value = DropLocation()[1]
                logger.info("Path given: " + str(self.file_drop.value))
                # restart if the path is valid
                if DropLocation()[0]:
                    # clean then prep_start; status carries the tool dict
                    status = self.api_action.clean(name='file_drop')
                    status = self.api_action.prep_start(name='file_drop')
                else:
                    logger.error("file drop path name invalid" +
                                 DropLocation()[1])
                if status[0]:
                    tool_d = status[1]
                    status = self.api_action.start(tool_d)
                    logger.info("Status of file drop restart: " +
                                str(status[0]))
            except Exception as e:  # pragma no cover
                logger.error("file drop restart failed with error: " + str(e))
            logger.info("Finished: file drop restart")
        self.file_drop.display()
        return
    @staticmethod
    def core_tools(action):
        """ Perform actions for core tools """
        def diff(first, second):
            """
            Get the elements that exist in the first list and not in the second
            """
            second = set(second)
            return [item for item in first if item not in second]
        def popup(original, orig_type, thr, title):
            """
            Start the thread and display a popup of info
            until the thread is finished
            """
            thr.start()
            info_str = ""
            while thr.is_alive():
                # NOTE(review): info is only assigned for 'containers' or
                # 'images'; any other orig_type would raise NameError below.
                if orig_type == 'containers':
                    info = diff(Containers(), original)
                elif orig_type == 'images':
                    info = diff(Images(), original)
                if info:
                    info_str = ""
                    for entry in info:
                        # TODO limit length of info_str to fit box
                        info_str += entry[0]+": "+entry[1]+"\n"
                npyscreen.notify_wait(info_str, title=title)
                time.sleep(1)
            return
        if action == 'install':
            # Snapshot images first so the popup can show only what the
            # background install thread adds.
            original_images = Images()
            m_helper = MenuHelper()
            thr = Thread(target=m_helper.cores, args=(),
                         kwargs={"action": "install"})
            popup(original_images, "images", thr,
                  'Please wait, installing core containers...')
            notify_confirm("Done installing core containers (any"
                           " already installed tools untouched).",
                           title='Installed core containers')
        return
    def add_form(self, form, form_name, form_args):
        """ Add new form and switch to it """
        # Register the form class under form_name with the given kwargs,
        # then make it the active form.
        self.parentApp.addForm(form_name, form, **form_args)
        self.parentApp.change_form(form_name)
        return
def remove_forms(self, form_names):
""" Remove all forms supplied """
for form in form_names:
try:
self.parentApp.removeForm(form)
except Exception as e: # pragma: no cover
pass
return
def perform_action(self, action):
""" Perform actions in the api from the CLI """
form = ToolForm
s_action = action.split("_")[0]
if 'core' in action:
form_action = s_action + ' (only core tools are shown)'
form_name = s_action.title() + " core tools"
cores = True
else:
form_action = s_action + ' (only plugin tools are shown)'
form_name = s_action.title() + " tools"
cores = False
a_type = 'containers'
if s_action in ['build']:
a_type = 'images'
forms = [action.upper() + 'TOOLS']
form_args = {'color': 'CONTROL',
'names': [s_action],
'name': form_name,
'action_dict': {'action_name': s_action,
'present_t': s_action + 'ing ' + a_type,
'past_t': s_action.title() + ' ' + a_type,
'action': form_action,
'type': a_type,
'cores': cores}}
# grammar rules
vowels = ['a', 'e', 'i', 'o', 'u']
# consonant-vowel-consonant ending
# Eg: stop -> stopping
if s_action[-1] not in vowels and \
s_action[-2] in vowels and \
s_action[-3] not in vowels:
form_args['action_dict']['present_t'] = s_action + \
s_action[-1] + 'ing ' + a_type
# word ends with a 'e'
# eg: remove -> removing
if s_action[-1] == 'e':
form_args['action_dict']['present_t'] = s_action[:-1] \
+ 'ing ' + a_type
if s_action == 'start':
form_args['names'].append('prep_start')
elif s_action == 'configure':
form_args['names'].pop()
form_args['names'].append('get_configure')
form_args['names'].append('save_configure')
form_args['names'].append('restart_tools')
if action == 'add':
form = AddForm
forms = ['ADD', 'ADDOPTIONS', 'CHOOSETOOLS']
form_args['name'] = "Add plugins"
form_args['name'] += "\t"*6 + "^Q to quit"
elif action == "inventory":
form = InventoryToolsForm
forms = ['INVENTORY']
form_args = {'color': "STANDOUT", 'name': "Inventory of tools"}
elif action == 'logs':
form = LogsForm<|fim▁hole|> elif action == 'services_core':
form = ServicesForm
forms = ['SERVICES']
form_args = {'color': "STANDOUT",
'name': "Core Services",
'core': True}
elif action == 'services':
form = ServicesForm
forms = ['SERVICES']
form_args = {'color': "STANDOUT",
'name': "Plugin Services",
'core': False}
elif action == 'services_external':
form = ServicesForm
forms = ['SERVICES']
form_args = {'color': "STANDOUT",
'name': "External Services",
'core': False,
'external': True}
elif action == "inventory_core":
form = InventoryCoreToolsForm
forms = ['COREINVENTORY']
form_args = {'color': "STANDOUT",
'name': "Inventory of core tools"}
form_args['name'] += "\t"*8 + "^T to toggle main"
if s_action in self.view_togglable:
form_args['name'] += "\t"*8 + "^V to toggle group view"
try:
self.remove_forms(forms)
thr = Thread(target=self.add_form, args=(),
kwargs={'form': form,
'form_name': forms[0],
'form_args': form_args})
thr.start()
while thr.is_alive():
npyscreen.notify('Please wait, loading form...',
title='Loading')
time.sleep(1)
except Exception as e: # pragma: no cover
pass
return
def switch_tutorial(self, action):
""" Tutorial forms """
if action == "background":
self.parentApp.change_form('TUTORIALBACKGROUND')
elif action == "terminology":
self.parentApp.change_form('TUTORIALTERMINOLOGY')
elif action == "setup":
self.parentApp.change_form('TUTORIALGETTINGSETUP')
elif action == "building_cores":
self.parentApp.change_form('TUTORIALBUILDINGCORES')
elif action == "starting_cores":
self.parentApp.change_form('TUTORIALSTARTINGCORES')
elif action == "adding_plugins":
self.parentApp.change_form('TUTORIALADDINGPLUGINS')
elif action == "adding_files":
self.parentApp.change_form('TUTORIALADDINGFILES')
elif action == "basic_troubleshooting":
self.parentApp.change_form('TUTORIALTROUBLESHOOTING')
return
    def system_commands(self, action):
        """ Perform system commands

        Dispatches on ``action``: 'backup', 'configure', 'reset', 'gpu',
        'restore', 'swarm' (TODO), 'upgrade' (TODO), and any action
        containing 'ntap' (network-tap sub-commands such as 'ntapcreate',
        'ntapstop', ...). Results are reported to the user via popups.
        """
        if action == 'backup':
            # api_action.backup() returns a tuple whose first element is a
            # success flag.
            status = self.api_action.backup()
            if status[0]:
                notify_confirm("Vent backup successful")
            else:
                notify_confirm("Vent backup could not be completed")
        elif action == 'configure':
            # Open an editor form wired to the vent-config get/save hooks.
            form_args = {'name': 'Change vent configuration',
                         'get_configure': self.api_action.get_configure,
                         'save_configure': self.api_action.save_configure,
                         'restart_tools': self.api_action.restart_tools,
                         'vent_cfg': True}
            add_kargs = {'form': EditorForm,
                         'form_name': 'CONFIGUREVENT',
                         'form_args': form_args}
            self.add_form(**add_kargs)
        elif action == "reset":
            # Destructive operation: require explicit confirmation first.
            okay = npyscreen.notify_ok_cancel(
                "This factory reset will remove ALL of Vent's user data, "
                "containers, and images. Are you sure?",
                title="Confirm system command")
            if okay:
                status = self.api_action.reset()
                if status[0]:
                    notify_confirm("Vent reset complete. "
                                   "Press OK to exit Vent Manager console.")
                else:
                    # status[1] carries the failure message.
                    notify_confirm(status[1])
                # The console exits after a reset attempt either way.
                MainForm.exit()
        elif action == "gpu":
            # Gpu(...) returns a tuple: (success, result, error-ish detail).
            gpu = Gpu(pull=True)
            if gpu[0]:
                notify_confirm("GPU detection successful. "
                               "Found: " + gpu[1])
            else:
                if gpu[1] == "Unknown":
                    notify_confirm("Unable to detect GPUs, try `make gpu` "
                                   "from the vent repository directory. "
                                   "Error: " + str(gpu[2]))
                else:
                    notify_confirm("No GPUs detected.")
        elif action == 'restore':
            # Backups live in the user's home directory as '.vent-backup*'
            # directories; offer them in a picker form.
            backup_dir_home = os.path.expanduser('~')
            backup_dirs = [f for f in os.listdir(backup_dir_home) if
                           f.startswith('.vent-backup')]
            form_args = {'restore': self.api_action.restore,
                         'dirs': backup_dirs,
                         'name': "Pick a version to restore from" + "\t"*8 +
                                 "^T to toggle main",
                         'color': 'CONTROL'}
            add_kargs = {'form': BackupForm,
                         'form_name': 'CHOOSEBACKUP',
                         'form_args': form_args}
            self.add_form(**add_kargs)
        elif action == "swarm":
            # !! TODO
            # add notify_cancel_ok popup once implemented
            pass
        elif action == "upgrade":
            # !! TODO
            # add notify_cancel_ok popup once implemented
            pass
        # deal with all network tap actions
        elif 'ntap' in action:
            # check if the tool is installed, built, and running
            output = self.api_action.tool_status_output('network_tap')
            # create a dict with substring as keys and forms as values
            ntap_form = {'create': CreateNTap,
                         'delete': DeleteNTap,
                         'list': ListNTap,
                         'nics': NICsNTap,
                         'start': StartNTap,
                         'stop': StopNTap}
            if output[0]:
                # output[1] non-empty means the tool is not ready; show why.
                if output[1]:
                    notify_confirm(output[1])
                else:
                    # action regarding ntap come in the form of 'ntapcreate'
                    # 'ntapdelete', etc
                    tap_action = action.split('ntap')[1]
                    form_args = {'color': 'CONTROL',
                                 'name': 'Network Tap Interface ' +
                                         tap_action + "\t"*6 +
                                         '^T to toggle main'}
                    self.add_form(ntap_form[tap_action], "Network Tap " +
                                  tap_action.title(), form_args)
        return
    def create(self):
        """ Override method for creating FormBaseNewWithMenu form

        Builds the main screen: status widgets (date, uptime, CPU/GPU info,
        container/job counters) and the menu tree (core tools, plugins, logs,
        services, system commands, tutorials). Initial counter values are
        placeholders — presumably refreshed elsewhere at runtime (confirm
        against the update loop).
        """
        try:
            self.api_action = Action()
        except DockerException as de:  # pragma: no cover
            # Without a reachable Docker daemon the console cannot work:
            # show the error and exit.
            notify_confirm(str(de),
                           title="Docker Error",
                           form_color='DANGER',
                           wrap=True)
            MainForm.exit()
        # Global keybindings: ^T toggles help, ^Q quits.
        self.add_handlers({"^T": self.help_form, "^Q": MainForm.exit})

        # all forms that can toggle view by group
        self.view_togglable = ['inventory', 'remove', 'update', 'enable',
                               'disable', 'build']

        #######################
        # MAIN SCREEN WIDGETS #
        #######################
        self.addfield = self.add(npyscreen.TitleFixedText, name='Date:',
                                 labelColor='DEFAULT', value=Timestamp())
        self.addfield2 = self.add(npyscreen.TitleFixedText, name='Uptime:',
                                  labelColor='DEFAULT', value=Uptime())
        self.cpufield = self.add(npyscreen.TitleFixedText,
                                 name='Logical CPUs:',
                                 labelColor='DEFAULT', value=Cpu())
        self.gpufield = self.add(npyscreen.TitleFixedText, name='GPUs:',
                                 labelColor='DEFAULT', value=Gpu()[1])
        self.location = self.add(npyscreen.TitleFixedText,
                                 name='User Data:',
                                 value=PathDirs().meta_dir,
                                 labelColor='DEFAULT')
        self.file_drop = self.add(npyscreen.TitleFixedText,
                                  name='File Drop:',
                                  value=DropLocation()[1],
                                  labelColor='DEFAULT')
        self.addfield3 = self.add(npyscreen.TitleFixedText, name='Containers:',
                                  labelColor='DEFAULT',
                                  value="0 "+" running")
        self.addfield4 = self.add(npyscreen.TitleFixedText, name='Status:',
                                  labelColor='CAUTION',
                                  value="Idle")
        self.addfield5 = self.add(npyscreen.TitleFixedText,
                                  name='Core Tools:', labelColor='DANGER',
                                  value="Not built")
        self.addfield6 = self.add(npyscreen.TitleFixedText,
                                  name='Plugin Tools:', labelColor='DEFAULT',
                                  value="Not built")
        self.addfield7 = self.add(npyscreen.TitleFixedText, name='Jobs:',
                                  value="0 jobs running (0 tool containers),"
                                  " 0 completed jobs", labelColor='DEFAULT')
        # Static banner art (read-only multiline widget).
        self.multifield1 = self.add(npyscreen.MultiLineEdit, max_height=22,
                                    editable=False, value="""
    '.,
      'b *
       '$ #.
        $: #:
        *# @):
        :@,@): ,.**:'
    , :@@*: ..**'
     '#o. .:(@'.@*"'
        'bq,..:,@@*' ,*
        ,p$q8,:@)' .p*'
       ' '@@Pp@@*'
        Y7'.'
       :@):.
      .:@:'.
      .::(@:.
       _
    __ _____ _ __ | |_
    \ \ / / _ \ '_ \| __|
    \ V / __/ | | | |_
    \_/ \___|_| |_|\__|
    """)
        ################
        # MENU OPTIONS #
        ################
        # Core Tools Menu Items
        self.m2 = self.add_menu(name="Core Tools", shortcut="c")
        self.m2.addItem(text='Add all latest core tools',
                        onSelect=MainForm.core_tools,
                        arguments=['install'], shortcut='i')
        self.m2.addItem(text='Build core tools',
                        onSelect=self.perform_action,
                        arguments=['build_core'], shortcut='b')
        self.m2.addItem(text='Clean core tools',
                        onSelect=self.perform_action,
                        arguments=['clean_core'], shortcut='c')
        self.m2.addItem(text='Configure core tools',
                        onSelect=self.perform_action,
                        arguments=['configure_core'], shortcut='t')
        self.m2.addItem(text='Disable core tools',
                        onSelect=self.perform_action,
                        arguments=['disable_core'], shortcut='d')
        self.m2.addItem(text='Enable core tools',
                        onSelect=self.perform_action,
                        arguments=['enable_core'], shortcut='e')
        self.m2.addItem(text='Inventory of core tools',
                        onSelect=self.perform_action,
                        arguments=['inventory_core'], shortcut='v')
        self.m2.addItem(text='Remove core tools',
                        onSelect=self.perform_action,
                        arguments=['remove_core'], shortcut='r')
        self.m2.addItem(text='Start core tools',
                        onSelect=self.perform_action,
                        arguments=['start_core'], shortcut='s')
        self.m2.addItem(text='Stop core tools',
                        onSelect=self.perform_action,
                        arguments=['stop_core'], shortcut='p')
        self.m2.addItem(text='Update core tools',
                        onSelect=self.perform_action,
                        arguments=['update_core'], shortcut='u')
        # Plugin Menu Items
        self.m3 = self.add_menu(name="Plugins", shortcut="p")
        self.m3.addItem(text='Add new plugin',
                        onSelect=self.perform_action,
                        arguments=['add'], shortcut='a')
        self.m3.addItem(text='Build plugin tools',
                        onSelect=self.perform_action,
                        arguments=['build'], shortcut='b')
        self.m3.addItem(text='Clean plugin tools',
                        onSelect=self.perform_action,
                        arguments=['clean'], shortcut='c')
        self.m3.addItem(text='Configure plugin tools',
                        onSelect=self.perform_action,
                        arguments=['configure'], shortcut='t')
        self.m3.addItem(text='Disable plugin tools',
                        onSelect=self.perform_action,
                        arguments=['disable'], shortcut='d')
        self.m3.addItem(text='Enable plugin tools',
                        onSelect=self.perform_action,
                        arguments=['enable'], shortcut='e')
        self.m3.addItem(text='Inventory of installed plugins',
                        onSelect=self.perform_action,
                        arguments=['inventory'], shortcut='i')
        self.m3.addItem(text='Remove plugins',
                        onSelect=self.perform_action,
                        arguments=['remove'], shortcut='r')
        self.m3.addItem(text='Start plugin tools',
                        onSelect=self.perform_action,
                        arguments=['start'], shortcut='s')
        self.m3.addItem(text='Stop plugin tools',
                        onSelect=self.perform_action,
                        arguments=['stop'], shortcut='p')
        self.m3.addItem(text='Update plugins',
                        onSelect=self.perform_action,
                        arguments=['update'], shortcut='u')
        # Log Menu Items
        self.m4 = self.add_menu(name="Logs", shortcut="l")
        self.m4.addItem(text='Get container logs', arguments=['logs'],
                        onSelect=self.perform_action)
        # Services Menu Items
        self.m5 = self.add_menu(name="Services Running", shortcut='s')
        self.m5.addItem(text='Core Services', onSelect=self.perform_action,
                        arguments=['services_core'], shortcut='c')
        self.m5.addItem(text='External Services', onSelect=self.perform_action,
                        arguments=['services_external'], shortcut='e')
        self.m5.addItem(text='Plugin Services',
                        onSelect=self.perform_action,
                        arguments=['services'], shortcut='p')
        # System Commands Menu Items
        self.m6 = self.add_menu(name="System Commands", shortcut='y')
        self.m6.addItem(text='Backup', onSelect=self.system_commands,
                        arguments=['backup'], shortcut='b')
        self.m6.addItem(text='Change vent configuration',
                        onSelect=self.system_commands, arguments=['configure'],
                        shortcut='c')
        self.m6.addItem(text='Detect GPUs', onSelect=self.system_commands,
                        arguments=['gpu'], shortcut='g')
        self.m6.addItem(text='Enable Swarm Mode (To Be Implemented...)',
                        onSelect=self.system_commands,
                        arguments=['swarm'], shortcut='s')
        self.m6.addItem(text='Factory reset', onSelect=self.system_commands,
                        arguments=['reset'], shortcut='r')
        self.s6 = self.m6.addNewSubmenu(name='Network Tap Interface',
                                        shortcut='n')
        self.m6.addItem(text='Restore', onSelect=self.system_commands,
                        arguments=['restore'], shortcut='t')
        self.m6.addItem(text='Upgrade (To Be Implemented...)',
                        onSelect=self.system_commands,
                        arguments=['upgrade'], shortcut='u')
        # Network-tap submenu: each entry routes through system_commands with
        # an 'ntap'-prefixed action string (parsed there).
        self.s6.addItem(text='Create', onSelect=self.system_commands,
                        shortcut='c', arguments=['ntapcreate'])
        self.s6.addItem(text='Delete', onSelect=self.system_commands,
                        shortcut='d', arguments=['ntapdelete'])
        self.s6.addItem(text='List', onSelect=self.system_commands,
                        shortcut='l', arguments=['ntaplist'])
        self.s6.addItem(text='NICs', onSelect=self.system_commands,
                        shortcut='n', arguments=['ntapnics'])
        self.s6.addItem(text='Start', onSelect=self.system_commands,
                        shortcut='s', arguments=['ntapstart'])
        self.s6.addItem(text='Stop', onSelect=self.system_commands,
                        shortcut='t', arguments=['ntapstop'])
        # Tutorial Menu Items
        self.m7 = self.add_menu(name="Tutorials", shortcut="t")
        self.s1 = self.m7.addNewSubmenu(name="About Vent", shortcut='v')
        self.s1.addItem(text="Background", onSelect=self.switch_tutorial,
                        arguments=['background'], shortcut='b')
        self.s1.addItem(text="Terminology", onSelect=self.switch_tutorial,
                        arguments=['terminology'], shortcut='t')
        self.s1.addItem(text="Getting Setup", onSelect=self.switch_tutorial,
                        arguments=['setup'], shortcut='s')
        self.s2 = self.m7.addNewSubmenu(name="Working with Cores",
                                        shortcut='c')
        self.s2.addItem(text="Building Cores", onSelect=self.switch_tutorial,
                        arguments=['building_cores'], shortcut='b')
        self.s2.addItem(text="Starting Cores", onSelect=self.switch_tutorial,
                        arguments=['starting_cores'], shortcut='c')
        self.s3 = self.m7.addNewSubmenu(name="Working with Plugins",
                                        shortcut='p')
        self.s3.addItem(text="Adding Plugins", onSelect=self.switch_tutorial,
                        arguments=['adding_plugins'], shortcut='a')
        self.s4 = self.m7.addNewSubmenu(name="Files", shortcut='f')
        self.s4.addItem(text="Adding Files", onSelect=self.switch_tutorial,
                        arguments=['adding_files'], shortcut='a')
        self.s5 = self.m7.addNewSubmenu(name="Help", shortcut='s')
        self.s5.addItem(text="Basic Troubleshooting",
                        onSelect=self.switch_tutorial,
                        arguments=['basic_troubleshooting'], shortcut='t')
def help_form(self, *args, **keywords):
""" Toggles to help """
self.parentApp.change_form("HELP")<|fim▁end|> | forms = ['LOGS']
form_args = {'color': "STANDOUT", 'name': "Logs"} |
<|file_name|>vertex_buffer.hpp<|end_file_name|><|fim▁begin|>#pragma once
<|fim▁hole|>#include <r4/vector.hpp>
#include <morda/render/vertex_buffer.hpp>
#include "opengl_buffer.hpp"
namespace morda{ namespace render_opengl2{
// OpenGL-backed vertex buffer: owns a GL buffer object (via opengl_buffer)
// holding per-vertex data, plus the layout information needed to bind it
// as a vertex attribute.
class vertex_buffer : public morda::vertex_buffer, public opengl_buffer{
public:
	// Number of components per vertex, matching the constructor used
	// (4, 3, 2 or 1 for the vector4/vector3/vector2/float overloads).
	const GLint numComponents;

	// GL component type of the stored data (e.g. a float type for the
	// overloads below).
	const GLenum type;

	// Construct from 4/3/2/1-component float vertex data.
	vertex_buffer(utki::span<const r4::vector4<float>> vertices);
	vertex_buffer(utki::span<const r4::vector3<float>> vertices);
	vertex_buffer(utki::span<const r4::vector2<float>> vertices);
	vertex_buffer(utki::span<const float> vertices);

	// Non-copyable: wraps a GL buffer handle.
	vertex_buffer(const vertex_buffer&) = delete;
	vertex_buffer& operator=(const vertex_buffer&) = delete;

private:
	// Shared upload helper used by the constructors — presumably copies
	// 'size' bytes of 'data' into the GL buffer (defined in the .cpp;
	// confirm there).
	void init(GLsizeiptr size, const GLvoid* data);
};
}}<|fim▁end|> | #include <utki/span.hpp>
|
<|file_name|>device.rs<|end_file_name|><|fim▁begin|>use std::str;
use std::ffi::{CStr, OsStr};
use std::marker::PhantomData;
use std::path::Path;
use std::str::FromStr;
use libc::{c_char, dev_t};
use ::context::Context;
use ::handle::Handle;
/// Wraps a raw libudev `udev_device` pointer in a `Device`.
///
/// A reference is taken on the underlying `udev` context so it outlives the
/// returned `Device`; both references are released again in `Drop`.
///
/// # Safety
/// `device` must be a valid pointer obtained from libudev; ownership of its
/// reference is transferred to the returned `Device`.
pub unsafe fn from_raw(device: *mut ::ffi::udev_device) -> Device {
    ::ffi::udev_ref(::ffi::udev_device_get_udev(device));

    Device { device: device }
}
/// A structure that provides access to sysfs/kernel devices.
pub struct Device {
    // Owned libudev handle; the reference (and the context reference taken
    // in `from_raw`) is released in `Drop`.
    device: *mut ::ffi::udev_device,
}
impl Drop for Device {
    fn drop(&mut self) {
        // Release the device reference, then the reference on the udev
        // context that was taken in `from_raw()`.
        unsafe {
            let udev = ::ffi::udev_device_get_udev(self.device);

            ::ffi::udev_device_unref(self.device);
            ::ffi::udev_unref(udev);
        }
    }
}
#[doc(hidden)]
impl Handle<::ffi::udev_device> for Device {
    /// Exposes the raw libudev pointer for crate-internal FFI calls.
    fn as_ptr(&self) -> *mut ::ffi::udev_device {
        self.device
    }
}
impl Device {
    /// Creates a device for a given syspath.
    ///
    /// The `syspath` parameter should be a path to the device file within the `sysfs` file system,
    /// e.g., `/sys/devices/virtual/tty/tty0`.
    pub fn from_syspath(context: &Context, syspath: &Path) -> ::Result<Self> {
        // Convert to a NUL-terminated string for the C API.
        let syspath = try!(::util::os_str_to_cstring(syspath));

        Ok(unsafe {
            from_raw(try_alloc!(
                ::ffi::udev_device_new_from_syspath(context.as_ptr(), syspath.as_ptr())
            ))
        })
    }

    /// Checks whether the device has already been handled by udev.
    ///
    /// When a new device is connected to the system, udev initializes the device by setting
    /// permissions, renaming network devices, and possibly other initialization routines. This
    /// method returns `true` if udev has performed all of its work to initialize this device.
    ///
    /// This method only applies to devices with device nodes or network interfaces. All other
    /// devices return `true` by default.
    pub fn is_initialized(&self) -> bool {
        unsafe {
            ::ffi::udev_device_get_is_initialized(self.device) > 0
        }
    }

    /// Gets the device's major/minor number.
    pub fn devnum(&self) -> Option<dev_t> {
        // libudev returns 0 when the device has no device number.
        match unsafe { ::ffi::udev_device_get_devnum(self.device) } {
            0 => None,
            n => Some(n),
        }
    }

    /// Returns the syspath of the device.
    ///
    /// The path is an absolute path and includes the sys mount point. For example, the syspath for
    /// `tty0` could be `/sys/devices/virtual/tty/tty0`, which includes the sys mount point,
    /// `/sys`.
    pub fn syspath(&self) -> Option<&Path> {
        ::util::ptr_to_path(unsafe {
            ::ffi::udev_device_get_syspath(self.device)
        })
    }

    /// Returns the kernel devpath value of the device.
    ///
    /// The path does not contain the sys mount point, but does start with a `/`. For example, the
    /// devpath for `tty0` could be `/devices/virtual/tty/tty0`.
    pub fn devpath(&self) -> Option<&OsStr> {
        ::util::ptr_to_os_str(unsafe {
            ::ffi::udev_device_get_devpath(self.device)
        })
    }

    /// Returns the path to the device node belonging to the device.
    ///
    /// The path is an absolute path and starts with the device directory. For example, the device
    /// node for `tty0` could be `/dev/tty0`.
    pub fn devnode(&self) -> Option<&Path> {
        ::util::ptr_to_path(unsafe {
            ::ffi::udev_device_get_devnode(self.device)
        })
    }

    /// Returns the parent of the device.
    pub fn parent(&self) -> Option<Device> {
        let ptr = unsafe { ::ffi::udev_device_get_parent(self.device) };

        if !ptr.is_null() {
            unsafe {
                // Take our own reference before wrapping: the returned
                // `Device` unrefs in `Drop`, and `from_raw` refs the context.
                ::ffi::udev_device_ref(ptr);
                Some(from_raw(ptr))
            }
        }
        else {
            None
        }
    }

    /// Returns the subsystem name of the device.
    ///
    /// The subsystem name is a string that indicates which kernel subsystem the device belongs to.
    /// Examples of subsystem names are `tty`, `vtconsole`, `block`, `scsi`, and `net`.
    pub fn subsystem(&self) -> Option<&OsStr> {
        ::util::ptr_to_os_str(unsafe {
            ::ffi::udev_device_get_subsystem(self.device)
        })
    }

    /// Returns the kernel device name for the device.
    ///
    /// The sysname is a string that differentiates the device from others in the same subsystem.
    /// For example, `tty0` is the sysname for a TTY device that differentiates it from others,
    /// such as `tty1`.
    pub fn sysname(&self) -> Option<&OsStr> {
        ::util::ptr_to_os_str(unsafe {
            ::ffi::udev_device_get_sysname(self.device)
        })
    }

    /// Returns the instance number of the device.
    ///
    /// The instance number is used to differentiate many devices of the same type. For example,
    /// `/dev/tty0` and `/dev/tty1` are both TTY devices but have instance numbers of 0 and 1,
    /// respectively.
    ///
    /// Some devices don't have instance numbers, such as `/dev/console`, in which case the method
    /// returns `None`.
    pub fn sysnum(&self) -> Option<usize> {
        let ptr = unsafe { ::ffi::udev_device_get_sysnum(self.device) };

        if !ptr.is_null() {
            // Parse the numeric C string returned by libudev; any non-UTF-8
            // or non-numeric content yields `None`.
            match str::from_utf8(unsafe { CStr::from_ptr(ptr) }.to_bytes()) {
                Ok(s) => FromStr::from_str(s).ok(),
                Err(_) => None,
            }
        }
        else {
            None
        }
    }

    /// Returns the devtype name of the device.
    pub fn devtype(&self) -> Option<&OsStr> {
        ::util::ptr_to_os_str(unsafe { ::ffi::udev_device_get_devtype(self.device) })
    }

    /// Returns the name of the kernel driver attached to the device.
    pub fn driver(&self) -> Option<&OsStr> {
        ::util::ptr_to_os_str(unsafe { ::ffi::udev_device_get_driver(self.device) })
    }

    /// Retrieves the value of a device property.
    pub fn property_value<T: AsRef<OsStr>>(&self, property: T) -> Option<&OsStr> {
        // A property name that cannot be converted to a C string simply has
        // no value.
        match ::util::os_str_to_cstring(property) {
            Ok(prop) => {
                ::util::ptr_to_os_str(unsafe {
                    ::ffi::udev_device_get_property_value(self.device, prop.as_ptr())
                })
            },
            Err(_) => None,
        }
    }

    /// Retrieves the value of a device attribute.
    pub fn attribute_value<T: AsRef<OsStr>>(&self, attribute: T) -> Option<&OsStr> {
        match ::util::os_str_to_cstring(attribute) {
            Ok(attr) => {
                ::util::ptr_to_os_str(unsafe {
                    ::ffi::udev_device_get_sysattr_value(self.device, attr.as_ptr())
                })
            },
            Err(_) => None,
        }
    }

    /// Sets the value of a device attribute.
    pub fn set_attribute_value<T: AsRef<OsStr>, U: AsRef<OsStr>>(&mut self, attribute: T, value: U) -> ::Result<()> {
        let attribute = try!(::util::os_str_to_cstring(attribute));
        let value = try!(::util::os_str_to_cstring(value));

        ::util::errno_to_result(unsafe {
            ::ffi::udev_device_set_sysattr_value(self.device, attribute.as_ptr(), value.as_ptr() as *mut c_char)
        })
    }

    /// Returns an iterator over the device's properties.
    ///
    /// ## Example
    ///
    /// This example prints out all of a device's properties:
    ///
    /// ```no_run
    /// # use std::path::Path;
    /// # let mut context = libudev::Context::new().unwrap();
    /// # let device = libudev::Device::from_syspath(&context, Path::new("/sys/devices/virtual/tty/tty0")).unwrap();
    /// for property in device.properties() {
    ///     println!("{:?} = {:?}", property.name(), property.value());
    /// }
    /// ```
    pub fn properties(&self) -> Properties {
        Properties {
            _device: PhantomData,
            entry: unsafe { ::ffi::udev_device_get_properties_list_entry(self.device) },
        }
    }

    /// Returns an iterator over the device's attributes.
    ///
    /// ## Example
    ///
    /// This example prints out all of a device's attributes:
    ///
    /// ```no_run
    /// # use std::path::Path;
    /// # let mut context = libudev::Context::new().unwrap();
    /// # let device = libudev::Device::from_syspath(&context, Path::new("/sys/devices/virtual/tty/tty0")).unwrap();
    /// for attribute in device.attributes() {
    ///     println!("{:?} = {:?}", attribute.name(), attribute.value());
    /// }
    /// ```
    pub fn attributes(&self) -> Attributes {
        Attributes {
            device: self,
            entry: unsafe { ::ffi::udev_device_get_sysattr_list_entry(self.device) },
        }
    }
}
/// Iterator over a device's properties.
pub struct Properties<'a> {
    // Ties the iterator's lifetime to the `Device` it came from without
    // storing an actual reference.
    _device: PhantomData<&'a Device>,
    // Current position in libudev's property list (null when exhausted).
    entry: *mut ::ffi::udev_list_entry,
}
impl<'a> Iterator for Properties<'a> {
    type Item = Property<'a>;

    /// Yields the next name/value pair and advances the list cursor.
    fn next(&mut self) -> Option<Property<'a>> {
        if self.entry.is_null() {
            return None;
        }

        unsafe {
            let name = ::util::ptr_to_os_str_unchecked(::ffi::udev_list_entry_get_name(self.entry));
            let value = ::util::ptr_to_os_str_unchecked(::ffi::udev_list_entry_get_value(self.entry));

            self.entry = ::ffi::udev_list_entry_get_next(self.entry);

            Some(Property {
                name: name,
                value: value,
            })
        }
    }

    /// The number of remaining entries is unknown up front.
    fn size_hint(&self) -> (usize, Option<usize>) {
        (0, None)
    }
}
/// A device property.
pub struct Property<'a> {
    // Property name, borrowed from libudev's list for the device's lifetime.
    name: &'a OsStr,
    // Property value, borrowed likewise.
    value: &'a OsStr,
}
impl<'a> Property<'a> {
    /// Returns the property name.
    pub fn name(&self) -> &OsStr {
        self.name
    }

    /// Returns the property value.
    pub fn value(&self) -> &OsStr {
        self.value
    }
}
/// Iterator over a device's attributes.
pub struct Attributes<'a> {
    // Device the attributes belong to; each `Attribute` looks its value up
    // through this device lazily.
    device: &'a Device,
    // Current position in libudev's sysattr list (null when exhausted).
    entry: *mut ::ffi::udev_list_entry,
}
impl<'a> Iterator for Attributes<'a> {
type Item = Attribute<'a>;
fn next(&mut self) -> Option<Attribute<'a>> {
if !self.entry.is_null() {
let name = unsafe { ::util::ptr_to_os_str_unchecked(::ffi::udev_list_entry_get_name(self.entry)) };
self.entry = unsafe { ::ffi::udev_list_entry_get_next(self.entry) };
Some(Attribute {
device: self.device,
name: name,
})<|fim▁hole|> }
}
fn size_hint(&self) -> (usize, Option<usize>) {
(0, None)
}
}
/// A device attribute.
pub struct Attribute<'a> {
    // Device the attribute belongs to; used for the lazy value lookup.
    device: &'a Device,
    // Attribute name, borrowed from libudev's list.
    name: &'a OsStr,
}
impl<'a> Attribute<'a> {
    /// Returns the attribute name.
    pub fn name(&self) -> &OsStr {
        self.name
    }

    /// Returns the attribute value.
    ///
    /// The value is read on each call via `Device::attribute_value`, so it
    /// reflects the current state rather than a snapshot.
    pub fn value(&self) -> Option<&OsStr> {
        self.device.attribute_value(self.name)
    }
}
else {
None |
<|file_name|>doc.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2016 Tigera, Inc. All rights reserved.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*
Package client implements the northbound client used to manage Calico configuration.
This client is the main entry point for applications that are managing or querying
Calico configuration.
This client provides a typed interface for managing different resource types. The
definitions for each resource type are defined in the following package:
github.com/projectcalico/libcalico-go/lib/api
The client has a number of methods that return interfaces for managing:
- BGP Peer resources
- Policy resources
- IP Pool resources
- Host endpoint resources
- Workload endpoint resources
- Profile resources
- IP Address Management (IPAM)
See [resource definitions](http://docs.projectcalico.org/latest/reference/calicoctl/resources/) for details about the set of management commands for each
resource type.
The resource management interfaces have a common set of commands to create, delete,
update and retrieve resource instances. For example, an application using this
client to manage host endpoint resources would create an instance of this client, create a
new HostEndpoints interface and call the appropriate methods on that interface. For example:
// NewFromEnv() creates a new client and defaults to access an etcd backend datastore at<|fim▁hole|>
// Obtain the interface for managing host endpoint resources.
hostendpoints := client.HostEndpoints()
// Create a new host endpoint. All Create() methods return an error of type
// common.ErrorResourceAlreadyExists if the resource specified by its
// unique identifiers already exists.
hostEndpoint, err := hostEndpoints.Create(&api.HostEndpoint{
Metadata: api.HostEndpointMetadata{
Name: "endpoint1",
Hostname: "hostname1",
},
Spec: api.HostEndpointSpec{
InterfaceName: "eth0"
},
}
// Update an existing host endpoint. All Update() methods return an error of type
// common.ErrorResourceDoesNotExist if the resource specified by its
// unique identifiers does not exist.
hostEndpoint, err = hostEndpoints.Update(&api.HostEndpoint{
Metadata: api.HostEndpointMetadata{
Name: "endpoint1",
Hostname: "hostname1",
},
Spec: api.HostEndpointSpec{
InterfaceName: "eth0",
Profiles: []string{"profile1"},
},
}
// Apply (update or create) a hostEndpoint. All Apply() methods will update a resource
// if it already exists, and will create a new resource if it does not.
hostEndpoint, err = hostEndpoints.Apply(&api.HostEndpoint{
Metadata: api.HostEndpointMetadata{
Name: "endpoint1",
Hostname: "hostname1",
},
Spec: api.HostEndpointSpec{
InterfaceName: "eth1",
Profiles: []string{"profile1"},
},
}
// Delete a hostEndpoint. All Delete() methods return an error of type
// common.ErrorResourceDoesNotExist if the resource specified by its
// unique identifiers does not exist.
hostEndpoint, err = hostEndpoints.Delete(api.HostEndpointMetadata{
Name: "endpoint1",
Hostname: "hostname1",
})
// Get a hostEndpoint. All Get() methods return an error of type
// common.ErrorResourceDoesNotExist if the resource specified by its
// unique identifiers does not exist.
hostEndpoint, err = hostEndpoints.Get(api.HostEndpointMetadata{
Name: "endpoint1",
Hostname: "hostname1",
})
// List all hostEndpoints. All List() methods take a (sub-)set of the resource
// identifiers and return the corresponding list resource type that has an
// Items field containing a list of resources that match the supplied
// identifiers.
hostEndpointList, err := hostEndpoints.List(api.HostEndpointMetadata{})
*/
package client<|fim▁end|> | // http://127.0.0.1:2379. For alternative backend access details, set the appropriate
// ENV variables specified in the CalicoAPIConfigSpec structure.
client, err := client.NewFromEnv() |
<|file_name|>tables.go<|end_file_name|><|fim▁begin|>/*
Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Tables about what Buildifier can and cannot edit.
// Perhaps eventually this will be
// derived from the BUILD encyclopedia.
package tables
// IsLabelArg: a named argument to a rule call is considered to have a value
// that can be treated as a label or list of labels if the name
// is one of these names. There is a separate blacklist for
// rule-specific exceptions.
var IsLabelArg = map[string]bool{
"app_target": true,
"appdir": true,
"base_package": true,
"build_deps": true,
"cc_deps": true,
"ccdeps": true,
"common_deps": true,
"compile_deps": true,
"compiler": true,
"data": true,
"default_visibility": true,
"dep": true,
"deps": true,
"deps_java": true,
"dont_depend_on": true,
"env_deps": true,
"envscripts": true,
"exported_deps": true,
"exports": true,
"externs_list": true,
"files": true,
"globals": true,
"implementation": true,
"implements": true,
"includes": true,
"interface": true,
"jar": true,
"jars": true,
"javadeps": true,
"lib_deps": true,
"library": true,
"malloc": true,
"model": true,
"mods": true,
"module_deps": true,
"module_target": true,
"of": true,
"plugins": true,
"proto_deps": true,
"proto_target": true,
"protos": true,
"resource": true,
"resources": true,
"runtime_deps": true,
"scope": true,
"shared_deps": true,
"similar_deps": true,
"source_jar": true,
"src": true,
"srcs": true,
"stripped_targets": true,
"suites": true,
"swigdeps": true,
"target": true,
"target_devices": true,
"target_platforms": true,
"template": true,
"test": true,
"tests": true,
"tests_deps": true,
"tool": true,
"tools": true,
"visibility": true,
}
// LabelBlacklist is the list of call arguments that cannot be
// shortened, because they are not interpreted using the same
// rules as for other labels.
var LabelBlacklist = map[string]bool{
// Shortening this can cause visibility checks to fail.
"package_group.includes": true,
}
// By default, edit.types.IsList consults lang.TypeOf to determine if an arg is a list.
// You may override this using IsListArg. Specifying a name here overrides any value
// in lang.TypeOf.
var IsListArg = map[string]bool{}
// IsSortableListArg: a named argument to a rule call is considered to be a sortable list
// if the name is one of these names. There is a separate blacklist for
// rule-specific exceptions.
var IsSortableListArg = map[string]bool{
"cc_deps": true,
"common_deps": true,
"compile_deps": true,
"configs": true,
"constraints": true,
"data": true,
"default_visibility": true,
"deps": true,
"deps_java": true,
"exported_deps": true,
"exports": true,
"filegroups": true,
"files": true,
"hdrs": true,
"imports": true,
"includes": true,
"inherits": true,
"javadeps": true,
"lib_deps": true,
"module_deps": true,
"out": true,
"outs": true,
"packages": true,
"plugin_modules": true,
"proto_deps": true,
"protos": true,
"pubs": true,
"resources": true,
"runtime_deps": true,
"shared_deps": true,
"similar_deps": true,
"srcs": true,
"swigdeps": true,
"swig_includes": true,
"tags": true,
"tests": true,<|fim▁hole|>}
// SortableBlacklist records specific rule arguments that must not be reordered.
var SortableBlacklist = map[string]bool{
"genrule.outs": true,
"genrule.srcs": true,
}
// SortableWhitelist records specific rule arguments that are guaranteed
// to be reorderable, because bazel re-sorts the list itself after reading the BUILD file.
var SortableWhitelist = map[string]bool{
"cc_inc_library.hdrs": true,
"cc_library.hdrs": true,
"java_library.srcs": true,
"java_library.resources": true,
"java_binary.srcs": true,
"java_binary.resources": true,
"java_test.srcs": true,
"java_test.resources": true,
"java_library.constraints": true,
"java_import.constraints": true,
}
// NamePriority maps an argument name to its sorting priority.
//
// NOTE(bazel-team): These are the old buildifier rules. It is likely that this table
// will change, perhaps swapping in a separate table for each call,
// derived from the order used in the Build Encyclopedia.
var NamePriority = map[string]int{
"name": -99,
"gwt_name": -98,
"package_name": -97,
"visible_node_name": -96, // for boq_initial_css_modules and boq_jswire_test_suite
"size": -95,
"timeout": -94,
"testonly": -93,
"src": -92,
"srcdir": -91,
"srcs": -90,
"out": -89,
"outs": -88,
"hdrs": -87,
"has_services": -86, // before api versions, for proto
"include": -85, // before exclude, for glob
"of": -84, // for check_dependencies
"baseline": -83, // for searchbox_library
// All others sort here, at 0.
"destdir": 1,
"exports": 2,
"runtime_deps": 3,
"deps": 4,
"implementation": 5,
"implements": 6,
"alwayslink": 7,
// default condition in a dictionary literal passed to select should be
// the last one by convention.
"//conditions:default": 50,
}
var StripLabelLeadingSlashes = false
var ShortenAbsoluteLabelsToRelative = false
// OverrideTables allows a user of the build package to override the special-case rules. The user-provided tables replace the built-in tables.
//
// NOTE: the package-level tables are rebound to the caller's maps (no copy is
// made), so later mutation of the argument maps by the caller is visible here.
func OverrideTables(labelArg, blacklist, listArg, sortableListArg, sortBlacklist, sortWhitelist map[string]bool, namePriority map[string]int, stripLabelLeadingSlashes, shortenAbsoluteLabelsToRelative bool) {
	IsLabelArg = labelArg
	LabelBlacklist = blacklist
	IsListArg = listArg
	IsSortableListArg = sortableListArg
	SortableBlacklist = sortBlacklist
	SortableWhitelist = sortWhitelist
	NamePriority = namePriority
	// The two flags are plain replacements here (unlike MergeTables, which
	// ORs them with the current values).
	StripLabelLeadingSlashes = stripLabelLeadingSlashes
	ShortenAbsoluteLabelsToRelative = shortenAbsoluteLabelsToRelative
}
// MergeTables allows a user of the build package to override the special-case rules. The user-provided tables are merged into the built-in tables.
func MergeTables(labelArg, blacklist, listArg, sortableListArg, sortBlacklist, sortWhitelist map[string]bool, namePriority map[string]int, stripLabelLeadingSlashes, shortenAbsoluteLabelsToRelative bool) {
for k, v := range labelArg {
IsLabelArg[k] = v
}
for k, v := range blacklist {
LabelBlacklist[k] = v
}
for k, v := range listArg {
IsListArg[k] = v
}
for k, v := range sortableListArg {
IsSortableListArg[k] = v
}
for k, v := range sortBlacklist {
SortableBlacklist[k] = v
}
for k, v := range sortWhitelist {
SortableWhitelist[k] = v
}
for k, v := range namePriority {
NamePriority[k] = v
}
StripLabelLeadingSlashes = stripLabelLeadingSlashes || StripLabelLeadingSlashes
ShortenAbsoluteLabelsToRelative = shortenAbsoluteLabelsToRelative || ShortenAbsoluteLabelsToRelative
}<|fim▁end|> | "tools": true,
"to_start_extensions": true,
"visibility": true, |
<|file_name|>poly.rs<|end_file_name|><|fim▁begin|>use data::IntPoint;
use draw::RGB;
#[derive(Debug, Clone)]
pub struct Poly {
pub contours: Vec<Vec<IntPoint>>,
pub color: RGB,
}
impl Poly {
#[inline]
pub fn new(contours: Vec<Vec<IntPoint>>, color: RGB) -> Self {
Poly {
contours: contours,<|fim▁hole|> color: color,
}
}
}<|fim▁end|> | |
<|file_name|>SessionsInvalidationTestGenerated.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2010-2020 JetBrains s.r.o. and Kotlin Programming Language contributors.
* Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
*/
package org.jetbrains.kotlin.idea.fir.low.level.api.sessions;
import com.intellij.testFramework.TestDataPath;
import org.jetbrains.kotlin.test.JUnit3RunnerWithInners;
import org.jetbrains.kotlin.test.KotlinTestUtils;
import org.jetbrains.kotlin.test.TestMetadata;
import org.junit.runner.RunWith;<|fim▁hole|>
import java.io.File;
import java.util.regex.Pattern;
/** This class is generated by {@link org.jetbrains.kotlin.generators.tests.TestsPackage}. DO NOT MODIFY MANUALLY */
@SuppressWarnings("all")
@TestMetadata("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
public class SessionsInvalidationTestGenerated extends AbstractSessionsInvalidationTest {
private void runTest(String testDataFilePath) throws Exception {
KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
}
public void testAllFilesPresentInSessionInvalidation() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation"), Pattern.compile("^([^\\.]+)$"), null, false);
}
@TestMetadata("binaryTree")
public void testBinaryTree() throws Exception {
runTest("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation/binaryTree/");
}
@TestMetadata("binaryTreeNoInvalidated")
public void testBinaryTreeNoInvalidated() throws Exception {
runTest("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation/binaryTreeNoInvalidated/");
}
@TestMetadata("binaryTreeWithAdditionalEdge")
public void testBinaryTreeWithAdditionalEdge() throws Exception {
runTest("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation/binaryTreeWithAdditionalEdge/");
}
@TestMetadata("binaryTreeWithInvalidInRoot")
public void testBinaryTreeWithInvalidInRoot() throws Exception {
runTest("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation/binaryTreeWithInvalidInRoot/");
}
@TestMetadata("linear")
public void testLinear() throws Exception {
runTest("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation/linear/");
}
@TestMetadata("rhombus")
public void testRhombus() throws Exception {
runTest("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation/rhombus/");
}
@TestMetadata("rhombusWithTwoInvalid")
public void testRhombusWithTwoInvalid() throws Exception {
runTest("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation/rhombusWithTwoInvalid/");
}
}<|fim▁end|> | |
<|file_name|>dfg-compare-final-object-to-final-object-or-other-when-proven-final-object.js<|end_file_name|><|fim▁begin|>description(
"Tests that the CompareEq optimization for the case where one side is predicted final object and the other side is predicted either final object or other (i.e. null or undefined) doesn't assert when the other side is also proven final object."
);
function foo(a, b) {
return [b.f, a == b];
}
for (var i = 0; i < 100; ++i) {
if (i%2) {
var o = {f:42};<|fim▁hole|> } else
shouldThrow("foo({f:42}, null)");
}<|fim▁end|> | shouldBe("foo(o, o)", "[42, true]"); |
<|file_name|>Student.java<|end_file_name|><|fim▁begin|>/**
* Created by txs on 2016/10/17.
*/
public class Student {
String name;
int grade;
@Override
public String toString() {
String temp = "";
temp += "name: " + name + "\n";
temp += "grade: " + grade + "\n";
return temp;
}
<|fim▁hole|> @Override
public boolean equals(Object obj) {
if(this==obj) return true;
boolean r = false;
if(obj instanceof Student){
Student temp = (Student)obj;
if(this.name.equals(temp.name)
&& this.grade == temp.grade)
r = true;
}
return r;
}
}<|fim▁end|> | |
<|file_name|>__manifest__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Tests flow of API keys',
'category': 'Tools',<|fim▁hole|><|fim▁end|> | 'depends': ['web_tour'],
'data': ['views/assets.xml'],
} |
<|file_name|>issue-16441.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.<|fim▁hole|>// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// pretty-expanded FIXME #23616
struct Empty;
// This used to cause an ICE
extern "C" fn ice(_a: Empty) {}
fn main() {
}<|fim▁end|> | //
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
from django.conf.urls import include, url
from tastypie.api import Api
from locations.api import v1, v2
from locations.api.sword import views
v1_api = Api(api_name="v1")
v1_api.register(v1.SpaceResource())
v1_api.register(v1.LocationResource())
v1_api.register(v1.PackageResource())<|fim▁hole|>v2_api.register(v2.SpaceResource())
v2_api.register(v2.LocationResource())
v2_api.register(v2.PackageResource())
v2_api.register(v2.PipelineResource())
v2_api.register(v2.AsyncResource())
urlpatterns = [
url(r"", include(v1_api.urls)),
url(r"v1/sword/$", views.service_document, name="sword_service_document"),
url(r"", include(v2_api.urls)),
url(r"v2/sword/$", views.service_document, name="sword_service_document"),
]<|fim▁end|> | v1_api.register(v1.PipelineResource())
v1_api.register(v1.AsyncResource())
v2_api = Api(api_name="v2") |
<|file_name|>x86_64.js<|end_file_name|><|fim▁begin|><|fim▁hole|> "r15": "int64",
"r14": "int64",
"r13": "int64",
"r12": "int64",
"rbp": "int64",
"rbx": "int64",
"r11": "int64",
"r10": "int64",
"r9": "int64",
"r8": "int64",
"rax": "int64",
"rcx": "int64",
"rdx": "int64",
"rsi": "int64",
"rdi": "int64",
"orig_rax": "int64",
"rip": "int64",
"cs": "int64",
"eflags": "int64",
"rsp": "int64",
"ss": "int64",
"fs_base": "int64",
"gs_base": "int64",
"ds": "int64",
"es": "int64",
"fs": "int64",
"gs": "int64",
});
export default {
Registers
};<|fim▁end|> | import Struct from "ref-struct-napi";
const Registers = Struct({ |
<|file_name|>style_format.py<|end_file_name|><|fim▁begin|># Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os.path
import subprocess
import sys
_enable_style_format = None
_clang_format_command_path = None
_gn_command_path = None
def init(root_src_dir, enable_style_format=True):
assert isinstance(root_src_dir, str)
assert isinstance(enable_style_format, bool)
global _enable_style_format
global _clang_format_command_path
global _gn_command_path
assert _enable_style_format is None
assert _clang_format_command_path is None
assert _gn_command_path is None
_enable_style_format = enable_style_format
root_src_dir = os.path.abspath(root_src_dir)
# Determine //buildtools/<platform>/ directory
if sys.platform.startswith("linux"):
platform = "linux64"
exe_suffix = ""
elif sys.platform.startswith("darwin"):
platform = "mac"
exe_suffix = ""
elif sys.platform.startswith(("cygwin", "win")):
platform = "win"
exe_suffix = ".exe"
else:
assert False, "Unknown platform: {}".format(sys.platform)
buildtools_platform_dir = os.path.join(root_src_dir, "buildtools",
platform)
# //buildtools/<platform>/clang-format
_clang_format_command_path = os.path.join(
buildtools_platform_dir, "clang-format{}".format(exe_suffix))
# //buildtools/<platform>/gn
_gn_command_path = os.path.join(buildtools_platform_dir,
"gn{}".format(exe_suffix))
def auto_format(contents, filename):
assert isinstance(filename, str)
_, ext = os.path.splitext(filename)
if ext in (".gn", ".gni"):
return gn_format(contents, filename)
return clang_format(contents, filename)<|fim▁hole|>
def clang_format(contents, filename=None):
command_line = [_clang_format_command_path]
if filename is not None:
command_line.append('-assume-filename={}'.format(filename))
return _invoke_format_command(command_line, filename, contents)
def gn_format(contents, filename=None):
command_line = [_gn_command_path, "format", "--stdin"]
if filename is not None:
command_line.append('-assume-filename={}'.format(filename))
return _invoke_format_command(command_line, filename, contents)
def _invoke_format_command(command_line, filename, contents):
if not _enable_style_format:
return StyleFormatResult(stdout_output=contents,
stderr_output="",
exit_code=0,
filename=filename)
kwargs = {}
if sys.version_info.major != 2:
kwargs['encoding'] = 'utf-8'
proc = subprocess.Popen(command_line,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
**kwargs)
stdout_output, stderr_output = proc.communicate(input=contents)
exit_code = proc.wait()
return StyleFormatResult(
stdout_output=stdout_output,
stderr_output=stderr_output,
exit_code=exit_code,
filename=filename)
class StyleFormatResult(object):
def __init__(self, stdout_output, stderr_output, exit_code, filename):
self._stdout_output = stdout_output
self._stderr_output = stderr_output
self._exit_code = exit_code
self._filename = filename
@property
def did_succeed(self):
return self._exit_code == 0
@property
def contents(self):
assert self.did_succeed
return self._stdout_output
@property
def error_message(self):
return self._stderr_output
@property
def filename(self):
return self._filename<|fim▁end|> | |
<|file_name|>MainTest.java<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.spring;
import java.util.List;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.spring.example.MyProcessor;
import org.junit.Assert;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class MainTest extends Assert {
private static final Logger LOG = LoggerFactory.getLogger(MainTest.class);
@Test
public void testMain() throws Exception {<|fim▁hole|> Main main = new Main();
main.addRouteBuilder(new RouteBuilder() {
@Override
public void configure() throws Exception {
from("file://src/test/data?initialDelay=0&delay=10&noop=true").process(new MyProcessor()).to("mock:results");
}
});
main.start();
CamelContext camelContext = main.getCamelContext();
MockEndpoint endpoint = camelContext.getEndpoint("mock:results", MockEndpoint.class);
// in case we add more files in src/test/data
endpoint.expectedMinimumMessageCount(2);
endpoint.assertIsSatisfied();
List<Exchange> list = endpoint.getReceivedExchanges();
LOG.debug("Received: " + list);
main.stop();
}
}<|fim▁end|> | // lets make a simple route |
<|file_name|>dht_bencode.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*-
def decode(data):
try:
value, idx = __decode(data, 0)
retval = (True, value)
except Exception as e:
retval = (False, e.message)
finally:
return retval
def encode(data):
try:
value = __encode(data)
retval = (True, value)
except Exception, e:
retval = (False, e.message)
finally:
return retval
# 内部函数
# 解析bencode数据
def __decode(data, start_idx):
if data[start_idx] == 'i':
value, start_idx = __decode_int(data, start_idx + 1)
elif data[start_idx].isdigit():
value, start_idx = __decode_str(data, start_idx)
elif data[start_idx] == 'l':
value, start_idx = __decode_list(data, start_idx + 1)
elif data[start_idx] == 'd':
value, start_idx = __decode_dict(data, start_idx + 1)
else:
raise ValueError('__decode: not in i, l, d')
return value, start_idx
# 解析整数
def __decode_int(data, start_idx):
end_idx = data.index('e', start_idx)
try:
value = int(data[start_idx: end_idx])
except Exception:
raise Exception('__decode_int: error')
return value, end_idx + 1
# 解析字符串
def __decode_str(data, start_idx):
try:
end_idx = data.index(':', start_idx)
str_len = int(data[start_idx: end_idx])
start_idx = end_idx + 1
end_idx = start_idx + str_len
value = data[start_idx: end_idx]
except Exception:
raise Exception('__decode_str: error')
return value, end_idx
# 解析列表
def __decode_list(data, start_idx):
values = []
while data[start_idx] != 'e':
value, start_idx = __decode(data, start_idx)
values.append(value)
return values, start_idx + 1
# 解析字典
def __decode_dict(data, start_idx):
dict_value = dict()
while data[start_idx] != 'e':
key, start_idx = __decode(data, start_idx)
value, start_idx = __decode(data, start_idx)
dict_value[key] = value
return dict_value, start_idx + 1
# 数据编码
def __encode(data):
if isinstance(data, int):
value = __encode_int(data)
elif isinstance(data, str):
value = __encode_str(data)
elif isinstance(data, dict):<|fim▁hole|> value = __encode_list(data)
else:
raise Exception('__encode: Error')
return value
# 数字编码
def __encode_int(data):
return 'i' + str(data) + 'e'
# 字符串编码
def __encode_str(data):
str_len = len(data)
return str(str_len) + ':' + data
# 列表编码
def __encode_list(data):
ret = 'l'
for datai in data:
ret += __encode(datai)
return ret + 'e'
# 字典编码
def __encode_dict(data):
ret = 'd'
for key, value in data.items():
ret += __encode(key)
ret += __encode(value)
return ret + 'e'<|fim▁end|> | value = __encode_dict(data)
elif isinstance(data, list): |
<|file_name|>gzip.hpp<|end_file_name|><|fim▁begin|>// (C) Copyright Jonathan Turkanis 2003.
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt.)
// See http://www.boost.org/libs/iostreams for documentation.
// Contains the definitions of the class templates gzip_compressor and
// gzip_decompressor for reading and writing files in the gzip file format
// (RFC 1952). Based in part on work of Jonathan de Halleux; see [...]
#ifndef BOOST_IOSTREAMS_GZIP_HPP_INCLUDED
#define BOOST_IOSTREAMS_GZIP_HPP_INCLUDED
#if defined(_MSC_VER) && (_MSC_VER >= 1020)
# pragma once
#endif
#include <boost/config.hpp> // STATIC_CONSTANT, STDC_NAMESPACE,
// DINKUMWARE_STDLIB, __STL_CONFIG_H.
#include <algorithm> // min.
#include <cstdio> // EOF.
#include <cstddef> // size_t.
#include <ctime> // std::time_t.
#include <memory> // allocator.
#include <boost/config.hpp> // Put size_t in std.
#include <boost/detail/workaround.hpp>
#include <boost/cstdint.hpp> // uint8_t, uint32_t.
#include <boost/iostreams/constants.hpp> // buffer size.
#include <boost/iostreams/detail/adapter/non_blocking_adapter.hpp>
#include <boost/iostreams/detail/adapter/range_adapter.hpp>
#include <boost/iostreams/detail/char_traits.hpp>
#include <boost/iostreams/detail/error.hpp>
#include <boost/iostreams/detail/ios.hpp> // failure.
#include <boost/iostreams/operations.hpp>
#include <boost/iostreams/device/back_inserter.hpp>
#include <boost/iostreams/filter/zlib.hpp>
#include <boost/iostreams/pipeline.hpp>
// Must come last.
#if defined(BOOST_MSVC)
# pragma warning(push)
# pragma warning(disable: 4309) // Truncation of constant value.
#endif
#ifdef BOOST_NO_STDC_NAMESPACE
namespace std { using ::time_t; }
#endif
namespace boost { namespace iostreams {
namespace gzip {
using namespace boost::iostreams::zlib;
// Error codes used by gzip_error.
const int zlib_error = 1;
const int bad_crc = 2; // Recorded crc doesn't match data.
const int bad_length = 3; // Recorded length doesn't match data.
const int bad_header = 4; // Malformed header.
const int bad_footer = 5; // Malformed footer.
namespace magic {
// Magic numbers used by gzip header.
const int id1 = 0x1f;
const int id2 = 0x8b;
} // End namespace magic.
namespace method {
// Codes used for the 'CM' byte of the gzip header.
const int deflate = 8;
} // End namespace method.
namespace flags {
// Codes used for the 'FLG' byte of the gzip header.
const int text = 1;
const int header_crc = 2;
const int extra = 4;
const int name = 8;
const int comment = 16;
} // End namespace flags.
namespace extra_flags {
// Codes used for the 'XFL' byte of the gzip header.
const int best_compression = 2;
const int best_speed = 4;
} // End namespace extra_flags.
// Codes used for the 'OS' byte of the gzip header.
const int os_fat = 0;
const int os_amiga = 1;
const int os_vms = 2;
const int os_unix = 3;
const int os_vm_cms = 4;
const int os_atari = 5;
const int os_hpfs = 6;
const int os_macintosh = 7;
const int os_z_system = 8;
const int os_cp_m = 9;
const int os_tops_20 = 10;
const int os_ntfs = 11;
const int os_qdos = 12;
const int os_acorn = 13;
const int os_unknown = 255;
} // End namespace gzip.
//
// Class name: gzip_params.
// Description: Subclass of zlib_params with an additional field
// representing a file name.
//
struct gzip_params : zlib_params {
// Non-explicit constructor.
gzip_params( int level = gzip::default_compression,
int method = gzip::deflated,
int window_bits = gzip::default_window_bits,
int mem_level = gzip::default_mem_level,
int strategy = gzip::default_strategy,
std::string file_name = "",
std::string comment = "",
std::time_t mtime = 0 )
: zlib_params(level, method, window_bits, mem_level, strategy),
file_name(file_name), mtime(mtime)
{ }
std::string file_name;
std::string comment;
std::time_t mtime;
};
//
// Class name: gzip_error.
// Description: Subclass of std::ios_base::failure thrown to indicate
// zlib errors other than out-of-memory conditions.
//
class gzip_error : public BOOST_IOSTREAMS_FAILURE {
public:
explicit gzip_error(int error)
: BOOST_IOSTREAMS_FAILURE("gzip error"),
error_(error), zlib_error_code_(zlib::okay) { }
explicit gzip_error(const zlib_error& e)
: BOOST_IOSTREAMS_FAILURE("gzip error"),
error_(gzip::zlib_error), zlib_error_code_(e.error())
{ }
int error() const { return error_; }
int zlib_error_code() const { return zlib_error_code_; }
private:
int error_;
int zlib_error_code_;
};
//
// Template name: gzip_compressor
// Description: Model of OutputFilter implementing compression in the
// gzip format.
//
template<typename Alloc = std::allocator<char> >
class basic_gzip_compressor : basic_zlib_compressor<Alloc> {
private:
typedef basic_zlib_compressor<Alloc> base_type;
public:
typedef char char_type;
struct category
: dual_use,
filter_tag,
multichar_tag,
closable_tag
{ };
basic_gzip_compressor( const gzip_params& = gzip::default_compression,
int buffer_size = default_device_buffer_size );
template<typename Source>
std::streamsize read(Source& src, char_type* s, std::streamsize n)
{
<|fim▁hole|> streamsize result = 0;
// Read header.
if (!(flags_ & f_header_done))
result += read_string(s, n, header_);
// Read body.
if (!(flags_ & f_body_done)) {
// Read from basic_zlib_filter.
streamsize amt = base_type::read(src, s + result, n - result);
if (amt != -1) {
result += amt;
if (amt < n - result) { // Double-check for EOF.
amt = base_type::read(src, s + result, n - result);
if (amt != -1)
result += amt;
}
}
if (amt == -1)
prepare_footer();
}
// Read footer.
if ((flags_ & f_body_done) != 0 && result < n)
result += read_string(s + result, n - result, footer_);
return result != 0 ? result : -1;
}
template<typename Sink>
std::streamsize write(Sink& snk, const char_type* s, std::streamsize n)
{
if (!(flags_ & f_header_done)) {
std::streamsize amt =
static_cast<std::streamsize>(header_.size() - offset_);
offset_ += boost::iostreams::write(snk, header_.data() + offset_, amt);
if (offset_ == header_.size())
flags_ |= f_header_done;
else
return 0;
}
return base_type::write(snk, s, n);
}
template<typename Sink>
void close(Sink& snk, BOOST_IOS::openmode m)
{
namespace io = boost::iostreams;
if (m & BOOST_IOS::out) {
// Close zlib compressor.
base_type::close(snk, BOOST_IOS::out);
if (flags_ & f_header_done) {
// Write final fields of gzip file format.
write_long(this->crc(), snk);
write_long(this->total_in(), snk);
}
}
#if BOOST_WORKAROUND(__GNUC__, == 2) && defined(__STL_CONFIG_H) || \
BOOST_WORKAROUND(BOOST_DINKUMWARE_STDLIB, == 1) \
/**/
footer_.erase(0, std::string::npos);
#else
footer_.clear();
#endif
offset_ = 0;
flags_ = 0;
}
private:
static gzip_params normalize_params(gzip_params p);
void prepare_footer();
std::streamsize read_string(char* s, std::streamsize n, std::string& str);
template<typename Sink>
static void write_long(long n, Sink& next)
{
boost::iostreams::put(next, static_cast<char>(0xFF & n));
boost::iostreams::put(next, static_cast<char>(0xFF & (n >> 8)));
boost::iostreams::put(next, static_cast<char>(0xFF & (n >> 16)));
boost::iostreams::put(next, static_cast<char>(0xFF & (n >> 24)));
}
enum flag_type {
f_header_done = 1,
f_body_done = f_header_done << 1,
f_footer_done = f_body_done << 1
};
std::string header_;
std::string footer_;
std::size_t offset_;
int flags_;
};
BOOST_IOSTREAMS_PIPABLE(basic_gzip_compressor, 1)
typedef basic_gzip_compressor<> gzip_compressor;
//
// Template name: basic_gzip_decompressor
// Description: Model of InputFilter implementing compression in the
// gzip format.
//
template<typename Alloc = std::allocator<char> >
class basic_gzip_decompressor : basic_zlib_decompressor<Alloc> {
public:
typedef char char_type;
struct category
: //multichar_input_filter_tag ,
multichar_tag,
filter_tag,
input_seekable,
closable_tag
//seekable_filter_tag
{ };
basic_gzip_decompressor( int window_bits = gzip::default_window_bits,
int buffer_size = default_device_buffer_size );
template <typename Source>
std::streampos seek(Source &src, stream_offset off,
BOOST_IOS::seekdir way)
{
if (way != BOOST_IOS::beg)
{
throw detail::cant_seek();
}
non_blocking_adapter<Source> nb(src);
std::streampos rval;
boost::iostreams::seek(nb, 0, std::ios_base::beg);
// reset the decoder
//impl_type::reset(false, true);
base_type::close(src, BOOST_IOS::in);
flags_ = 0;
// now seek
std::streamsize nuint32s = off/sizeof(uint32_t);
std::streamsize nuint8s = off%sizeof(uint32_t);
uint32_t four_bytes;
uint8_t one_byte;
while (nuint32s > 0)
{
read(src, (char_type*)(&four_bytes), sizeof(uint32_t));
--nuint32s;
rval += sizeof(uint32_t);
}
while (nuint8s > 0)
{
read(src, (char_type*)(&one_byte), sizeof(uint8_t));
--nuint8s;
rval += sizeof(uint8_t);
}
return (rval);
}
template<typename Source>
std::streamsize read(Source& src, char_type* s, std::streamsize n)
{
if ((flags_ & f_header_read) == 0) {
non_blocking_adapter<Source> nb(src);
read_header(nb);
flags_ |= f_header_read;
}
if ((flags_ & f_footer_read) != 0)
return -1;
try {
std::streamsize result = 0;
std::streamsize amt;
if ((amt = base_type::read(src, s, n)) != -1) {
result += amt;
if (amt < n) { // Double check for EOF.
amt = base_type::read(src, s + result, n - result);
if (amt != -1)
result += amt;
}
}
if (amt == -1) {
non_blocking_adapter<Source> nb(src);
read_footer(nb);
flags_ |= f_footer_read;
}
return result;
} catch (const zlib_error& e) {
throw gzip_error(e);
}
}
template<typename Source>
void close(Source& src)
{
try {
base_type::close(src, BOOST_IOS::in);
flags_ = 0;
} catch (const zlib_error& e) {
throw gzip_error(e);
}
}
std::string file_name() const { return file_name_; }
std::string comment() const { return comment_; }
bool text() const { return (flags_ & gzip::flags::text) != 0; }
int os() const { return os_; }
std::time_t mtime() const { return mtime_; }
private:
typedef basic_zlib_decompressor<Alloc> base_type;
typedef BOOST_IOSTREAMS_CHAR_TRAITS(char) traits_type;
static bool is_eof(int c) { return traits_type::eq_int_type(c, EOF); }
static gzip_params make_params(int window_bits);
template<typename Source>
static uint8_t read_uint8(Source& src, int error)
{
int c;
if ((c = boost::iostreams::get(src)) == EOF || c == WOULD_BLOCK)
throw gzip_error(error);
return static_cast<uint8_t>(traits_type::to_char_type(c));
}
template<typename Source>
static uint32_t read_uint32(Source& src, int error)
{
uint8_t b1 = read_uint8(src, error);
uint8_t b2 = read_uint8(src, error);
uint8_t b3 = read_uint8(src, error);
uint8_t b4 = read_uint8(src, error);
return b1 + (b2 << 8) + (b3 << 16) + (b4 << 24);
}
template<typename Source>
std::string read_string(Source& src)
{
std::string result;
while (true) {
int c;
if (is_eof(c = boost::iostreams::get(src)))
throw gzip_error(gzip::bad_header);
else if (c == 0)
return result;
else
result += static_cast<char>(c);
}
}
template<typename Source>
void read_header(Source& src) // Source is non-blocking.
{
// Reset saved values.
#if BOOST_WORKAROUND(__GNUC__, == 2) && defined(__STL_CONFIG_H) || \
BOOST_WORKAROUND(BOOST_DINKUMWARE_STDLIB, == 1) \
/**/
file_name_.erase(0, std::string::npos);
comment_.erase(0, std::string::npos);
#else
file_name_.clear();
comment_.clear();
#endif
os_ = gzip::os_unknown;
mtime_ = 0;
int flags;
// Read header, without checking header crc.
if ( boost::iostreams::get(src) != gzip::magic::id1 || // ID1.
boost::iostreams::get(src) != gzip::magic::id2 || // ID2.
is_eof(boost::iostreams::get(src)) || // CM.
is_eof(flags = boost::iostreams::get(src)) ) // FLG.
{
throw gzip_error(gzip::bad_header);
}
mtime_ = read_uint32(src, gzip::bad_header); // MTIME.
read_uint8(src, gzip::bad_header); // XFL.
os_ = read_uint8(src, gzip::bad_header); // OS.
if (flags & boost::iostreams::gzip::flags::text)
flags_ |= f_text;
// Skip extra field. (From J. Halleaux; see note at top.)
if (flags & gzip::flags::extra) {
int length =
static_cast<int>(
read_uint8(src, gzip::bad_header) +
(read_uint8(src, gzip::bad_header) << 8)
);
// length is garbage if EOF but the loop below will quit anyway.
do { }
while (length-- != 0 && !is_eof(boost::iostreams::get(src)));
}
if (flags & gzip::flags::name) // Read file name.
file_name_ = read_string(src);
if (flags & gzip::flags::comment) // Read comment.
comment_ = read_string(src);
if (flags & gzip::flags::header_crc) { // Skip header crc.
read_uint8(src, gzip::bad_header);
read_uint8(src, gzip::bad_header);
}
}
template<typename Source>
void read_footer(Source& src)
{
typename base_type::string_type footer =
this->unconsumed_input();
int c;
while (!is_eof(c = boost::iostreams::get(src)))
footer += c;
detail::range_adapter<input, std::string>
rng(footer.begin(), footer.end());
if (read_uint32(rng, gzip::bad_footer) != this->crc())
throw gzip_error(gzip::bad_crc);
if (static_cast<int>(read_uint32(rng, gzip::bad_footer)) != this->total_out())
throw gzip_error(gzip::bad_length);
}
enum flag_type {
f_header_read = 1,
f_footer_read = f_header_read << 1,
f_text = f_footer_read << 1
};
std::string file_name_;
std::string comment_;
int os_;
std::time_t mtime_;
int flags_;
};
BOOST_IOSTREAMS_PIPABLE(basic_gzip_decompressor, 1)
typedef basic_gzip_decompressor<> gzip_decompressor;
//------------------Implementation of gzip_compressor-------------------------//
template<typename Alloc>
basic_gzip_compressor<Alloc>::basic_gzip_compressor
(const gzip_params& p, int buffer_size)
: base_type(normalize_params(p), buffer_size),
offset_(0), flags_(0)
{
// Calculate gzip header.
bool has_name = !p.file_name.empty();
bool has_comment = !p.comment.empty();
std::string::size_type length =
10 +
(has_name ? p.file_name.size() + 1 : 0) +
(has_comment ? p.comment.size() + 1 : 0);
// + 2; // Header crc confuses gunzip.
int flags =
//gzip::flags::header_crc +
(has_name ? gzip::flags::name : 0) +
(has_comment ? gzip::flags::comment : 0);
int extra_flags =
( p.level == zlib::best_compression ?
gzip::extra_flags::best_compression :
0 ) +
( p.level == zlib::best_speed ?
gzip::extra_flags::best_speed :
0 );
header_.reserve(length);
header_ += gzip::magic::id1; // ID1.
header_ += gzip::magic::id2; // ID2.
header_ += gzip::method::deflate; // CM.
header_ += static_cast<char>(flags); // FLG.
header_ += static_cast<char>(0xFF & p.mtime); // MTIME.
header_ += static_cast<char>(0xFF & (p.mtime >> 8));
header_ += static_cast<char>(0xFF & (p.mtime >> 16));
header_ += static_cast<char>(0xFF & (p.mtime >> 24));
header_ += static_cast<char>(extra_flags); // XFL.
header_ += static_cast<char>(gzip::os_unknown); // OS.
if (has_name) {
header_ += p.file_name;
header_ += '\0';
}
if (has_comment) {
header_ += p.comment;
header_ += '\0';
}
}
template<typename Alloc>
gzip_params basic_gzip_compressor<Alloc>::normalize_params(gzip_params p)
{
p.noheader = true;
p.calculate_crc = true;
return p;
}
template<typename Alloc>
void basic_gzip_compressor<Alloc>::prepare_footer()
{
boost::iostreams::back_insert_device<std::string> out(footer_);
write_long(this->crc(), out);
write_long(this->total_in(), out);
flags_ |= f_body_done;
offset_ = 0;
}
template<typename Alloc>
std::streamsize basic_gzip_compressor<Alloc>::read_string
(char* s, std::streamsize n, std::string& str)
{
using namespace std;
streamsize avail =
static_cast<streamsize>(str.size() - offset_);
streamsize amt = (std::min)(avail, n);
std::copy( str.data() + offset_,
str.data() + offset_ + amt,
s );
offset_ += amt;
if ( !(flags_ & f_header_done) &&
offset_ == static_cast<std::size_t>(str.size()) )
{
flags_ |= f_header_done;
}
return amt;
}
//------------------Implementation of gzip_decompressor-----------------------//
template<typename Alloc>
basic_gzip_decompressor<Alloc>::basic_gzip_decompressor
(int window_bits, int buffer_size)
: base_type(make_params(window_bits), buffer_size),
os_(gzip::os_unknown), mtime_(0), flags_(0)
{ }
template<typename Alloc>
gzip_params basic_gzip_decompressor<Alloc>::make_params(int window_bits)
{
gzip_params p;
p.window_bits = window_bits;
p.noheader = true;
p.calculate_crc = true;
return p;
}
//----------------------------------------------------------------------------//
} } // End namespaces iostreams, boost.
#if defined(BOOST_MSVC)
# pragma warning(pop)
#endif
#endif // #ifndef BOOST_IOSTREAMS_GZIP_HPP_INCLUDED<|fim▁end|> | using namespace std;
|
<|file_name|>multiple_files.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate rand;
use rand::{task_rng, Rng};
use std::{char, os, str};
use std::io::{File, Process};
// creates unicode_input_multiple_files_{main,chars}.rs, where the
// former imports the latter. `_chars` just contains an indentifier
// made up of random characters, because will emit an error message
// about the ident being in the wrong place, with a span (and creating
// this span used to upset the compiler).
fn random_char() -> char {
let mut rng = task_rng();
// a subset of the XID_start unicode table (ensuring that the
// compiler doesn't fail with an "unrecognised token" error)
let (lo, hi): (u32, u32) = match rng.gen_range(1, 4 + 1) {
1 => (0x41, 0x5a),
2 => (0xf8, 0x1ba),
3 => (0x1401, 0x166c),
_ => (0x10400, 0x1044f)
};
char::from_u32(rng.gen_range(lo, hi + 1)).unwrap()
}
fn main() {
let args = os::args();
let rustc = args[1].as_slice();
let tmpdir = Path::new(args[2].as_slice());
let main_file = tmpdir.join("unicode_input_multiple_files_main.rs");
let main_file_str = main_file.as_str().unwrap();
{
let _ = File::create(&main_file).unwrap()
.write_str("mod unicode_input_multiple_files_chars;");
}<|fim▁hole|> let randoms = tmpdir.join("unicode_input_multiple_files_chars.rs");
let mut w = File::create(&randoms).unwrap();
for _ in range(0, 30) {
let _ = w.write_char(random_char());
}
}
// rustc is passed to us with --out-dir and -L etc., so we
// can't exec it directly
let result = Process::output("sh", [~"-c", rustc + " " + main_file_str]).unwrap();
let err = str::from_utf8_lossy(result.error.as_slice());
// positive test so that this test will be updated when the
// compiler changes.
assert!(err.as_slice().contains("expected item but found"))
}
}<|fim▁end|> |
for _ in range(0, 100) {
{ |
<|file_name|>test_corpora_dictionary.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Unit tests for the `corpora.Dictionary` class.
"""
from collections import Mapping
import logging
import unittest
import codecs
import os
import os.path
import scipy
import gensim
from gensim.corpora import Dictionary
from gensim.utils import to_utf8
from gensim.test.utils import get_tmpfile, common_texts
from six import PY3
from six.moves import zip
class TestDictionary(unittest.TestCase):
def setUp(self):
self.texts = common_texts
def testDocFreqOneDoc(self):
texts = [['human', 'interface', 'computer']]
d = Dictionary(texts)
expected = {0: 1, 1: 1, 2: 1}
self.assertEqual(d.dfs, expected)
def testDocFreqAndToken2IdForSeveralDocsWithOneWord(self):
# two docs
texts = [['human'], ['human']]
d = Dictionary(texts)
expected = {0: 2}
self.assertEqual(d.dfs, expected)
# only one token (human) should exist
expected = {'human': 0}
self.assertEqual(d.token2id, expected)
# three docs
texts = [['human'], ['human'], ['human']]
d = Dictionary(texts)
expected = {0: 3}
self.assertEqual(d.dfs, expected)
# only one token (human) should exist
expected = {'human': 0}
self.assertEqual(d.token2id, expected)
# four docs
texts = [['human'], ['human'], ['human'], ['human']]
d = Dictionary(texts)
expected = {0: 4}
self.assertEqual(d.dfs, expected)
# only one token (human) should exist
expected = {'human': 0}
self.assertEqual(d.token2id, expected)
def testDocFreqForOneDocWithSeveralWord(self):
# two words
texts = [['human', 'cat']]
d = Dictionary(texts)
expected = {0: 1, 1: 1}
self.assertEqual(d.dfs, expected)
# three words
texts = [['human', 'cat', 'minors']]
d = Dictionary(texts)
expected = {0: 1, 1: 1, 2: 1}
self.assertEqual(d.dfs, expected)
def testBuild(self):
d = Dictionary(self.texts)
# Since we don't specify the order in which dictionaries are built,
# we cannot reliably test for the mapping; only the keys and values.
expected_keys = list(range(12))
expected_values = [2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 3]
self.assertEqual(sorted(d.dfs.keys()), expected_keys)
self.assertEqual(sorted(d.dfs.values()), expected_values)
expected_keys = sorted([
'computer', 'eps', 'graph', 'human', 'interface',
'minors', 'response', 'survey', 'system', 'time', 'trees', 'user'
])
expected_values = list(range(12))
self.assertEqual(sorted(d.token2id.keys()), expected_keys)
self.assertEqual(sorted(d.token2id.values()), expected_values)
def testMerge(self):
d = Dictionary(self.texts)
f = Dictionary(self.texts[:3])
g = Dictionary(self.texts[3:])
f.merge_with(g)
self.assertEqual(sorted(d.token2id.keys()), sorted(f.token2id.keys()))
def testFilter(self):
d = Dictionary(self.texts)
d.filter_extremes(no_below=2, no_above=1.0, keep_n=4)
expected = {0: 3, 1: 3, 2: 3, 3: 3}
self.assertEqual(d.dfs, expected)
def testFilterKeepTokens_keepTokens(self):
# provide keep_tokens argument, keep the tokens given
d = Dictionary(self.texts)
d.filter_extremes(no_below=3, no_above=1.0, keep_tokens=['human', 'survey'])
expected = {'graph', 'trees', 'human', 'system', 'user', 'survey'}
self.assertEqual(set(d.token2id.keys()), expected)
def testFilterKeepTokens_unchangedFunctionality(self):
# do not provide keep_tokens argument, filter_extremes functionality is unchanged
d = Dictionary(self.texts)
d.filter_extremes(no_below=3, no_above=1.0)
expected = {'graph', 'trees', 'system', 'user'}
self.assertEqual(set(d.token2id.keys()), expected)
def testFilterKeepTokens_unseenToken(self):
# do provide keep_tokens argument with unseen tokens, filter_extremes functionality is unchanged
d = Dictionary(self.texts)
d.filter_extremes(no_below=3, no_above=1.0, keep_tokens=['unknown_token'])
expected = {'graph', 'trees', 'system', 'user'}
self.assertEqual(set(d.token2id.keys()), expected)
def testFilterMostFrequent(self):
d = Dictionary(self.texts)
d.filter_n_most_frequent(4)
expected = {0: 2, 1: 2, 2: 2, 3: 2, 4: 2, 5: 2, 6: 2, 7: 2}
self.assertEqual(d.dfs, expected)
def testFilterTokens(self):
self.maxDiff = 10000
d = Dictionary(self.texts)
removed_word = d[0]
d.filter_tokens([0])
expected = {
'computer': 0, 'eps': 8, 'graph': 10, 'human': 1,
'interface': 2, 'minors': 11, 'response': 3, 'survey': 4,
'system': 5, 'time': 6, 'trees': 9, 'user': 7
}
del expected[removed_word]
self.assertEqual(sorted(d.token2id.keys()), sorted(expected.keys()))
expected[removed_word] = len(expected)
d.add_documents([[removed_word]])
self.assertEqual(sorted(d.token2id.keys()), sorted(expected.keys()))
def test_doc2bow(self):
d = Dictionary([["žluťoučký"], ["žluťoučký"]])
# pass a utf8 string
self.assertEqual(d.doc2bow(["žluťoučký"]), [(0, 1)])
# doc2bow must raise a TypeError if passed a string instead of array of strings by accident
self.assertRaises(TypeError, d.doc2bow, "žluťoučký")
# unicode must be converted to utf8<|fim▁hole|> tmpf = get_tmpfile('save_dict_test.txt')
small_text = [
["prvé", "slovo"],
["slovo", "druhé"],
["druhé", "slovo"]
]
d = Dictionary(small_text)
d.save_as_text(tmpf)
with codecs.open(tmpf, 'r', encoding='utf-8') as file:
serialized_lines = file.readlines()
self.assertEqual(serialized_lines[0], u"3\n")
self.assertEqual(len(serialized_lines), 4)
# We do not know, which word will have which index
self.assertEqual(serialized_lines[1][1:], u"\tdruhé\t2\n")
self.assertEqual(serialized_lines[2][1:], u"\tprvé\t1\n")
self.assertEqual(serialized_lines[3][1:], u"\tslovo\t3\n")
d.save_as_text(tmpf, sort_by_word=False)
with codecs.open(tmpf, 'r', encoding='utf-8') as file:
serialized_lines = file.readlines()
self.assertEqual(serialized_lines[0], u"3\n")
self.assertEqual(len(serialized_lines), 4)
self.assertEqual(serialized_lines[1][1:], u"\tslovo\t3\n")
self.assertEqual(serialized_lines[2][1:], u"\tdruhé\t2\n")
self.assertEqual(serialized_lines[3][1:], u"\tprvé\t1\n")
def test_loadFromText_legacy(self):
"""
`Dictionary` can be loaded from textfile in legacy format.
Legacy format does not have num_docs on the first line.
"""
tmpf = get_tmpfile('load_dict_test_legacy.txt')
no_num_docs_serialization = to_utf8("1\tprvé\t1\n2\tslovo\t2\n")
with open(tmpf, "wb") as file:
file.write(no_num_docs_serialization)
d = Dictionary.load_from_text(tmpf)
self.assertEqual(d.token2id[u"prvé"], 1)
self.assertEqual(d.token2id[u"slovo"], 2)
self.assertEqual(d.dfs[1], 1)
self.assertEqual(d.dfs[2], 2)
self.assertEqual(d.num_docs, 0)
def test_loadFromText(self):
"""`Dictionary` can be loaded from textfile."""
tmpf = get_tmpfile('load_dict_test.txt')
no_num_docs_serialization = to_utf8("2\n1\tprvé\t1\n2\tslovo\t2\n")
with open(tmpf, "wb") as file:
file.write(no_num_docs_serialization)
d = Dictionary.load_from_text(tmpf)
self.assertEqual(d.token2id[u"prvé"], 1)
self.assertEqual(d.token2id[u"slovo"], 2)
self.assertEqual(d.dfs[1], 1)
self.assertEqual(d.dfs[2], 2)
self.assertEqual(d.num_docs, 2)
def test_saveAsText_and_loadFromText(self):
"""`Dictionary` can be saved as textfile and loaded again from textfile. """
tmpf = get_tmpfile('dict_test.txt')
for sort_by_word in [True, False]:
d = Dictionary(self.texts)
d.save_as_text(tmpf, sort_by_word=sort_by_word)
self.assertTrue(os.path.exists(tmpf))
d_loaded = Dictionary.load_from_text(tmpf)
self.assertNotEqual(d_loaded, None)
self.assertEqual(d_loaded.token2id, d.token2id)
def test_from_corpus(self):
"""build `Dictionary` from an existing corpus"""
documents = [
"Human machine interface for lab abc computer applications",
"A survey of user opinion of computer system response time",
"The EPS user interface management system",
"System and human system engineering testing of EPS",
"Relation of user perceived response time to error measurement",
"The generation of random binary unordered trees",
"The intersection graph of paths in trees",
"Graph minors IV Widths of trees and well quasi ordering",
"Graph minors A survey"
]
stoplist = set('for a of the and to in'.split())
texts = [
[word for word in document.lower().split() if word not in stoplist]
for document in documents]
# remove words that appear only once
all_tokens = sum(texts, [])
tokens_once = set(word for word in set(all_tokens) if all_tokens.count(word) == 1)
texts = [[word for word in text if word not in tokens_once] for text in texts]
dictionary = Dictionary(texts)
corpus = [dictionary.doc2bow(text) for text in texts]
# Create dictionary from corpus without a token map
dictionary_from_corpus = Dictionary.from_corpus(corpus)
dict_token2id_vals = sorted(dictionary.token2id.values())
dict_from_corpus_vals = sorted(dictionary_from_corpus.token2id.values())
self.assertEqual(dict_token2id_vals, dict_from_corpus_vals)
self.assertEqual(dictionary.dfs, dictionary_from_corpus.dfs)
self.assertEqual(dictionary.num_docs, dictionary_from_corpus.num_docs)
self.assertEqual(dictionary.num_pos, dictionary_from_corpus.num_pos)
self.assertEqual(dictionary.num_nnz, dictionary_from_corpus.num_nnz)
# Create dictionary from corpus with an id=>token map
dictionary_from_corpus_2 = Dictionary.from_corpus(corpus, id2word=dictionary)
self.assertEqual(dictionary.token2id, dictionary_from_corpus_2.token2id)
self.assertEqual(dictionary.dfs, dictionary_from_corpus_2.dfs)
self.assertEqual(dictionary.num_docs, dictionary_from_corpus_2.num_docs)
self.assertEqual(dictionary.num_pos, dictionary_from_corpus_2.num_pos)
self.assertEqual(dictionary.num_nnz, dictionary_from_corpus_2.num_nnz)
# Ensure Sparse2Corpus is compatible with from_corpus
bow = gensim.matutils.Sparse2Corpus(scipy.sparse.rand(10, 100))
dictionary = Dictionary.from_corpus(bow)
self.assertEqual(dictionary.num_docs, 100)
def test_dict_interface(self):
"""Test Python 2 dict-like interface in both Python 2 and 3."""
d = Dictionary(self.texts)
self.assertTrue(isinstance(d, Mapping))
self.assertEqual(list(zip(d.keys(), d.values())), list(d.items()))
# Even in Py3, we want the iter* members.
self.assertEqual(list(d.items()), list(d.iteritems()))
self.assertEqual(list(d.keys()), list(d.iterkeys()))
self.assertEqual(list(d.values()), list(d.itervalues()))
# XXX Do we want list results from the dict members in Py3 too?
if not PY3:
self.assertTrue(isinstance(d.items(), list))
self.assertTrue(isinstance(d.keys(), list))
self.assertTrue(isinstance(d.values(), list))
# endclass TestDictionary
if __name__ == '__main__':
logging.basicConfig(level=logging.WARNING)
unittest.main()<|fim▁end|> | self.assertEqual(d.doc2bow([u'\u017elu\u0165ou\u010dk\xfd']), [(0, 1)])
def test_saveAsText(self):
"""`Dictionary` can be saved as textfile. """ |
<|file_name|>__main__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""anagram_solver.__main__: executed when directory is called as script."""
from .anagram_solver import main
<|fim▁hole|><|fim▁end|> | main() |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>"""
Setup Module
This module is used to make a distribution of
the game using distutils.
"""
from distutils.core import setup
setup(
name = 'Breakout',
version = '1.0',
description = 'A remake of the classic video game',
author = 'Derek Morey',
author_email = '[email protected]',
license = 'GPL',
url = 'https://github.com/Oisota/Breakout',
download_url = 'https://github.com/Oisota/Breakout/archive/master.zip',
keywords = ['breakout', 'arcade', 'game', 'pygame', 'python',],
platforms = ['linux', 'windows'],
scripts = ['breakout.py','breakout-editor.py'],
packages = ['breakout','breakout.game','breakout.utils','breakout.editor'],
package_data = {'breakout':['assets/images/*.gif',
'assets/images/*.png',
'assets/sounds/*.wav',
'assets/levels/*.json']},
requires = ['sys', 'os', 'random', 'tkinter', 'pygame', 'json'],
classifiers = ['Programming Language :: Python',
'Programming Language :: Python :: 3',
'Development Status :: 4 - Beta',
'Environment :: Other Environment',
'Framework :: Pygame',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',<|fim▁hole|> 'Natural Language :: English',
'Operating System :: OS Independent',
'Topic :: Games/Entertainment',
'Topic :: Games/Entertainment :: Arcade'],
long_description =
"""
Breakout
--------
This is a remake of the classic game Breakout. I made this game for the sole
purpose of educating myself about python, pygame, and game development in general.
Feel free to use or modify my code in any way.
"""
)<|fim▁end|> | 'Intended Audience :: Education',
'License :: OSI Approved :: GNU General Public License (GPL)', |
<|file_name|>test_page.py<|end_file_name|><|fim▁begin|>import fbchat
from fbchat import PageData
<|fim▁hole|> "id": "123456",
"name": "Some school",
"profile_picture": {"uri": "https://scontent-arn2-1.xx.fbcdn.net/v/..."},
"url": "https://www.facebook.com/some-school/",
"category_type": "SCHOOL",
"city": None,
}
assert PageData(
session=session,
id="123456",
photo=fbchat.Image(url="https://scontent-arn2-1.xx.fbcdn.net/v/..."),
name="Some school",
url="https://www.facebook.com/some-school/",
city=None,
category="SCHOOL",
) == PageData._from_graphql(session, data)<|fim▁end|> | def test_page_from_graphql(session):
data = { |
<|file_name|>constants.js<|end_file_name|><|fim▁begin|>/*
*
* HomeItemPage constants<|fim▁hole|>export const DEFAULT_ACTION = 'app/HomeItemPage/DEFAULT_ACTION';<|fim▁end|> | *
*/
|
<|file_name|>index.tsx<|end_file_name|><|fim▁begin|>import { Zerotorescue } from 'CONTRIBUTORS';
import NewsRegularArticle from 'interface/NewsRegularArticle';
import React from 'react';
export const title = 'A note about unlisted logs';
export default (
<NewsRegularArticle title={title} publishedAt="2017-01-31" publishedBy={Zerotorescue}>
Because Warcraft Logs offers no way to access private logs through the API, your logs must<|fim▁hole|> change the existing logs to the <i>unlisted</i> privacy option instead.
<br />
<br />
Do note that due to a restrictive API request limit we have to aggressively cache all API
requests we send to Warcraft Logs. This means that once you run a log through the analyzer, the
(secret) link for that log will continue to be accessible even if you change the original log
(back) to the private privacy option on Warcraft Logs. Only the fights that you accessed will
remain cached indefinitely.
<br />
<br />
We will never share links to unlisted or private (analyzed) logs, nor include them recognizably
in any public lists.
</NewsRegularArticle>
);<|fim▁end|> | either be unlisted or public if you want to analyze them. If your guild has private logs you
will have to <a href="https://www.warcraftlogs.com/help/start/">upload your own logs</a> or |
<|file_name|>makerTemplateViewBuilder.py<|end_file_name|><|fim▁begin|>from makerUtilities import writeFile
from makerUtilities import readFile
import os
def scaffold(systemDir, defaultTheme):
return (
"""<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<script src='file://"""<|fim▁hole|> + os.path.join(systemDir, "jquery.min.js")
+ """'></script>
<style type="text/css">
html {
background: -webkit-gradient(linear, left top, left bottom, from(#000), to(rgb(93,94,120)));
background-attachment:fixed;
}
body {
font-family: "Helvetica Neue";
font-size: 14px;
width:auto;
/* max-width:694px; */
color:#fff;
padding:20px 20px;
-webkit-transform: perspective( 600px );
}
a {
color: #ddd;
}
.thumbnail a {
text-decoration:none;
color:#000;
cursor:default;
}
p {
font-weight:lighter;
color:#fff;
letter-spacing:0.09em;
float:left;
font-size:0.9em;
line-height:1.45em;
text-align:left;
margin:-6px 0px 24px 10px;
}
h5 {
font-weight:lighter;
letter-spacing:0.050em;
margin:-28px 0px 0px 8px;
line-height:3em;
font-size:22px;
cursor:default;
}
img {
border:1px solid #333;
width:100%;
height:100%;
-webkit-box-reflect: below 0px -webkit-gradient(linear, left top, left bottom, from(transparent), color-stop(50%, transparent), to(rgba(0,0,0,0.2)));
-webkit-transform: perspective( 600px ) rotateY( 0deg);
margin-bottom:40px;
}
.row {
width:100%;
margin:0px 0px 40px 10px;
float:left;
clear:both;
}
.thumbnail {
width:17%;
padding:20px 20px 10px 20px;
margin:0px 20px 0px 0px;
float:left;
clear:right;
background:none;
}
.thumbnail img {
height:100px;
}
.thumbnail p {
text-align:center;
margin:-24px 0px 0px 0px;
width:100%;
font-size:14px;
cursor:default;
}
.thumbnail.selected {
border:1px solid #777;
padding:20px 20px 10px 20px;
-webkit-border-radius:10px;
background: -webkit-gradient(linear, left top, left bottom, from(rgba(140,140,140,0.1)), to(rgba(170,170,170,0.2)));
}
.info {
width:92%;
float:left;
clear:both;
display:none;
margin:40px 10px 0px 10px;
}
.info p {
float:left;
clear:right;
cursor:default;
}
.info img {
width:280px;
height:auto;
float:left;
clear:right;
margin:0px 48px 0px 8px;
-webkit-transform: perspective( 600px ) rotateY( 10deg );
/*
-webkit-transition: width, 0.5s;
*/
}
/*
.info img:hover {
width:320px;
-webkit-transform: perspective( 600px ) rotateY( 0deg );
}
*/
.info h5 {
margin-top:0px;
}
.info h5, p {
width:380px;
float:left;
}
a.button {
cursor:default;
color:#000;
}
a.button:active {
color:#000;
background: -webkit-gradient(linear, left top, left bottom, from(#eee), to(#bbb));
}
</style>
<script type="text/javascript">
$(document).ready(function(){
$('#"""
+ defaultTheme
+ """').addClass('selected');
$('#info-"""
+ defaultTheme
+ """').show();
$('.thumbnail').click(function(){
$('.info').hide();
$('.thumbnail').removeClass('selected')
$(this).addClass('selected');
$($(this).data('info')).show();
});
});
</script>
</head>
<body>
"""
+ createThumbnails(systemDir)
+ createInfo(systemDir)
+ """
</body>
</html>
"""
)
def buildView(systemDir, viewPath):
writeFile(
os.path.join(viewPath, "yourTemplates.html"),
scaffold(systemDir, defaultTemplate()),
)
return os.path.join(viewPath, "yourTemplates.html")
def defaultTemplate():
# ===========================================================================
# This is used to set the default template for the application
# ===========================================================================
return "Simple-Markdown"
def createThumbnails(systemDir):
thumbnails = "<div class='row'>\n"
for template in os.listdir(os.path.join(systemDir, "templates")):
if not template.startswith("."):
thumbnails += makeThumbnail(systemDir, template)
thumbnails += "</div>"
return thumbnails
def createInfo(systemDir):
info = "<div class='row'>\n"
for template in os.listdir(os.path.join(systemDir, "templates")):
if not template.startswith("."):
s = readFile(
os.path.join(systemDir, "templates", template, "parts", "info.json")
)
data = eval(s)
info += makeInfo(systemDir, template, data)
info += "</div>"
return info
def makeInfo(systemDir, templateName, data):
previewImage = os.path.join(
systemDir, "templates", templateName, "parts/preview.jpg"
)
info = (
"""
<div class="info" id="info-"""
+ data["Title"]
+ """">
<img src='"""
+ previewImage
+ """' />
<h5>"""
+ data["Title"]
+ """</h5>
<p>"""
+ data["Description"]
+ """<br /><br />
Credit: """
+ data["Credit"]
+ """<br />
Support: <a href='"""
+ data["Support"]
+ """'>www.makercms.org</a><br />
</p>
</div>
"""
)
return info
def makeThumbnail(systemDir, templateName):
previewImage = os.path.join(
systemDir, "templates", templateName, "parts/preview.jpg"
)
thumbnail = (
"""
<div class='thumbnail' id='"""
+ templateName
+ """' data-info='#info-"""
+ templateName
+ """'>
<a href='--"""
+ templateName
+ """--'>
<img src='"""
+ previewImage
+ """' />
<p>"""
+ templateName
+ """</p></a>
</div>
"""
)
return thumbnail<|fim▁end|> | |
<|file_name|>cli.rs<|end_file_name|><|fim▁begin|>use clap;
use clap::{App, Arg, Error, SubCommand};
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
use std::str::FromStr;
use std::{ffi::OsString, path::PathBuf};
#[derive(Debug, Clone)]
pub struct Server {
pub bind_addr: String,
pub port: u16,
pub key: String,
pub dns: IpAddr,
}
#[derive(Debug, Clone)]
pub struct Client {
pub remote_addr: String,
pub port: u16,
pub key: String,
pub default_route: bool,
}
#[derive(Debug, Clone)]
pub enum Args {
Client(Client),
Server(Server),
}
pub fn get_args() -> Result<Args, String> {
let matches = App::new("kytan: High Performance Peer-to-Peer VPN")
.version("1.0")
.subcommand(
SubCommand::with_name("server")
.help("client mode")
.arg(
Arg::with_name("bind")
.short("l")
.long("listen")
.default_value("0.0.0.0")
.help("set the listen address")
.takes_value(true),
)
.arg(
Arg::with_name("port")
.short("p")
.long("port")
.default_value("9527")
.help("set the listen port")
.takes_value(true),
)
.arg(
Arg::with_name("key")
.short("k")
.long("key")
.help("set the key for encryption communication")
.takes_value(true),
)
.arg(
Arg::with_name("dns")
.short("d")
.long("dns")
.default_value("8.8.8.8")
.help("set dns for client, default 8.8.8.8")
.takes_value(true),
),
)
.subcommand(
SubCommand::with_name("client")
.help("server mode")
.arg(
Arg::with_name("server")
.short("s")
.long("server")
.help("set the remote server address")
.takes_value(true),
)
.arg(
Arg::with_name("port")
.short("p")
.long("port")
.help("set the remote port")
.takes_value(true),
)
.arg(
Arg::with_name("key")
.short("k")
.long("key")
.help("set the key for encryption communication")
.takes_value(true),
)
.arg(
Arg::with_name("no-default-route")
.short("n")
.long("no-default-route")
.help("do not set default route"),
),
)
.get_matches();
if let Some(matches) = matches.subcommand_matches("client") {
let ip_str = matches
.value_of("server")
.ok_or_else(|| "can not find client host value")
.unwrap();
let port_str = matches
.value_of("port")
.ok_or_else(|| "can not find client port value")
.unwrap();
let key_str = matches
.value_of("key")
.ok_or_else(|| "can not find client key value")
.unwrap();
// let remote_addr = IpAddr::V4(Ipv4Addr::from_str(ip_str).map_err(|e| e.to_string())?);
let port = port_str.parse::<u16>().map_err(|e| e.to_string())?;
let default_route = match matches.is_present("no-default-route") {
false => true,
true => false,
};
Ok(Args::Client(Client {
remote_addr: ip_str.to_string(),
port: port,
key: key_str.to_string(),
default_route: default_route,
}))
} else if let Some(matches) = matches.subcommand_matches("server") {
let ip_str = matches
.value_of("bind")
.ok_or_else(|| "can not find server host value")<|fim▁hole|> .value_of("port")
.ok_or_else(|| "can not find server port value")
.unwrap();
let key_str = matches
.value_of("key")
.ok_or_else(|| "can not find server key value")
.unwrap();
let dns = matches
.value_of("dns")
.ok_or_else(|| "can not find dns value")?;
// let bind_addr = IpAddr::V4(Ipv4Addr::from_str(ip_str).map_err(|e| e.to_string())?);
let dns = IpAddr::V4(Ipv4Addr::from_str(dns).map_err(|e| e.to_string())?);
let port = port_str.parse::<u16>().map_err(|e| e.to_string())?;
Ok(Args::Server(Server {
bind_addr: ip_str.to_string(),
port: port,
key: key_str.to_string(),
dns: dns,
}))
} else {
unimplemented!()
}
}<|fim▁end|> | .unwrap();
let port_str = matches |
<|file_name|>cstring.rs<|end_file_name|><|fim▁begin|>use libc::c_char;
use std::ffi::CStr;
use std::str::Utf8Error;
use std::ffi::CString;
pub struct CStringUtils {}
impl CStringUtils {
pub fn c_str_to_string(cstr: *const c_char) -> Result<Option<String>, Utf8Error> {
if cstr.is_null() {
return Ok(None);
}
unsafe {
match CStr::from_ptr(cstr).to_str() {
Ok(str) => Ok(Some(str.to_string())),
Err(err) => Err(err)
}
}
}
pub fn c_str_to_str<'a>(cstr: *const c_char) -> Result<Option<&'a str>, Utf8Error> {
if cstr.is_null() {
return Ok(None);
}
unsafe {
match CStr::from_ptr(cstr).to_str() {
Ok(s) => Ok(Some(s)),
Err(err) => Err(err)
}<|fim▁hole|> CString::new(s).unwrap()
}
}
//TODO DOCUMENT WHAT THIS DOES
macro_rules! check_useful_c_str {
($x:ident, $e:expr) => {
let $x = match CStringUtils::c_str_to_string($x) {
Ok(Some(val)) => val,
_ => return VcxError::from_msg($e, "Invalid pointer has been passed").into()
};
if $x.is_empty() {
return VcxError::from_msg($e, "Empty string has been passed").into()
}
}
}
macro_rules! check_useful_opt_c_str {
($x:ident, $e:expr) => {
let $x = match CStringUtils::c_str_to_string($x) {
Ok(opt_val) => opt_val,
Err(_) => return VcxError::from_msg($e, "Invalid pointer has been passed").into()
};
}
}
/// Vector helpers
macro_rules! check_useful_c_byte_array {
($ptr:ident, $len:expr, $err1:expr, $err2:expr) => {
if $ptr.is_null() {
return VcxError::from_msg($err1, "Invalid pointer has been passed").into()
}
if $len <= 0 {
return VcxError::from_msg($err2, "Array length must be greater than 0").into()
}
let $ptr = unsafe { $crate::std::slice::from_raw_parts($ptr, $len as usize) };
let $ptr = $ptr.to_vec();
}
}
//Returnable pointer is valid only before first vector modification
pub fn vec_to_pointer(v: &Vec<u8>) -> (*const u8, u32) {
let len = v.len() as u32;
(v.as_ptr() as *const u8, len)
}<|fim▁end|> | }
}
pub fn string_to_cstring(s: String) -> CString { |
<|file_name|>Sector.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2014 Wouter Pinnoo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.pinnoo.garbagecalendar.data;
import java.io.Serializable;
/**
*
* @author Wouter Pinnoo <[email protected]>
*/
public class Sector implements Serializable {
private static final long serialVersionUID = -843402748713889036L;
private AreaType type;
private String code;
<|fim▁hole|> }
@Override
public boolean equals(Object o) {
if (o instanceof Sector) {
Sector s = (Sector) o;
return s.getCode().equals(getCode())
&& s.getType().equals(getType());
}
return false;
}
@Override
public int hashCode() {
int hash = 7;
hash = 23 * hash + (this.type != null ? this.type.hashCode() : 0);
hash = 23 * hash + (this.code != null ? this.code.hashCode() : 0);
return hash;
}
@Override
public String toString() {
return type.toString() + code;
}
public Sector(String str) {
if (str.matches("[LlVv][0-9][0-9]")) {
switch (str.charAt(0)) {
case 'L':
type = AreaType.L;
break;
case 'V':
type = AreaType.V;
break;
default:
type = AreaType.NONE;
}
code = str.substring(1);
} else {
type = AreaType.CITY;
code = str;
}
}
public AreaType getType() {
return type;
}
public String getCode() {
return code;
}
}<|fim▁end|> | public Sector(AreaType type, String code) {
this.type = type;
this.code = code; |
<|file_name|>serviceworkerglobalscope.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use devtools;
use devtools_traits::DevtoolScriptControlMsg;
use dom::abstractworker::WorkerScriptMsg;
use dom::bindings::codegen::Bindings::EventHandlerBinding::EventHandlerNonNull;
use dom::bindings::codegen::Bindings::ServiceWorkerGlobalScopeBinding;
use dom::bindings::codegen::Bindings::ServiceWorkerGlobalScopeBinding::ServiceWorkerGlobalScopeMethods;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{Root, RootCollection};
use dom::bindings::reflector::DomObject;
use dom::bindings::str::DOMString;
use dom::event::Event;
use dom::eventtarget::EventTarget;
use dom::extendableevent::ExtendableEvent;
use dom::extendablemessageevent::ExtendableMessageEvent;
use dom::globalscope::GlobalScope;
use dom::workerglobalscope::WorkerGlobalScope;
use dom_struct::dom_struct;
use ipc_channel::ipc::{self, IpcSender, IpcReceiver};
use ipc_channel::router::ROUTER;
use js::jsapi::{JS_SetInterruptCallback, JSAutoCompartment, JSContext};
use js::jsval::UndefinedValue;
use js::rust::Runtime;
use net_traits::{load_whole_resource, IpcSend, CustomResponseMediator};
use net_traits::request::{CredentialsMode, Destination, RequestInit, Type as RequestType};
use script_runtime::{CommonScriptMsg, StackRootTLS, get_reports, new_rt_and_cx, ScriptChan};
use script_traits::{TimerEvent, WorkerGlobalScopeInit, ScopeThings, ServiceWorkerMsg, WorkerScriptLoadOrigin};
use servo_config::prefs::PREFS;
use servo_rand::random;
use servo_url::ServoUrl;
use std::sync::mpsc::{Receiver, RecvError, Select, Sender, channel};
use std::thread;
use std::time::Duration;
use style::thread_state::{self, IN_WORKER, SCRIPT};
/// Messages used to control service worker event loop
pub enum ServiceWorkerScriptMsg {
/// Message common to all workers
CommonWorker(WorkerScriptMsg),
// Message to request a custom response by the service worker
Response(CustomResponseMediator)
}
pub enum MixedMessage {
FromServiceWorker(ServiceWorkerScriptMsg),
FromDevtools(DevtoolScriptControlMsg),
FromTimeoutThread(())
}
#[derive(JSTraceable, Clone)]
pub struct ServiceWorkerChan {
pub sender: Sender<ServiceWorkerScriptMsg>
}
impl ScriptChan for ServiceWorkerChan {
fn send(&self, msg: CommonScriptMsg) -> Result<(), ()> {
self.sender
.send(ServiceWorkerScriptMsg::CommonWorker(WorkerScriptMsg::Common(msg)))
.map_err(|_| ())
}
fn clone(&self) -> Box<ScriptChan + Send> {
box ServiceWorkerChan {
sender: self.sender.clone(),
}
}
}
#[dom_struct]
pub struct ServiceWorkerGlobalScope {
workerglobalscope: WorkerGlobalScope,
#[ignore_heap_size_of = "Defined in std"]
receiver: Receiver<ServiceWorkerScriptMsg>,
#[ignore_heap_size_of = "Defined in std"]
own_sender: Sender<ServiceWorkerScriptMsg>,
#[ignore_heap_size_of = "Defined in std"]
timer_event_port: Receiver<()>,
#[ignore_heap_size_of = "Defined in std"]
swmanager_sender: IpcSender<ServiceWorkerMsg>,
scope_url: ServoUrl,
}
impl ServiceWorkerGlobalScope {
    /// Plain-data constructor: wires the channels, ports and scope url into
    /// the struct. JS reflection/wrapping is done by the caller.
    fn new_inherited(init: WorkerGlobalScopeInit,
                     worker_url: ServoUrl,
                     from_devtools_receiver: Receiver<DevtoolScriptControlMsg>,
                     runtime: Runtime,
                     own_sender: Sender<ServiceWorkerScriptMsg>,
                     receiver: Receiver<ServiceWorkerScriptMsg>,
                     timer_event_chan: IpcSender<TimerEvent>,
                     timer_event_port: Receiver<()>,
                     swmanager_sender: IpcSender<ServiceWorkerMsg>,
                     scope_url: ServoUrl)
                     -> ServiceWorkerGlobalScope {
        ServiceWorkerGlobalScope {
            workerglobalscope: WorkerGlobalScope::new_inherited(init,
                                                                worker_url,
                                                                runtime,
                                                                from_devtools_receiver,
                                                                timer_event_chan,
                                                                None),
            receiver: receiver,
            timer_event_port: timer_event_port,
            own_sender: own_sender,
            swmanager_sender: swmanager_sender,
            scope_url: scope_url
        }
    }
runtime: Runtime,
own_sender: Sender<ServiceWorkerScriptMsg>,
receiver: Receiver<ServiceWorkerScriptMsg>,
timer_event_chan: IpcSender<TimerEvent>,
timer_event_port: Receiver<()>,
swmanager_sender: IpcSender<ServiceWorkerMsg>,
scope_url: ServoUrl)
-> Root<ServiceWorkerGlobalScope> {
let cx = runtime.cx();
let scope = box ServiceWorkerGlobalScope::new_inherited(init,
worker_url,
from_devtools_receiver,
runtime,
own_sender,
receiver,
timer_event_chan,
timer_event_port,
swmanager_sender,
scope_url);
unsafe {
ServiceWorkerGlobalScopeBinding::Wrap(cx, scope)
}
}
    /// Loads and runs a service worker script on a dedicated thread.
    ///
    /// Fetches the script synchronously over the resource threads, builds a
    /// fresh JS runtime and a `ServiceWorkerGlobalScope`, executes the
    /// script, dispatches the `activate` event, and then drives the worker's
    /// event loop until `handle_event` asks to stop or a channel closes.
    #[allow(unsafe_code)]
    pub fn run_serviceworker_scope(scope_things: ScopeThings,
                                   own_sender: Sender<ServiceWorkerScriptMsg>,
                                   receiver: Receiver<ServiceWorkerScriptMsg>,
                                   devtools_receiver: IpcReceiver<DevtoolScriptControlMsg>,
                                   swmanager_sender: IpcSender<ServiceWorkerMsg>,
                                   scope_url: ServoUrl) {
        let ScopeThings { script_url,
                          init,
                          worker_load_origin,
                          .. } = scope_things;
        let serialized_worker_url = script_url.to_string();
        // NOTE(review): assumes a current global exists on the spawning
        // thread; `expect` panics otherwise.
        let origin = GlobalScope::current().expect("No current global object").origin().immutable().clone();
        thread::Builder::new().name(format!("ServiceWorker for {}", serialized_worker_url)).spawn(move || {
            thread_state::initialize(SCRIPT | IN_WORKER);
            let roots = RootCollection::new();
            let _stack_roots_tls = StackRootTLS::new(&roots);
            let WorkerScriptLoadOrigin { referrer_url, referrer_policy, pipeline_id } = worker_load_origin;
            // Fetch request for the worker script itself.
            let request = RequestInit {
                url: script_url.clone(),
                type_: RequestType::Script,
                destination: Destination::ServiceWorker,
                credentials_mode: CredentialsMode::Include,
                use_url_credentials: true,
                pipeline_id: pipeline_id,
                referrer_url: referrer_url,
                referrer_policy: referrer_policy,
                origin,
                .. RequestInit::default()
            };
            // Synchronous load; a failure aborts the worker thread early.
            let (url, source) = match load_whole_resource(request,
                                                          &init.resource_threads.sender()) {
                Err(_) => {
                    println!("error loading script {}", serialized_worker_url);
                    return;
                }
                Ok((metadata, bytes)) => {
                    (metadata.final_url, String::from_utf8(bytes).unwrap())
                }
            };
            let runtime = unsafe { new_rt_and_cx() };
            // Bridge the IPC devtools channel onto an in-process mpsc port.
            let (devtools_mpsc_chan, devtools_mpsc_port) = channel();
            ROUTER.route_ipc_receiver_to_mpsc_sender(devtools_receiver, devtools_mpsc_chan);
            // TODO XXXcreativcoder use this timer_ipc_port, when we have a service worker instance here
            let (timer_ipc_chan, _timer_ipc_port) = ipc::channel().unwrap();
            let (timer_chan, timer_port) = channel();
            let global = ServiceWorkerGlobalScope::new(
                init, url, devtools_mpsc_port, runtime,
                own_sender, receiver,
                timer_ipc_chan, timer_port, swmanager_sender, scope_url);
            let scope = global.upcast::<WorkerGlobalScope>();
            unsafe {
                // Handle interrupt requests
                JS_SetInterruptCallback(scope.runtime(), Some(interrupt_callback));
            }
            scope.execute_script(DOMString::from(source));
            // Service workers are time limited: a helper thread sends one
            // message on `timer_chan` after the configured timeout, which
            // the event loop sees as `FromTimeoutThread`.
            thread::Builder::new().name("SWTimeoutThread".to_owned()).spawn(move || {
                let sw_lifetime_timeout = PREFS.get("dom.serviceworker.timeout_seconds").as_u64().unwrap();
                thread::sleep(Duration::new(sw_lifetime_timeout, 0));
                let _ = timer_chan.send(());
            }).expect("Thread spawning failed");
            global.dispatch_activate();
            let reporter_name = format!("service-worker-reporter-{}", random::<u64>());
            scope.upcast::<GlobalScope>().mem_profiler_chan().run_with_memory_reporting(|| {
                // https://html.spec.whatwg.org/multipage/#event-loop-processing-model
                // Step 1
                while let Ok(event) = global.receive_event() {
                    // Step 3
                    if !global.handle_event(event) {
                        break;
                    }
                    // Step 6
                    global.upcast::<WorkerGlobalScope>().perform_a_microtask_checkpoint();
                }
            }, reporter_name, scope.script_chan(), CommonScriptMsg::CollectReports);
        }).expect("Thread spawning failed");
    }
fn handle_event(&self, event: MixedMessage) -> bool {
match event {
MixedMessage::FromDevtools(msg) => {
match msg {
DevtoolScriptControlMsg::EvaluateJS(_pipe_id, string, sender) =>
devtools::handle_evaluate_js(self.upcast(), string, sender),
DevtoolScriptControlMsg::GetCachedMessages(pipe_id, message_types, sender) =>
devtools::handle_get_cached_messages(pipe_id, message_types, sender),
DevtoolScriptControlMsg::WantsLiveNotifications(_pipe_id, bool_val) =>
devtools::handle_wants_live_notifications(self.upcast(), bool_val),
_ => debug!("got an unusable devtools control message inside the worker!"),
}
true
}
MixedMessage::FromServiceWorker(msg) => {
self.handle_script_event(msg);
true
}
MixedMessage::FromTimeoutThread(_) => {
let _ = self.swmanager_sender.send(ServiceWorkerMsg::Timeout(self.scope_url.clone()));
false
}
}
}
    /// Handles a single `ServiceWorkerScriptMsg` on the worker thread.
    fn handle_script_event(&self, msg: ServiceWorkerScriptMsg) {
        use self::ServiceWorkerScriptMsg::*;
        match msg {
            CommonWorker(WorkerScriptMsg::DOMMessage(data)) => {
                let scope = self.upcast::<WorkerGlobalScope>();
                let target = self.upcast();
                // Enter the worker's compartment and root the deserialized
                // jsval before dispatching it as an ExtendableMessageEvent.
                let _ac = JSAutoCompartment::new(scope.get_cx(), scope.reflector().get_jsobject().get());
                rooted!(in(scope.get_cx()) let mut message = UndefinedValue());
                data.read(scope.upcast(), message.handle_mut());
                ExtendableMessageEvent::dispatch_jsval(target, scope.upcast(), message.handle());
            },
            CommonWorker(WorkerScriptMsg::Common(CommonScriptMsg::RunnableMsg(_, runnable))) => {
                // Run a queued task on this worker's thread.
                runnable.handler()
            },
            CommonWorker(WorkerScriptMsg::Common(CommonScriptMsg::CollectReports(reports_chan))) => {
                // Memory profiler request: report under a "url(...)" path segment.
                let scope = self.upcast::<WorkerGlobalScope>();
                let cx = scope.get_cx();
                let path_seg = format!("url({})", scope.get_url());
                let reports = get_reports(cx, path_seg);
                reports_chan.send(reports);
            },
            Response(mediator) => {
                // TODO XXXcreativcoder This will eventually use a FetchEvent interface to fire event
                // when we have the Request and Response dom api's implemented
                // https://slightlyoff.github.io/ServiceWorker/spec/service_worker_1/index.html#fetch-event-section
                self.upcast::<EventTarget>().fire_event(atom!("fetch"));
                // No custom response support yet: answer the mediator with None.
                let _ = mediator.response_chan.send(None);
            }
        }
    }
#[allow(unsafe_code)]
fn receive_event(&self) -> Result<MixedMessage, RecvError> {
let scope = self.upcast::<WorkerGlobalScope>();
let worker_port = &self.receiver;
let devtools_port = scope.from_devtools_receiver();
let timer_event_port = &self.timer_event_port;
let sel = Select::new();
let mut worker_handle = sel.handle(worker_port);
let mut devtools_handle = sel.handle(devtools_port);
let mut timer_port_handle = sel.handle(timer_event_port);
unsafe {
worker_handle.add();
if scope.from_devtools_sender().is_some() {
devtools_handle.add();
}
timer_port_handle.add();
}
let ret = sel.wait();
if ret == worker_handle.id() {
Ok(MixedMessage::FromServiceWorker(worker_port.recv()?))
}else if ret == devtools_handle.id() {
Ok(MixedMessage::FromDevtools(devtools_port.recv()?))
} else if ret == timer_port_handle.id() {
Ok(MixedMessage::FromTimeoutThread(timer_event_port.recv()?))
} else {
panic!("unexpected select result!")
}
}
    /// Processes a common script message as if it had arrived on the
    /// worker's own message port.
    pub fn process_event(&self, msg: CommonScriptMsg) {
        self.handle_script_event(ServiceWorkerScriptMsg::CommonWorker(WorkerScriptMsg::Common(msg)));
    }
    /// Returns a boxed channel that feeds this worker's own message port.
    pub fn script_chan(&self) -> Box<ScriptChan + Send> {
        box ServiceWorkerChan {
            sender: self.own_sender.clone()
        }
    }
    /// Creates an `activate` ExtendableEvent and dispatches it at this
    /// global scope.
    fn dispatch_activate(&self) {
        let event = ExtendableEvent::new(self, atom!("activate"), false, false);
        let event = (&*event).upcast::<Event>();
        self.upcast::<EventTarget>().dispatch_event(event);
    }
}
/// JS engine interrupt hook for service worker runtimes.
///
/// Resolves the global for the given context, asserts it is this worker
/// scope, and reports whether script execution may continue.
#[allow(unsafe_code)]
unsafe extern "C" fn interrupt_callback(cx: *mut JSContext) -> bool {
    let worker =
        Root::downcast::<WorkerGlobalScope>(GlobalScope::from_context(cx))
            .expect("global is not a worker scope");
    assert!(worker.is::<ServiceWorkerGlobalScope>());
    // A false response causes the script to terminate
    !worker.is_closing()
}
impl ServiceWorkerGlobalScopeMethods for ServiceWorkerGlobalScope {
    // https://w3c.github.io/ServiceWorker/#service-worker-global-scope-onmessage-attribute
    // `event_handler!` expands to the GetOnmessage/SetOnmessage accessors.
    event_handler!(message, GetOnmessage, SetOnmessage);
}
#[allow(unsafe_code)]
pub fn new(init: WorkerGlobalScopeInit,
worker_url: ServoUrl, |
<|file_name|>api.hpp<|end_file_name|><|fim▁begin|>#pragma once
#include "base/worker_thread.hpp"<|fim▁hole|>#include "ugc/types.hpp"
#include <functional>
class Index;
struct FeatureID;
namespace ugc
{
// Facade over UGC (user generated content) storage.
//
// Each public Get/Set method has a matching private *Impl; the class owns a
// WorkerThread, so the public calls presumably defer to the Impl methods on
// that thread — NOTE(review): confirm in the implementation file.
class Api
{
public:
  // Callback invoked with the UGC found for a feature.
  using UGCCallback = std::function<void(UGC const &)>;
  // Callback invoked with the pending UGC update for a feature.
  using UGCUpdateCallback = std::function<void(UGCUpdate const &)>;

  // |filename| is forwarded to the storage backing m_storage.
  explicit Api(Index const & index, std::string const & filename);

  void GetUGC(FeatureID const & id, UGCCallback callback);
  void GetUGCUpdate(FeatureID const & id, UGCUpdateCallback callback);
  void SetUGCUpdate(FeatureID const & id, UGCUpdate const & ugc);

  // Canned UGC objects for tests; |now| fixes their timestamps.
  static UGC MakeTestUGC1(Time now = Clock::now());
  static UGC MakeTestUGC2(Time now = Clock::now());

private:
  void GetUGCImpl(FeatureID const & id, UGCCallback callback);
  void GetUGCUpdateImpl(FeatureID const & id, UGCUpdateCallback callback);
  void SetUGCUpdateImpl(FeatureID const & id, UGCUpdate const & ugc);

  Index const & m_index;
  base::WorkerThread m_thread;
  Storage m_storage;
};
} // namespace ugc<|fim▁end|> |
#include "ugc/storage.hpp" |
<|file_name|>random_primitive_float_range.rs<|end_file_name|><|fim▁begin|>use itertools::Itertools;
use malachite_base::num::basic::floats::PrimitiveFloat;
use malachite_base::num::float::NiceFloat;
use malachite_base::num::random::random_primitive_float_range;
use malachite_base::random::EXAMPLE_SEED;
use malachite_base_test_util::stats::common_values_map::common_values_map;
use malachite_base_test_util::stats::median;
use malachite_base_test_util::stats::moments::{moment_stats, CheckedToF64, MomentStats};
use std::panic::catch_unwind;
/// Drives `random_primitive_float_range(EXAMPLE_SEED, a, b)` and checks its
/// first values, most common values, median, and moment statistics against
/// the expected data.
fn random_primitive_float_range_helper<T: CheckedToF64 + PrimitiveFloat>(
    a: T,
    b: T,
    expected_values: &[T],
    expected_common_values: &[(T, usize)],
    expected_median: (T, Option<T>),
    expected_moment_stats: MomentStats,
) {
    let stream = random_primitive_float_range::<T>(EXAMPLE_SEED, a, b);

    // Observed statistics, each taken from a fresh clone of the generator.
    let head = stream.clone().take(20).map(NiceFloat).collect_vec();
    let frequent = common_values_map(1000000, 10, stream.clone().map(NiceFloat));
    let observed_median = median(stream.clone().map(NiceFloat).take(1000000));
    let stats = moment_stats(stream.take(1000000));

    // Expected side, reshaped to match the observed types.
    let expected_head = expected_values.iter().cloned().map(NiceFloat).collect_vec();
    let expected_frequent = expected_common_values
        .iter()
        .map(|&(x, freq)| (NiceFloat(x), freq))
        .collect_vec();
    let (median_lo, median_hi) = expected_median;

    assert_eq!(
        (head, frequent.as_slice(), observed_median, stats),
        (
            expected_head,
            expected_frequent.as_slice(),
            (NiceFloat(median_lo), median_hi.map(NiceFloat)),
            expected_moment_stats
        )
    );
}
#[test]
fn test_random_primitive_float_range() {
// f32, a = 1.0, b = 2.0
let values = &[
1.5463697, 1.2951918, 1.7384838, 1.2143862, 1.1419607, 1.0917295, 1.7257521, 1.849941,
1.1442195, 1.363777, 1.052571, 1.0717841, 1.9104315, 1.3754328, 1.590667, 1.0705026,
1.8980603, 1.8630176, 1.0212592, 1.3380667,
];
let common_values = &[
(1.9376882, 5),
(1.012385, 4),
(1.439915, 4),
(1.709473, 4),
(1.754993, 4),
(1.944844, 4),
(1.971242, 4),
(1.978845, 4),
(1.0289025, 4),
(1.0466498, 4),
];
let sample_median = (1.499921, None);
let sample_moment_stats = MomentStats {
mean: NiceFloat(1.49979057649457),
standard_deviation: NiceFloat(0.2887387766808365),
skewness: NiceFloat(0.0002622267624830283),
excess_kurtosis: NiceFloat(-1.1997935828388204),
};
random_primitive_float_range_helper::<f32>(
1.0,
2.0,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f32, a = -0.1, b = 0.1
let values = &[
5.664681e-11,
1.2492925e-35,
2.3242339e-29,
4.699183e-7,
-2.8244436e-36,
-2.264039e-37,
-0.0000017299129,
1.40616e-23,
2.7418007e-27,
1.5418819e-16,
-1.8473076e-36,
-2.4935917e-21,
-3.373897e-37,
-7.5386525e-15,
-2.2595721e-7,
-8.293393e-39,
0.0025248893,
1.1819218e-25,
2.3384073e-23,
3.1464167e-24,
];
let common_values = &[
(0.02590246, 2),
(-0.09233444, 2),
(0.001610253, 2),
(0.010553952, 2),
(0.020663222, 2),
(0.031000609, 2),
(1.30495e-38, 2),
(1.409154e-8, 2),
(2.599722e-7, 2),
(3.67508e-29, 2),
];
let sample_median = (-1.472737e-39, Some(-1.471169e-39));
let sample_moment_stats = MomentStats {
mean: NiceFloat(-3.190292459186594e-6),
standard_deviation: NiceFloat(0.007506907081695582),
skewness: NiceFloat(-0.02559343794273501),
excess_kurtosis: NiceFloat(84.97988219106435),
};
random_primitive_float_range_helper::<f32>(
-0.1,
0.1,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f32, a = e, b = π
let values = &[
2.811021, 3.0798163, 2.8497639, 2.9021935, 3.0803769, 3.0796993, 3.088304, 2.872187,
2.8092258, 2.7708528, 3.0054183, 2.7851858, 2.745991, 2.9290476, 2.913056, 2.899723,
2.9672115, 2.875196, 3.01054, 3.0299006,
];
let common_values = &[
(2.7395, 7),
(2.7335808, 7),
(2.8363338, 7),
(3.0879333, 7),
(2.760186, 6),
(2.799341, 6),
(2.933202, 6),
(2.978166, 6),
(3.012332, 6),
(3.034496, 6),
];
let sample_median = (2.9301434, Some(2.930144));
let sample_moment_stats = MomentStats {
mean: NiceFloat(2.9300705904196347),
standard_deviation: NiceFloat(0.12218018336191779),
skewness: NiceFloat(-0.0024072138827345158),
excess_kurtosis: NiceFloat(-1.1980037439170255),
};
random_primitive_float_range_helper::<f32>(
core::f32::consts::E,
core::f32::consts::PI,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f32, a = 100.0, b = 101.0
let values = &[
100.96766, 100.10573, 100.102974, 100.47697, 100.441444, 100.94259, 100.696365, 100.36691,
100.79254, 100.435005, 100.23124, 100.153755, 100.25385, 100.64986, 100.26314, 100.148544,
100.28187, 100.3743, 100.18771, 100.901344,
];
let common_values = &[
(100.15877, 24),
(100.081535, 22),
(100.26679, 21),
(100.56587, 21),
(100.894196, 21),
(100.3593, 20),
(100.4054, 20),
(100.30979, 20),
(100.45853, 20),
(100.49529, 20),
];
let sample_median = (100.50088, None);
let sample_moment_stats = MomentStats {
mean: NiceFloat(100.50054111846543),
standard_deviation: NiceFloat(0.2888116297082562),<|fim▁hole|> 100.0,
101.0,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f32, a = 1.0e38, b = Infinity
let values = &[
1.4647999e38,
3.1018272e38,
1.582411e38,
1.5544886e38,
1.5924082e38,
2.9619212e38,
2.8168304e38,
2.9816339e38,
1.2098325e38,
2.5528384e38,
1.0473973e38,
2.2168899e38,
1.8072246e38,
1.732986e38,
1.0828477e38,
1.3966511e38,
2.61352e38,
1.6959917e38,
1.727243e38,
2.8140436e38,
];
let common_values = &[
(1.223221e38, 4),
(1.372136e38, 4),
(1.0892582e38, 4),
(1.4897022e38, 4),
(1.5085965e38, 4),
(1.5266252e38, 4),
(1.8360457e38, 4),
(2.5784374e38, 4),
(2.6144523e38, 4),
(2.7852527e38, 4),
];
let sample_median = (1.8507265e38, Some(1.8507311e38));
let sample_moment_stats = MomentStats {
mean: NiceFloat(2.0095713198371904e38),
standard_deviation: NiceFloat(7.129528670871142e37),
skewness: NiceFloat(0.37808793164351623),
excess_kurtosis: NiceFloat(-1.168840184381319),
};
random_primitive_float_range_helper::<f32>(
1.0e38,
f32::POSITIVE_INFINITY,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f32, a = -f32::MIN_POSITIVE_SUBNORMAL, b = f32::MIN_POSITIVE_SUBNORMAL
let values = &[
-0.0, -1.0e-45, -0.0, 0.0, -0.0, -0.0, -1.0e-45, -0.0, -1.0e-45, 0.0, -0.0, -1.0e-45, -0.0,
0.0, 0.0, -1.0e-45, -0.0, -1.0e-45, 0.0, 0.0,
];
let common_values = &[(-0.0, 333784), (0.0, 333516), (-1.0e-45, 332700)];
let sample_median = (-0.0, None);
let sample_moment_stats = MomentStats {
mean: NiceFloat(-4.662119990808644e-46),
standard_deviation: NiceFloat(6.602643154251322e-46),
skewness: NiceFloat(-0.7101318209186737),
excess_kurtosis: NiceFloat(-1.4957127969187527),
};
random_primitive_float_range_helper::<f32>(
-f32::MIN_POSITIVE_SUBNORMAL,
f32::MIN_POSITIVE_SUBNORMAL,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f32, a = -0.0, b = f32::MIN_POSITIVE_SUBNORMAL
let values = &[
0.0, -0.0, -0.0, -0.0, 0.0, 0.0, 0.0, -0.0, 0.0, 0.0, 0.0, 0.0, -0.0, 0.0, 0.0, 0.0, 0.0,
-0.0, 0.0, -0.0,
];
let common_values = &[(0.0, 500473), (-0.0, 499527)];
let sample_median = (0.0, None);
let sample_moment_stats = MomentStats {
mean: NiceFloat(0.0),
standard_deviation: NiceFloat(0.0),
skewness: NiceFloat(f64::NAN),
excess_kurtosis: NiceFloat(f64::NAN),
};
random_primitive_float_range_helper::<f32>(
-0.0,
f32::MIN_POSITIVE_SUBNORMAL,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f32, a = 0.0, b = f32::MIN_POSITIVE_SUBNORMAL
let values = &[0.0; 20];
let common_values = &[(0.0, 1000000)];
let sample_median = (0.0, None);
let sample_moment_stats = MomentStats {
mean: NiceFloat(0.0),
standard_deviation: NiceFloat(0.0),
skewness: NiceFloat(f64::NAN),
excess_kurtosis: NiceFloat(f64::NAN),
};
random_primitive_float_range_helper::<f32>(
0.0,
f32::MIN_POSITIVE_SUBNORMAL,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f32, a = -f32::MIN_POSITIVE_SUBNORMAL, b = -0.0
let values = &[-1.0e-45; 20];
let common_values = &[(-1.0e-45, 1000000)];
let sample_median = (-1.0e-45, None);
let sample_moment_stats = MomentStats {
mean: NiceFloat(-1.401298464324817e-45),
standard_deviation: NiceFloat(0.0),
skewness: NiceFloat(f64::NAN),
excess_kurtosis: NiceFloat(f64::NAN),
};
random_primitive_float_range_helper::<f32>(
-f32::MIN_POSITIVE_SUBNORMAL,
-0.0,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f32, a = -f32::MIN_POSITIVE_SUBNORMAL, b = 0.0
let values = &[
-0.0, -1.0e-45, -1.0e-45, -1.0e-45, -0.0, -0.0, -0.0, -1.0e-45, -0.0, -0.0, -0.0, -0.0,
-1.0e-45, -0.0, -0.0, -0.0, -0.0, -1.0e-45, -0.0, -1.0e-45,
];
let common_values = &[(-0.0, 500473), (-1.0e-45, 499527)];
let sample_median = (-0.0, None);
let sample_moment_stats = MomentStats {
mean: NiceFloat(-6.9998641798878095e-46),
standard_deviation: NiceFloat(7.006492689759787e-46),
skewness: NiceFloat(-0.0018920008465908337),
excess_kurtosis: NiceFloat(-1.9999964203328955),
};
random_primitive_float_range_helper::<f32>(
-f32::MIN_POSITIVE_SUBNORMAL,
0.0,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f32, a = -Infinity, b = Infinity
let values = &[
-2.3484665e-27,
2.2879888e-18,
-2.0729896e-12,
3.3600117e28,
-9.0217234e-32,
3564911.0,
-0.000013376945,
-1.885545e18,
8.249455e-29,
2.2178013e-38,
-6.306773e-34,
5.199601e31,
7.6132625e33,
0.00015323664,
9.4768183e36,
-0.0005665587,
8.873326e-30,
0.09273134,
-7.774831e33,
4.315623e-8,
];
let common_values = &[
(5.71262, 2),
(780.036, 2),
(224535.3, 2),
(58.67172, 2),
(73439.85, 2),
(-58.01006, 2),
(-66297.15, 2),
(-66476.91, 2),
(13200.071, 2),
(3306.3635, 2),
];
let sample_median = (4.601794e-39, Some(4.606577e-39));
let sample_moment_stats = MomentStats {
mean: NiceFloat(-1.871815997376882e34),
standard_deviation: NiceFloat(1.8597574260800838e37),
skewness: NiceFloat(-0.04588420234596291),
excess_kurtosis: NiceFloat(174.30920609573673),
};
random_primitive_float_range_helper::<f32>(
f32::NEGATIVE_INFINITY,
f32::POSITIVE_INFINITY,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f32, a = -0.0, b = 0.0
let values = &[-0.0; 20];
let common_values = &[(-0.0, 1000000)];
let sample_median = (-0.0, None);
let sample_moment_stats = MomentStats {
mean: NiceFloat(0.0),
standard_deviation: NiceFloat(0.0),
skewness: NiceFloat(f64::NAN),
excess_kurtosis: NiceFloat(f64::NAN),
};
random_primitive_float_range_helper::<f32>(
-0.0,
0.0,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f64, a = 1.0, b = 2.0
let values = &[
1.5514873723431857,
1.7356480435333936,
1.2240680379087014,
1.5721098095143498,
1.445723211731554,
1.443348441346778,
1.128043347677334,
1.9657544165271619,
1.259133073045527,
1.9463717627559034,
1.827615676661706,
1.3546147198266414,
1.3547277462886724,
1.6644379935168552,
1.7300004987549573,
1.1347106338290753,
1.6337434960012935,
1.9398684976828995,
1.5480087631774717,
1.5114010060819247,
];
let common_values = &[
(1.3443697926, 1),
(1.3820769412, 1),
(1.4136496448, 1),
(1.05230401048, 1),
(1.06345642396, 1),
(1.08636222403, 1),
(1.08890959097, 1),
(1.10364420294, 1),
(1.17100333598, 1),
(1.21003284406, 1),
];
let sample_median = (1.4997587655631748, Some(1.4997590736389839));
let sample_moment_stats = MomentStats {
mean: NiceFloat(1.5002317198585347),
standard_deviation: NiceFloat(0.2886284765385832),
skewness: NiceFloat(0.0005691088300059665),
excess_kurtosis: NiceFloat(-1.1997562526471726),
};
random_primitive_float_range_helper::<f64>(
1.0,
2.0,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f64, a = -0.1, b = 0.1
let values = &[
-7.283095678343042e-164,
4.085787276271492e-169,
-1.6885972585325658e-191,
-1.5059586906723643e-66,
-6.637230143944272e-36,
2.0111059084569595e-54,
-3.2171834547379634e-195,
-1.4304898186595632e-260,
-5.910214544689135e-300,
4.248352948466203e-63,
-3.6882240870537675e-31,
8.12900376877632e-277,
8.630695763640745e-286,
-2.7842211494385523e-123,
-4.271131813514248e-164,
1.613930919542087e-167,
-5.39182068994581e-107,
-1.4532461060667818e-9,
-1.9793582955127234e-289,
5.420373932282823e-196,
];
let common_values = &[
(5.62015686679e-6, 1),
(-0.09576016351376, 1),
(-3.9141428595e-60, 1),
(-4.5355157777e-28, 1),
(0.008342058495796, 1),
(0.012335893098144, 1),
(0.014079819535342, 1),
(0.014718940078426, 1),
(0.031741597598458, 1),
(0.033991243007763, 1),
];
let sample_median = (1.566509212534917e-309, Some(1.56863192120459e-309));
let sample_moment_stats = MomentStats {
mean: NiceFloat(4.4695816858634463e-7),
standard_deviation: NiceFloat(0.002635102953882735),
skewness: NiceFloat(0.27772415900587566),
excess_kurtosis: NiceFloat(707.152044677798),
};
random_primitive_float_range_helper::<f64>(
-0.1,
0.1,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f64, a = e, b = π
let values = &[
2.8212565731454164,
3.103466176726195,
2.888459041537496,
2.94833744629582,
2.9285662350147255,
3.059002590500268,
2.808432051804475,
3.077033595571352,
3.0898242789403123,
3.093937352570613,
2.7596383425151814,
3.1049928702292573,
2.7453107067232327,
3.0779370799622736,
2.9748071250720396,
2.927927166467895,
2.81511226878185,
2.928920013122519,
2.964625285981546,
3.046598518604858,
];
let common_values = &[
(2.7683806707, 1),
(2.8058681766, 1),
(2.8522842725, 1),
(2.8873246989, 1),
(2.72492950364, 1),
(2.73164898148, 1),
(2.73476073924, 1),
(2.73598990929, 1),
(2.73653142351, 1),
(2.74563905301, 1),
];
let sample_median = (2.930132942011006, Some(2.9301336276615912));
let sample_moment_stats = MomentStats {
mean: NiceFloat(2.929964069913902),
standard_deviation: NiceFloat(0.12226749948876238),
skewness: NiceFloat(-0.0013881669668324012),
excess_kurtosis: NiceFloat(-1.2003731669148405),
};
random_primitive_float_range_helper::<f64>(
core::f64::consts::E,
core::f64::consts::PI,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f64, a = 100.0, b = 101.0
let values = &[
100.29519182996388,
100.21438631278083,
100.09172953867444,
100.84994110175992,
100.36377705862755,
100.07178414494646,
100.37543295746225,
100.07050270922983,
100.86301766610865,
100.33806669965496,
100.35496099272225,
100.93577122524063,
100.00524419289253,
100.29363379918549,
100.98421354539467,
100.68228296091216,
100.93250012468873,
100.1553701412652,
100.95333990532461,
100.2218641465098,
];
let common_values = &[
(100.10137554, 1),
(100.34387327, 1),
(100.223865218, 1),
(100.237336607, 1),
(100.241016737, 1),
(100.358275298, 1),
(100.490668361, 1),
(100.563824325, 1),
(100.567992111, 1),
(100.619353436, 1),
];
let sample_median = (100.49999381186375, Some(100.49999461609349));
let sample_moment_stats = MomentStats {
mean: NiceFloat(100.4998603968099),
standard_deviation: NiceFloat(0.28878031747138194),
skewness: NiceFloat(-0.00018856944159801264),
excess_kurtosis: NiceFloat(-1.2006169795569301),
};
random_primitive_float_range_helper::<f64>(
100.0,
101.0,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f64, a = 1.0e38, b = Infinity
let values = &[
1.3219190533477493e200,
3.652437632585123e180,
2.0420353527516904e248,
2.505458962964126e276,
2.659899792371364e116,
2.7125386559147274e90,
9.536479965391043e185,
9.567216720381635e239,
5.16993041287954e245,
4.939547529284952e179,
3.1175116898205872e224,
1.7555281884088452e42,
5.429209768108731e84,
1.0447670959436904e299,
1.9580250342195754e105,
8.848423533619703e204,
3.4434065546244285e79,
3.6093218170205304e216,
8.464035133686624e293,
1.22423660941592e120,
];
let common_values = &[
(2.141438721e116, 1),
(8.7676954155e86, 1),
(1.28439118539e55, 1),
(1.79171075176e53, 1),
(2.10333657725e74, 1),
(2.3236426209e231, 1),
(2.95823857742e58, 1),
(3.1078914828e141, 1),
(3.38975629714e61, 1),
(4.28790184556e74, 1),
];
let sample_median = (1.2523958970084127e173, Some(1.2542732495420994e173));
let sample_moment_stats = MomentStats {
mean: NiceFloat(2.939399538027295e305),
standard_deviation: NiceFloat(f64::POSITIVE_INFINITY),
skewness: NiceFloat(f64::NAN),
excess_kurtosis: NiceFloat(f64::NAN),
};
random_primitive_float_range_helper::<f64>(
1.0e38,
f64::POSITIVE_INFINITY,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f64, a = -f64::MIN_POSITIVE_SUBNORMAL, b = f64::MIN_POSITIVE_SUBNORMAL
let values = &[
-0.0, -5.0e-324, -0.0, 0.0, -0.0, -0.0, -5.0e-324, -0.0, -5.0e-324, 0.0, -0.0, -5.0e-324,
-0.0, 0.0, 0.0, -5.0e-324, -0.0, -5.0e-324, 0.0, 0.0,
];
let common_values = &[(-0.0, 333784), (0.0, 333516), (-5.0e-324, 332700)];
let sample_median = (-0.0, None);
let sample_moment_stats = MomentStats {
mean: NiceFloat(0.0),
standard_deviation: NiceFloat(0.0),
skewness: NiceFloat(f64::NAN),
excess_kurtosis: NiceFloat(f64::NAN),
};
random_primitive_float_range_helper::<f64>(
-f64::MIN_POSITIVE_SUBNORMAL,
f64::MIN_POSITIVE_SUBNORMAL,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f64, a = -0.0, b = f64::MIN_POSITIVE_SUBNORMAL
let values = &[
0.0, -0.0, -0.0, -0.0, 0.0, 0.0, 0.0, -0.0, 0.0, 0.0, 0.0, 0.0, -0.0, 0.0, 0.0, 0.0, 0.0,
-0.0, 0.0, -0.0,
];
let common_values = &[(0.0, 500473), (-0.0, 499527)];
let sample_median = (0.0, None);
let sample_moment_stats = MomentStats {
mean: NiceFloat(0.0),
standard_deviation: NiceFloat(0.0),
skewness: NiceFloat(f64::NAN),
excess_kurtosis: NiceFloat(f64::NAN),
};
random_primitive_float_range_helper::<f64>(
-0.0,
f64::MIN_POSITIVE_SUBNORMAL,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f64, a = 0.0, b = f64::MIN_POSITIVE_SUBNORMAL
let values = &[0.0; 20];
let common_values = &[(0.0, 1000000)];
let sample_median = (0.0, None);
let sample_moment_stats = MomentStats {
mean: NiceFloat(0.0),
standard_deviation: NiceFloat(0.0),
skewness: NiceFloat(f64::NAN),
excess_kurtosis: NiceFloat(f64::NAN),
};
random_primitive_float_range_helper::<f64>(
0.0,
f64::MIN_POSITIVE_SUBNORMAL,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f64, a = -f64::MIN_POSITIVE_SUBNORMAL, b = -0.0
let values = &[-5.0e-324; 20];
let common_values = &[(-5.0e-324, 1000000)];
let sample_median = (-5.0e-324, None);
let sample_moment_stats = MomentStats {
mean: NiceFloat(-5.0e-324),
standard_deviation: NiceFloat(0.0),
skewness: NiceFloat(f64::NAN),
excess_kurtosis: NiceFloat(f64::NAN),
};
random_primitive_float_range_helper::<f64>(
-f64::MIN_POSITIVE_SUBNORMAL,
-0.0,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f64, a = -f64::MIN_POSITIVE_SUBNORMAL, b = 0.0
let values = &[
-0.0, -5.0e-324, -5.0e-324, -5.0e-324, -0.0, -0.0, -0.0, -5.0e-324, -0.0, -0.0, -0.0, -0.0,
-5.0e-324, -0.0, -0.0, -0.0, -0.0, -5.0e-324, -0.0, -5.0e-324,
];
let common_values = &[(-0.0, 500473), (-5.0e-324, 499527)];
let sample_median = (-0.0, None);
let sample_moment_stats = MomentStats {
mean: NiceFloat(0.0),
standard_deviation: NiceFloat(0.0),
skewness: NiceFloat(f64::NAN),
excess_kurtosis: NiceFloat(f64::NAN),
};
random_primitive_float_range_helper::<f64>(
-f64::MIN_POSITIVE_SUBNORMAL,
0.0,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f64, a = -Infinity, b = Infinity
let values = &[
3.106206640558341e-146,
7.268713316268921e223,
1.1685126708702852e48,
-1.0824685183946236e146,
3.114605160661583e-306,
2.2453015573637674e249,
1.2548860979388685e-35,
-8.287939157477947e-27,
2.1255041535787165e-13,
4.815129234795048e-64,
1.3850402674408148e-17,
-1.253571770758962e207,
-1.4941028004491906e142,
4.366611961454907e-51,
-7.063699168119985e17,
-7.062565582436957e90,
1.1662950933663382e-221,
2.1976577668343592e-97,
-2.8212944266870196e-137,
1.2250916338748408e-222,
];
let common_values = &[
(-9967188.16722, 1),
(1808.830612999, 1),
(32578528203.69, 1),
(5643444.695113, 1),
(812845035127.8, 1),
(-13741970740.45, 1),
(-1434325.082519, 1),
(-33781527.93352, 1),
(-374012916597.5, 1),
(-46629353341.91, 1),
];
let sample_median = (2.772306592172272e-308, Some(2.7820731194979217e-308));
let sample_moment_stats = MomentStats {
mean: NiceFloat(7.922018643581038e303),
standard_deviation: NiceFloat(f64::POSITIVE_INFINITY),
skewness: NiceFloat(f64::NAN),
excess_kurtosis: NiceFloat(f64::NAN),
};
random_primitive_float_range_helper::<f64>(
f64::NEGATIVE_INFINITY,
f64::POSITIVE_INFINITY,
values,
common_values,
sample_median,
sample_moment_stats,
);
// f64, a = -0.0, b = 0.0
let values = &[-0.0; 20];
let common_values = &[(-0.0, 1000000)];
let sample_median = (-0.0, None);
let sample_moment_stats = MomentStats {
mean: NiceFloat(0.0),
standard_deviation: NiceFloat(0.0),
skewness: NiceFloat(f64::NAN),
excess_kurtosis: NiceFloat(f64::NAN),
};
random_primitive_float_range_helper::<f64>(
-0.0,
0.0,
values,
common_values,
sample_median,
sample_moment_stats,
);
}
/// Asserts that `random_primitive_float_range` panics on invalid bounds:
/// an empty range (a == b), a reversed range (a > b), and a NaN bound.
fn random_primitive_float_range_fail_helper<T: PrimitiveFloat>() {
    assert_panic!(random_primitive_float_range::<T>(
        EXAMPLE_SEED,
        T::ZERO,
        T::ZERO
    ));
    assert_panic!(random_primitive_float_range::<T>(
        EXAMPLE_SEED,
        T::ONE,
        T::ZERO
    ));
    assert_panic!(random_primitive_float_range::<T>(
        EXAMPLE_SEED,
        T::ONE,
        T::NAN
    ));
}
#[test]
fn random_primitive_float_range_fail() {
    // Runs the panic checks for every primitive float type (f32 and f64).
    apply_fn_to_primitive_floats!(random_primitive_float_range_fail_helper);
}
excess_kurtosis: NiceFloat(-1.2016989304148467),
};
random_primitive_float_range_helper::<f32>( |
<|file_name|>service.go<|end_file_name|><|fim▁begin|>// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
package personalize
import (
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/client"
"github.com/aws/aws-sdk-go/aws/client/metadata"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/aws/signer/v4"
"github.com/aws/aws-sdk-go/private/protocol/jsonrpc"
)
// Personalize provides the API operation methods for making requests to
// Amazon Personalize. See this package's package overview docs
// for details on the service.
//
// Personalize methods are safe to use concurrently. It is not safe to
// mutate any of the struct's properties though.
type Personalize struct {
	*client.Client
}
// Used for custom client initialization logic
var initClient func(*client.Client)

// Used for custom request initialization logic
var initRequest func(*request.Request)

// Service information constants
const (
	ServiceName = "Personalize" // Name of service.
	EndpointsID = "personalize" // ID to lookup a service endpoint with.
	ServiceID   = "Personalize" // ServiceID is a unique identifier of a specific service.
)
// New creates a new instance of the Personalize client with a session.
// If additional configuration is needed for the client instance use the optional
// aws.Config parameter to add your extra config.
//
// Example:
//     // Create a Personalize client from just a session.
//     svc := personalize.New(mySession)
//
//     // Create a Personalize client with additional configuration
//     svc := personalize.New(mySession, aws.NewConfig().WithRegion("us-west-2"))
func New(p client.ConfigProvider, cfgs ...*aws.Config) *Personalize {
	c := p.ClientConfig(EndpointsID, cfgs...)
	// Fall back to the service's default signing name unless the caller
	// supplied an explicit, non-derived one.
	if c.SigningNameDerived || len(c.SigningName) == 0 {
		c.SigningName = "personalize"
	}
	return newClient(*c.Config, c.Handlers, c.Endpoint, c.SigningRegion, c.SigningName)
}
// newClient creates, initializes and returns a new service client instance.
func newClient(cfg aws.Config, handlers request.Handlers, endpoint, signingRegion, signingName string) *Personalize {
    svc := &Personalize{
        Client: client.New(
            cfg,
            metadata.ClientInfo{
                ServiceName: ServiceName,
                ServiceID: ServiceID,
                SigningName: signingName,
                SigningRegion: signingRegion,
                Endpoint: endpoint,
                APIVersion: "2018-05-22",
                JSONVersion: "1.1",
                TargetPrefix: "AmazonPersonalize",
            },
            handlers,
        ),
    }

    // Handlers: SigV4 request signing plus the JSON-RPC protocol
    // marshalers/unmarshalers used by Amazon Personalize.
    svc.Handlers.Sign.PushBackNamed(v4.SignRequestHandler)
    svc.Handlers.Build.PushBackNamed(jsonrpc.BuildHandler)
    svc.Handlers.Unmarshal.PushBackNamed(jsonrpc.UnmarshalHandler)
    svc.Handlers.UnmarshalMeta.PushBackNamed(jsonrpc.UnmarshalMetaHandler)
    svc.Handlers.UnmarshalError.PushBackNamed(jsonrpc.UnmarshalErrorHandler)

    // Run custom client initialization if present
    if initClient != nil {
        initClient(svc.Client)
    }

    return svc
}
// newRequest creates a new request for a Personalize operation and runs any
// custom request initialization.
func (c *Personalize) newRequest(op *request.Operation, params, data interface{}) *request.Request {
req := c.NewRequest(op, params, data)<|fim▁hole|> initRequest(req)
}
return req
}<|fim▁end|> |
// Run custom request initialization if present
if initRequest != nil { |
<|file_name|>cp437.py<|end_file_name|><|fim▁begin|>""" Python Character Mapping Codec cp437 generated from 'VENDORS/MICSFT/PC/CP437.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless cp437 codec built on the codecs charmap helpers."""

    def encode(self, input, errors='strict'):
        # charmap_encode returns (encoded bytes, number of chars consumed).
        encoded, consumed = codecs.charmap_encode(input, errors, encoding_map)
        return encoded, consumed

    def decode(self, input, errors='strict'):
        # charmap_decode returns (decoded text, number of bytes consumed).
        decoded, consumed = codecs.charmap_decode(input, errors, decoding_table)
        return decoded, consumed
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental cp437 encoder; charmap encoding needs no state between calls."""

    def encode(self, input, final=False):
        # Drop the consumed-length element; only the bytes are returned here.
        encoded, _consumed = codecs.charmap_encode(input, self.errors, encoding_map)
        return encoded
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental cp437 decoder; charmap decoding needs no state between calls."""

    def decode(self, input, final=False):
        # Drop the consumed-length element; only the text is returned here.
        decoded, _consumed = codecs.charmap_decode(input, self.errors, decoding_table)
        return decoded
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream-oriented writer: encode() is inherited from Codec and the
    # write/flush machinery from codecs.StreamWriter; no extra state needed.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream-oriented reader: decode() is inherited from Codec and the
    # read/buffering machinery from codecs.StreamReader; no extra state needed.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo used to register this codec as 'cp437'."""
    codec = Codec()
    return codecs.CodecInfo(
        name='cp437',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
0x0085: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
0x0086: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
0x0088: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
0x0089: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
0x008a: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
0x008b: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
0x008c: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
0x008d: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE
0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x008f: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
0x0091: 0x00e6, # LATIN SMALL LIGATURE AE
0x0092: 0x00c6, # LATIN CAPITAL LIGATURE AE
0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
0x0095: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE
0x0096: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
0x0097: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
0x0098: 0x00ff, # LATIN SMALL LETTER Y WITH DIAERESIS
0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x009b: 0x00a2, # CENT SIGN
0x009c: 0x00a3, # POUND SIGN
0x009d: 0x00a5, # YEN SIGN
0x009e: 0x20a7, # PESETA SIGN
0x009f: 0x0192, # LATIN SMALL LETTER F WITH HOOK
0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
0x00a4: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
0x00a5: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
0x00a6: 0x00aa, # FEMININE ORDINAL INDICATOR
0x00a7: 0x00ba, # MASCULINE ORDINAL INDICATOR
0x00a8: 0x00bf, # INVERTED QUESTION MARK
0x00a9: 0x2310, # REVERSED NOT SIGN
0x00aa: 0x00ac, # NOT SIGN
0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
0x00ad: 0x00a1, # INVERTED EXCLAMATION MARK
0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00b0: 0x2591, # LIGHT SHADE
0x00b1: 0x2592, # MEDIUM SHADE
0x00b2: 0x2593, # DARK SHADE
0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x00b5: 0x2561, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
0x00b6: 0x2562, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
0x00b7: 0x2556, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
0x00b8: 0x2555, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
0x00bd: 0x255c, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
0x00be: 0x255b, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x00c6: 0x255e, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
0x00c7: 0x255f, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x00cf: 0x2567, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
0x00d0: 0x2568, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
0x00d1: 0x2564, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
0x00d2: 0x2565, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
0x00d3: 0x2559, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
0x00d4: 0x2558, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
0x00d5: 0x2552, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
0x00d6: 0x2553, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
0x00d7: 0x256b, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
0x00d8: 0x256a, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x00db: 0x2588, # FULL BLOCK
0x00dc: 0x2584, # LOWER HALF BLOCK
0x00dd: 0x258c, # LEFT HALF BLOCK
0x00de: 0x2590, # RIGHT HALF BLOCK
0x00df: 0x2580, # UPPER HALF BLOCK
0x00e0: 0x03b1, # GREEK SMALL LETTER ALPHA
0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
0x00e2: 0x0393, # GREEK CAPITAL LETTER GAMMA
0x00e3: 0x03c0, # GREEK SMALL LETTER PI
0x00e4: 0x03a3, # GREEK CAPITAL LETTER SIGMA
0x00e5: 0x03c3, # GREEK SMALL LETTER SIGMA
0x00e6: 0x00b5, # MICRO SIGN
0x00e7: 0x03c4, # GREEK SMALL LETTER TAU
0x00e8: 0x03a6, # GREEK CAPITAL LETTER PHI
0x00e9: 0x0398, # GREEK CAPITAL LETTER THETA
0x00ea: 0x03a9, # GREEK CAPITAL LETTER OMEGA
0x00eb: 0x03b4, # GREEK SMALL LETTER DELTA
0x00ec: 0x221e, # INFINITY
0x00ed: 0x03c6, # GREEK SMALL LETTER PHI
0x00ee: 0x03b5, # GREEK SMALL LETTER EPSILON
0x00ef: 0x2229, # INTERSECTION
0x00f0: 0x2261, # IDENTICAL TO
0x00f1: 0x00b1, # PLUS-MINUS SIGN
0x00f2: 0x2265, # GREATER-THAN OR EQUAL TO
0x00f3: 0x2264, # LESS-THAN OR EQUAL TO
0x00f4: 0x2320, # TOP HALF INTEGRAL
0x00f5: 0x2321, # BOTTOM HALF INTEGRAL
0x00f6: 0x00f7, # DIVISION SIGN
0x00f7: 0x2248, # ALMOST EQUAL TO
0x00f8: 0x00b0, # DEGREE SIGN
0x00f9: 0x2219, # BULLET OPERATOR
0x00fa: 0x00b7, # MIDDLE DOT
0x00fb: 0x221a, # SQUARE ROOT
0x00fc: 0x207f, # SUPERSCRIPT LATIN SMALL LETTER N
0x00fd: 0x00b2, # SUPERSCRIPT TWO
0x00fe: 0x25a0, # BLACK SQUARE
0x00ff: 0x00a0, # NO-BREAK SPACE
})
### Decoding Table
# Maps each byte value 0x00-0xFF (by string index) to its Unicode character.
# Indices 0x00-0x7F are plain ASCII; 0x80-0xFF carry the CP437 extended set.
# FIX: the entries for 0x8A, 0x8B and 0x8C were missing (the table was 253
# characters long, shifting every decode >= 0x8A); restored from decoding_map.
decoding_table = (
    '\x00'  # 0x0000 -> NULL
    '\x01'  # 0x0001 -> START OF HEADING
    '\x02'  # 0x0002 -> START OF TEXT
    '\x03'  # 0x0003 -> END OF TEXT
    '\x04'  # 0x0004 -> END OF TRANSMISSION
    '\x05'  # 0x0005 -> ENQUIRY
    '\x06'  # 0x0006 -> ACKNOWLEDGE
    '\x07'  # 0x0007 -> BELL
    '\x08'  # 0x0008 -> BACKSPACE
    '\t'    # 0x0009 -> HORIZONTAL TABULATION
    '\n'    # 0x000a -> LINE FEED
    '\x0b'  # 0x000b -> VERTICAL TABULATION
    '\x0c'  # 0x000c -> FORM FEED
    '\r'    # 0x000d -> CARRIAGE RETURN
    '\x0e'  # 0x000e -> SHIFT OUT
    '\x0f'  # 0x000f -> SHIFT IN
    '\x10'  # 0x0010 -> DATA LINK ESCAPE
    '\x11'  # 0x0011 -> DEVICE CONTROL ONE
    '\x12'  # 0x0012 -> DEVICE CONTROL TWO
    '\x13'  # 0x0013 -> DEVICE CONTROL THREE
    '\x14'  # 0x0014 -> DEVICE CONTROL FOUR
    '\x15'  # 0x0015 -> NEGATIVE ACKNOWLEDGE
    '\x16'  # 0x0016 -> SYNCHRONOUS IDLE
    '\x17'  # 0x0017 -> END OF TRANSMISSION BLOCK
    '\x18'  # 0x0018 -> CANCEL
    '\x19'  # 0x0019 -> END OF MEDIUM
    '\x1a'  # 0x001a -> SUBSTITUTE
    '\x1b'  # 0x001b -> ESCAPE
    '\x1c'  # 0x001c -> FILE SEPARATOR
    '\x1d'  # 0x001d -> GROUP SEPARATOR
    '\x1e'  # 0x001e -> RECORD SEPARATOR
    '\x1f'  # 0x001f -> UNIT SEPARATOR
    ' '     # 0x0020 -> SPACE
    '!'     # 0x0021 -> EXCLAMATION MARK
    '"'     # 0x0022 -> QUOTATION MARK
    '#'     # 0x0023 -> NUMBER SIGN
    '$'     # 0x0024 -> DOLLAR SIGN
    '%'     # 0x0025 -> PERCENT SIGN
    '&'     # 0x0026 -> AMPERSAND
    "'"     # 0x0027 -> APOSTROPHE
    '('     # 0x0028 -> LEFT PARENTHESIS
    ')'     # 0x0029 -> RIGHT PARENTHESIS
    '*'     # 0x002a -> ASTERISK
    '+'     # 0x002b -> PLUS SIGN
    ','     # 0x002c -> COMMA
    '-'     # 0x002d -> HYPHEN-MINUS
    '.'     # 0x002e -> FULL STOP
    '/'     # 0x002f -> SOLIDUS
    '0'     # 0x0030 -> DIGIT ZERO
    '1'     # 0x0031 -> DIGIT ONE
    '2'     # 0x0032 -> DIGIT TWO
    '3'     # 0x0033 -> DIGIT THREE
    '4'     # 0x0034 -> DIGIT FOUR
    '5'     # 0x0035 -> DIGIT FIVE
    '6'     # 0x0036 -> DIGIT SIX
    '7'     # 0x0037 -> DIGIT SEVEN
    '8'     # 0x0038 -> DIGIT EIGHT
    '9'     # 0x0039 -> DIGIT NINE
    ':'     # 0x003a -> COLON
    ';'     # 0x003b -> SEMICOLON
    '<'     # 0x003c -> LESS-THAN SIGN
    '='     # 0x003d -> EQUALS SIGN
    '>'     # 0x003e -> GREATER-THAN SIGN
    '?'     # 0x003f -> QUESTION MARK
    '@'     # 0x0040 -> COMMERCIAL AT
    'A'     # 0x0041 -> LATIN CAPITAL LETTER A
    'B'     # 0x0042 -> LATIN CAPITAL LETTER B
    'C'     # 0x0043 -> LATIN CAPITAL LETTER C
    'D'     # 0x0044 -> LATIN CAPITAL LETTER D
    'E'     # 0x0045 -> LATIN CAPITAL LETTER E
    'F'     # 0x0046 -> LATIN CAPITAL LETTER F
    'G'     # 0x0047 -> LATIN CAPITAL LETTER G
    'H'     # 0x0048 -> LATIN CAPITAL LETTER H
    'I'     # 0x0049 -> LATIN CAPITAL LETTER I
    'J'     # 0x004a -> LATIN CAPITAL LETTER J
    'K'     # 0x004b -> LATIN CAPITAL LETTER K
    'L'     # 0x004c -> LATIN CAPITAL LETTER L
    'M'     # 0x004d -> LATIN CAPITAL LETTER M
    'N'     # 0x004e -> LATIN CAPITAL LETTER N
    'O'     # 0x004f -> LATIN CAPITAL LETTER O
    'P'     # 0x0050 -> LATIN CAPITAL LETTER P
    'Q'     # 0x0051 -> LATIN CAPITAL LETTER Q
    'R'     # 0x0052 -> LATIN CAPITAL LETTER R
    'S'     # 0x0053 -> LATIN CAPITAL LETTER S
    'T'     # 0x0054 -> LATIN CAPITAL LETTER T
    'U'     # 0x0055 -> LATIN CAPITAL LETTER U
    'V'     # 0x0056 -> LATIN CAPITAL LETTER V
    'W'     # 0x0057 -> LATIN CAPITAL LETTER W
    'X'     # 0x0058 -> LATIN CAPITAL LETTER X
    'Y'     # 0x0059 -> LATIN CAPITAL LETTER Y
    'Z'     # 0x005a -> LATIN CAPITAL LETTER Z
    '['     # 0x005b -> LEFT SQUARE BRACKET
    '\\'    # 0x005c -> REVERSE SOLIDUS
    ']'     # 0x005d -> RIGHT SQUARE BRACKET
    '^'     # 0x005e -> CIRCUMFLEX ACCENT
    '_'     # 0x005f -> LOW LINE
    '`'     # 0x0060 -> GRAVE ACCENT
    'a'     # 0x0061 -> LATIN SMALL LETTER A
    'b'     # 0x0062 -> LATIN SMALL LETTER B
    'c'     # 0x0063 -> LATIN SMALL LETTER C
    'd'     # 0x0064 -> LATIN SMALL LETTER D
    'e'     # 0x0065 -> LATIN SMALL LETTER E
    'f'     # 0x0066 -> LATIN SMALL LETTER F
    'g'     # 0x0067 -> LATIN SMALL LETTER G
    'h'     # 0x0068 -> LATIN SMALL LETTER H
    'i'     # 0x0069 -> LATIN SMALL LETTER I
    'j'     # 0x006a -> LATIN SMALL LETTER J
    'k'     # 0x006b -> LATIN SMALL LETTER K
    'l'     # 0x006c -> LATIN SMALL LETTER L
    'm'     # 0x006d -> LATIN SMALL LETTER M
    'n'     # 0x006e -> LATIN SMALL LETTER N
    'o'     # 0x006f -> LATIN SMALL LETTER O
    'p'     # 0x0070 -> LATIN SMALL LETTER P
    'q'     # 0x0071 -> LATIN SMALL LETTER Q
    'r'     # 0x0072 -> LATIN SMALL LETTER R
    's'     # 0x0073 -> LATIN SMALL LETTER S
    't'     # 0x0074 -> LATIN SMALL LETTER T
    'u'     # 0x0075 -> LATIN SMALL LETTER U
    'v'     # 0x0076 -> LATIN SMALL LETTER V
    'w'     # 0x0077 -> LATIN SMALL LETTER W
    'x'     # 0x0078 -> LATIN SMALL LETTER X
    'y'     # 0x0079 -> LATIN SMALL LETTER Y
    'z'     # 0x007a -> LATIN SMALL LETTER Z
    '{'     # 0x007b -> LEFT CURLY BRACKET
    '|'     # 0x007c -> VERTICAL LINE
    '}'     # 0x007d -> RIGHT CURLY BRACKET
    '~'     # 0x007e -> TILDE
    '\x7f'  # 0x007f -> DELETE
    '\xc7'  # 0x0080 -> LATIN CAPITAL LETTER C WITH CEDILLA
    '\xfc'  # 0x0081 -> LATIN SMALL LETTER U WITH DIAERESIS
    '\xe9'  # 0x0082 -> LATIN SMALL LETTER E WITH ACUTE
    '\xe2'  # 0x0083 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
    '\xe4'  # 0x0084 -> LATIN SMALL LETTER A WITH DIAERESIS
    '\xe0'  # 0x0085 -> LATIN SMALL LETTER A WITH GRAVE
    '\xe5'  # 0x0086 -> LATIN SMALL LETTER A WITH RING ABOVE
    '\xe7'  # 0x0087 -> LATIN SMALL LETTER C WITH CEDILLA
    '\xea'  # 0x0088 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
    '\xeb'  # 0x0089 -> LATIN SMALL LETTER E WITH DIAERESIS
    '\xe8'  # 0x008a -> LATIN SMALL LETTER E WITH GRAVE (restored)
    '\xef'  # 0x008b -> LATIN SMALL LETTER I WITH DIAERESIS (restored)
    '\xee'  # 0x008c -> LATIN SMALL LETTER I WITH CIRCUMFLEX (restored)
    '\xec'  # 0x008d -> LATIN SMALL LETTER I WITH GRAVE
    '\xc4'  # 0x008e -> LATIN CAPITAL LETTER A WITH DIAERESIS
    '\xc5'  # 0x008f -> LATIN CAPITAL LETTER A WITH RING ABOVE
    '\xc9'  # 0x0090 -> LATIN CAPITAL LETTER E WITH ACUTE
    '\xe6'  # 0x0091 -> LATIN SMALL LIGATURE AE
    '\xc6'  # 0x0092 -> LATIN CAPITAL LIGATURE AE
    '\xf4'  # 0x0093 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
    '\xf6'  # 0x0094 -> LATIN SMALL LETTER O WITH DIAERESIS
    '\xf2'  # 0x0095 -> LATIN SMALL LETTER O WITH GRAVE
    '\xfb'  # 0x0096 -> LATIN SMALL LETTER U WITH CIRCUMFLEX
    '\xf9'  # 0x0097 -> LATIN SMALL LETTER U WITH GRAVE
    '\xff'  # 0x0098 -> LATIN SMALL LETTER Y WITH DIAERESIS
    '\xd6'  # 0x0099 -> LATIN CAPITAL LETTER O WITH DIAERESIS
    '\xdc'  # 0x009a -> LATIN CAPITAL LETTER U WITH DIAERESIS
    '\xa2'  # 0x009b -> CENT SIGN
    '\xa3'  # 0x009c -> POUND SIGN
    '\xa5'  # 0x009d -> YEN SIGN
    '\u20a7'  # 0x009e -> PESETA SIGN
    '\u0192'  # 0x009f -> LATIN SMALL LETTER F WITH HOOK
    '\xe1'  # 0x00a0 -> LATIN SMALL LETTER A WITH ACUTE
    '\xed'  # 0x00a1 -> LATIN SMALL LETTER I WITH ACUTE
    '\xf3'  # 0x00a2 -> LATIN SMALL LETTER O WITH ACUTE
    '\xfa'  # 0x00a3 -> LATIN SMALL LETTER U WITH ACUTE
    '\xf1'  # 0x00a4 -> LATIN SMALL LETTER N WITH TILDE
    '\xd1'  # 0x00a5 -> LATIN CAPITAL LETTER N WITH TILDE
    '\xaa'  # 0x00a6 -> FEMININE ORDINAL INDICATOR
    '\xba'  # 0x00a7 -> MASCULINE ORDINAL INDICATOR
    '\xbf'  # 0x00a8 -> INVERTED QUESTION MARK
    '\u2310'  # 0x00a9 -> REVERSED NOT SIGN
    '\xac'  # 0x00aa -> NOT SIGN
    '\xbd'  # 0x00ab -> VULGAR FRACTION ONE HALF
    '\xbc'  # 0x00ac -> VULGAR FRACTION ONE QUARTER
    '\xa1'  # 0x00ad -> INVERTED EXCLAMATION MARK
    '\xab'  # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    '\xbb'  # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    '\u2591'  # 0x00b0 -> LIGHT SHADE
    '\u2592'  # 0x00b1 -> MEDIUM SHADE
    '\u2593'  # 0x00b2 -> DARK SHADE
    '\u2502'  # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL
    '\u2524'  # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
    '\u2561'  # 0x00b5 -> BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
    '\u2562'  # 0x00b6 -> BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
    '\u2556'  # 0x00b7 -> BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
    '\u2555'  # 0x00b8 -> BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
    '\u2563'  # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
    '\u2551'  # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL
    '\u2557'  # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT
    '\u255d'  # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT
    '\u255c'  # 0x00bd -> BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
    '\u255b'  # 0x00be -> BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
    '\u2510'  # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT
    '\u2514'  # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT
    '\u2534'  # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
    '\u252c'  # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
    '\u251c'  # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
    '\u2500'  # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL
    '\u253c'  # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
    '\u255e'  # 0x00c6 -> BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
    '\u255f'  # 0x00c7 -> BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
    '\u255a'  # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
    '\u2554'  # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
    '\u2569'  # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
    '\u2566'  # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
    '\u2560'  # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
    '\u2550'  # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL
    '\u256c'  # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
    '\u2567'  # 0x00cf -> BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
    '\u2568'  # 0x00d0 -> BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
    '\u2564'  # 0x00d1 -> BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
    '\u2565'  # 0x00d2 -> BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
    '\u2559'  # 0x00d3 -> BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
    '\u2558'  # 0x00d4 -> BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
    '\u2552'  # 0x00d5 -> BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
    '\u2553'  # 0x00d6 -> BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
    '\u256b'  # 0x00d7 -> BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
    '\u256a'  # 0x00d8 -> BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
    '\u2518'  # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT
    '\u250c'  # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT
    '\u2588'  # 0x00db -> FULL BLOCK
    '\u2584'  # 0x00dc -> LOWER HALF BLOCK
    '\u258c'  # 0x00dd -> LEFT HALF BLOCK
    '\u2590'  # 0x00de -> RIGHT HALF BLOCK
    '\u2580'  # 0x00df -> UPPER HALF BLOCK
    '\u03b1'  # 0x00e0 -> GREEK SMALL LETTER ALPHA
    '\xdf'  # 0x00e1 -> LATIN SMALL LETTER SHARP S
    '\u0393'  # 0x00e2 -> GREEK CAPITAL LETTER GAMMA
    '\u03c0'  # 0x00e3 -> GREEK SMALL LETTER PI
    '\u03a3'  # 0x00e4 -> GREEK CAPITAL LETTER SIGMA
    '\u03c3'  # 0x00e5 -> GREEK SMALL LETTER SIGMA
    '\xb5'  # 0x00e6 -> MICRO SIGN
    '\u03c4'  # 0x00e7 -> GREEK SMALL LETTER TAU
    '\u03a6'  # 0x00e8 -> GREEK CAPITAL LETTER PHI
    '\u0398'  # 0x00e9 -> GREEK CAPITAL LETTER THETA
    '\u03a9'  # 0x00ea -> GREEK CAPITAL LETTER OMEGA
    '\u03b4'  # 0x00eb -> GREEK SMALL LETTER DELTA
    '\u221e'  # 0x00ec -> INFINITY
    '\u03c6'  # 0x00ed -> GREEK SMALL LETTER PHI
    '\u03b5'  # 0x00ee -> GREEK SMALL LETTER EPSILON
    '\u2229'  # 0x00ef -> INTERSECTION
    '\u2261'  # 0x00f0 -> IDENTICAL TO
    '\xb1'  # 0x00f1 -> PLUS-MINUS SIGN
    '\u2265'  # 0x00f2 -> GREATER-THAN OR EQUAL TO
    '\u2264'  # 0x00f3 -> LESS-THAN OR EQUAL TO
    '\u2320'  # 0x00f4 -> TOP HALF INTEGRAL
    '\u2321'  # 0x00f5 -> BOTTOM HALF INTEGRAL
    '\xf7'  # 0x00f6 -> DIVISION SIGN
    '\u2248'  # 0x00f7 -> ALMOST EQUAL TO
    '\xb0'  # 0x00f8 -> DEGREE SIGN
    '\u2219'  # 0x00f9 -> BULLET OPERATOR
    '\xb7'  # 0x00fa -> MIDDLE DOT
    '\u221a'  # 0x00fb -> SQUARE ROOT
    '\u207f'  # 0x00fc -> SUPERSCRIPT LATIN SMALL LETTER N
    '\xb2'  # 0x00fd -> SUPERSCRIPT TWO
    '\u25a0'  # 0x00fe -> BLACK SQUARE
    '\xa0'  # 0x00ff -> NO-BREAK SPACE
)
### Encoding Map
encoding_map = {
0x0000: 0x0000, # NULL
0x0001: 0x0001, # START OF HEADING
0x0002: 0x0002, # START OF TEXT
0x0003: 0x0003, # END OF TEXT
0x0004: 0x0004, # END OF TRANSMISSION
0x0005: 0x0005, # ENQUIRY
0x0006: 0x0006, # ACKNOWLEDGE
0x0007: 0x0007, # BELL
0x0008: 0x0008, # BACKSPACE
0x0009: 0x0009, # HORIZONTAL TABULATION
0x000a: 0x000a, # LINE FEED
0x000b: 0x000b, # VERTICAL TABULATION
0x000c: 0x000c, # FORM FEED
0x000d: 0x000d, # CARRIAGE RETURN
0x000e: 0x000e, # SHIFT OUT
0x000f: 0x000f, # SHIFT IN
0x0010: 0x0010, # DATA LINK ESCAPE
0x0011: 0x0011, # DEVICE CONTROL ONE
0x0012: 0x0012, # DEVICE CONTROL TWO
0x0013: 0x0013, # DEVICE CONTROL THREE
0x0014: 0x0014, # DEVICE CONTROL FOUR
0x0015: 0x0015, # NEGATIVE ACKNOWLEDGE
0x0016: 0x0016, # SYNCHRONOUS IDLE
0x0017: 0x0017, # END OF TRANSMISSION BLOCK
0x0018: 0x0018, # CANCEL
0x0019: 0x0019, # END OF MEDIUM
0x001a: 0x001a, # SUBSTITUTE
0x001b: 0x001b, # ESCAPE
0x001c: 0x001c, # FILE SEPARATOR
0x001d: 0x001d, # GROUP SEPARATOR
0x001e: 0x001e, # RECORD SEPARATOR
0x001f: 0x001f, # UNIT SEPARATOR
0x0020: 0x0020, # SPACE
0x0021: 0x0021, # EXCLAMATION MARK
0x0022: 0x0022, # QUOTATION MARK
0x0023: 0x0023, # NUMBER SIGN
0x0024: 0x0024, # DOLLAR SIGN
0x0025: 0x0025, # PERCENT SIGN
0x0026: 0x0026, # AMPERSAND
0x0027: 0x0027, # APOSTROPHE
0x0028: 0x0028, # LEFT PARENTHESIS
0x0029: 0x0029, # RIGHT PARENTHESIS
0x002a: 0x002a, # ASTERISK
0x002b: 0x002b, # PLUS SIGN
0x002c: 0x002c, # COMMA
0x002d: 0x002d, # HYPHEN-MINUS
0x002e: 0x002e, # FULL STOP
0x002f: 0x002f, # SOLIDUS
0x0030: 0x0030, # DIGIT ZERO
0x0031: 0x0031, # DIGIT ONE
0x0032: 0x0032, # DIGIT TWO
0x0033: 0x0033, # DIGIT THREE
0x0034: 0x0034, # DIGIT FOUR
0x0035: 0x0035, # DIGIT FIVE
0x0036: 0x0036, # DIGIT SIX
0x0037: 0x0037, # DIGIT SEVEN
0x0038: 0x0038, # DIGIT EIGHT
0x0039: 0x0039, # DIGIT NINE
0x003a: 0x003a, # COLON
0x003b: 0x003b, # SEMICOLON
0x003c: 0x003c, # LESS-THAN SIGN
0x003d: 0x003d, # EQUALS SIGN
0x003e: 0x003e, # GREATER-THAN SIGN
0x003f: 0x003f, # QUESTION MARK
0x0040: 0x0040, # COMMERCIAL AT
0x0041: 0x0041, # LATIN CAPITAL LETTER A
0x0042: 0x0042, # LATIN CAPITAL LETTER B
0x0043: 0x0043, # LATIN CAPITAL LETTER C
0x0044: 0x0044, # LATIN CAPITAL LETTER D
0x0045: 0x0045, # LATIN CAPITAL LETTER E
0x0046: 0x0046, # LATIN CAPITAL LETTER F
0x0047: 0x0047, # LATIN CAPITAL LETTER G
0x0048: 0x0048, # LATIN CAPITAL LETTER H
0x0049: 0x0049, # LATIN CAPITAL LETTER I
0x004a: 0x004a, # LATIN CAPITAL LETTER J
0x004b: 0x004b, # LATIN CAPITAL LETTER K
0x004c: 0x004c, # LATIN CAPITAL LETTER L
0x004d: 0x004d, # LATIN CAPITAL LETTER M
0x004e: 0x004e, # LATIN CAPITAL LETTER N
0x004f: 0x004f, # LATIN CAPITAL LETTER O
0x0050: 0x0050, # LATIN CAPITAL LETTER P
0x0051: 0x0051, # LATIN CAPITAL LETTER Q
0x0052: 0x0052, # LATIN CAPITAL LETTER R
0x0053: 0x0053, # LATIN CAPITAL LETTER S
0x0054: 0x0054, # LATIN CAPITAL LETTER T
0x0055: 0x0055, # LATIN CAPITAL LETTER U
0x0056: 0x0056, # LATIN CAPITAL LETTER V
0x0057: 0x0057, # LATIN CAPITAL LETTER W
0x0058: 0x0058, # LATIN CAPITAL LETTER X
0x0059: 0x0059, # LATIN CAPITAL LETTER Y
0x005a: 0x005a, # LATIN CAPITAL LETTER Z
0x005b: 0x005b, # LEFT SQUARE BRACKET
0x005c: 0x005c, # REVERSE SOLIDUS
0x005d: 0x005d, # RIGHT SQUARE BRACKET
0x005e: 0x005e, # CIRCUMFLEX ACCENT
0x005f: 0x005f, # LOW LINE
0x0060: 0x0060, # GRAVE ACCENT
0x0061: 0x0061, # LATIN SMALL LETTER A
0x0062: 0x0062, # LATIN SMALL LETTER B
0x0063: 0x0063, # LATIN SMALL LETTER C
0x0064: 0x0064, # LATIN SMALL LETTER D
0x0065: 0x0065, # LATIN SMALL LETTER E
0x0066: 0x0066, # LATIN SMALL LETTER F
0x0067: 0x0067, # LATIN SMALL LETTER G
0x0068: 0x0068, # LATIN SMALL LETTER H
0x0069: 0x0069, # LATIN SMALL LETTER I
0x006a: 0x006a, # LATIN SMALL LETTER J
0x006b: 0x006b, # LATIN SMALL LETTER K
0x006c: 0x006c, # LATIN SMALL LETTER L
0x006d: 0x006d, # LATIN SMALL LETTER M
0x006e: 0x006e, # LATIN SMALL LETTER N
0x006f: 0x006f, # LATIN SMALL LETTER O
0x0070: 0x0070, # LATIN SMALL LETTER P
0x0071: 0x0071, # LATIN SMALL LETTER Q
0x0072: 0x0072, # LATIN SMALL LETTER R
0x0073: 0x0073, # LATIN SMALL LETTER S
0x0074: 0x0074, # LATIN SMALL LETTER T
0x0075: 0x0075, # LATIN SMALL LETTER U
0x0076: 0x0076, # LATIN SMALL LETTER V
0x0077: 0x0077, # LATIN SMALL LETTER W
0x0078: 0x0078, # LATIN SMALL LETTER X
0x0079: 0x0079, # LATIN SMALL LETTER Y
0x007a: 0x007a, # LATIN SMALL LETTER Z
0x007b: 0x007b, # LEFT CURLY BRACKET
0x007c: 0x007c, # VERTICAL LINE
0x007d: 0x007d, # RIGHT CURLY BRACKET
0x007e: 0x007e, # TILDE
0x007f: 0x007f, # DELETE
0x00a0: 0x00ff, # NO-BREAK SPACE
0x00a1: 0x00ad, # INVERTED EXCLAMATION MARK
0x00a2: 0x009b, # CENT SIGN
0x00a3: 0x009c, # POUND SIGN
0x00a5: 0x009d, # YEN SIGN
0x00aa: 0x00a6, # FEMININE ORDINAL INDICATOR
0x00ab: 0x00ae, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00ac: 0x00aa, # NOT SIGN
0x00b0: 0x00f8, # DEGREE SIGN
0x00b1: 0x00f1, # PLUS-MINUS SIGN
0x00b2: 0x00fd, # SUPERSCRIPT TWO
0x00b5: 0x00e6, # MICRO SIGN
0x00b7: 0x00fa, # MIDDLE DOT
0x00ba: 0x00a7, # MASCULINE ORDINAL INDICATOR
0x00bb: 0x00af, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00bc: 0x00ac, # VULGAR FRACTION ONE QUARTER
0x00bd: 0x00ab, # VULGAR FRACTION ONE HALF
0x00bf: 0x00a8, # INVERTED QUESTION MARK
0x00c4: 0x008e, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x00c5: 0x008f, # LATIN CAPITAL LETTER A WITH RING ABOVE
0x00c6: 0x0092, # LATIN CAPITAL LIGATURE AE
0x00c7: 0x0080, # LATIN CAPITAL LETTER C WITH CEDILLA
0x00c9: 0x0090, # LATIN CAPITAL LETTER E WITH ACUTE
0x00d1: 0x00a5, # LATIN CAPITAL LETTER N WITH TILDE
0x00d6: 0x0099, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x00dc: 0x009a, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x00df: 0x00e1, # LATIN SMALL LETTER SHARP S
0x00e0: 0x0085, # LATIN SMALL LETTER A WITH GRAVE
0x00e1: 0x00a0, # LATIN SMALL LETTER A WITH ACUTE
0x00e2: 0x0083, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x00e4: 0x0084, # LATIN SMALL LETTER A WITH DIAERESIS
0x00e5: 0x0086, # LATIN SMALL LETTER A WITH RING ABOVE
0x00e6: 0x0091, # LATIN SMALL LIGATURE AE
0x00e7: 0x0087, # LATIN SMALL LETTER C WITH CEDILLA
0x00e8: 0x008a, # LATIN SMALL LETTER E WITH GRAVE
0x00e9: 0x0082, # LATIN SMALL LETTER E WITH ACUTE
0x00ea: 0x0088, # LATIN SMALL LETTER E WITH CIRCUMFLEX
0x00eb: 0x0089, # LATIN SMALL LETTER E WITH DIAERESIS
0x00ec: 0x008d, # LATIN SMALL LETTER I WITH GRAVE
0x00ed: 0x00a1, # LATIN SMALL LETTER I WITH ACUTE
0x00ee: 0x008c, # LATIN SMALL LETTER I WITH CIRCUMFLEX
0x00ef: 0x008b, # LATIN SMALL LETTER I WITH DIAERESIS
0x00f1: 0x00a4, # LATIN SMALL LETTER N WITH TILDE
0x00f2: 0x0095, # LATIN SMALL LETTER O WITH GRAVE
0x00f3: 0x00a2, # LATIN SMALL LETTER O WITH ACUTE
0x00f4: 0x0093, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x00f6: 0x0094, # LATIN SMALL LETTER O WITH DIAERESIS
0x00f7: 0x00f6, # DIVISION SIGN
0x00f9: 0x0097, # LATIN SMALL LETTER U WITH GRAVE
0x00fa: 0x00a3, # LATIN SMALL LETTER U WITH ACUTE
0x00fb: 0x0096, # LATIN SMALL LETTER U WITH CIRCUMFLEX
0x00fc: 0x0081, # LATIN SMALL LETTER U WITH DIAERESIS
0x00ff: 0x0098, # LATIN SMALL LETTER Y WITH DIAERESIS
0x0192: 0x009f, # LATIN SMALL LETTER F WITH HOOK
0x0393: 0x00e2, # GREEK CAPITAL LETTER GAMMA
0x0398: 0x00e9, # GREEK CAPITAL LETTER THETA
0x03a3: 0x00e4, # GREEK CAPITAL LETTER SIGMA
0x03a6: 0x00e8, # GREEK CAPITAL LETTER PHI
0x03a9: 0x00ea, # GREEK CAPITAL LETTER OMEGA
0x03b1: 0x00e0, # GREEK SMALL LETTER ALPHA
0x03b4: 0x00eb, # GREEK SMALL LETTER DELTA
0x03b5: 0x00ee, # GREEK SMALL LETTER EPSILON
0x03c0: 0x00e3, # GREEK SMALL LETTER PI
0x03c3: 0x00e5, # GREEK SMALL LETTER SIGMA
0x03c4: 0x00e7, # GREEK SMALL LETTER TAU
0x03c6: 0x00ed, # GREEK SMALL LETTER PHI
0x207f: 0x00fc, # SUPERSCRIPT LATIN SMALL LETTER N
0x20a7: 0x009e, # PESETA SIGN
0x2219: 0x00f9, # BULLET OPERATOR
0x221a: 0x00fb, # SQUARE ROOT
0x221e: 0x00ec, # INFINITY
0x2229: 0x00ef, # INTERSECTION
0x2248: 0x00f7, # ALMOST EQUAL TO
0x2261: 0x00f0, # IDENTICAL TO
0x2264: 0x00f3, # LESS-THAN OR EQUAL TO
0x2265: 0x00f2, # GREATER-THAN OR EQUAL TO
0x2310: 0x00a9, # REVERSED NOT SIGN
0x2320: 0x00f4, # TOP HALF INTEGRAL
0x2321: 0x00f5, # BOTTOM HALF INTEGRAL
0x2500: 0x00c4, # BOX DRAWINGS LIGHT HORIZONTAL
0x2502: 0x00b3, # BOX DRAWINGS LIGHT VERTICAL
0x250c: 0x00da, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x2510: 0x00bf, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x2514: 0x00c0, # BOX DRAWINGS LIGHT UP AND RIGHT
0x2518: 0x00d9, # BOX DRAWINGS LIGHT UP AND LEFT
0x251c: 0x00c3, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x2524: 0x00b4, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x252c: 0x00c2, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x2534: 0x00c1, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x253c: 0x00c5, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x2550: 0x00cd, # BOX DRAWINGS DOUBLE HORIZONTAL
0x2551: 0x00ba, # BOX DRAWINGS DOUBLE VERTICAL
0x2552: 0x00d5, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
0x2553: 0x00d6, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
0x2554: 0x00c9, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x2555: 0x00b8, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
0x2556: 0x00b7, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
0x2557: 0x00bb, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x2558: 0x00d4, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
0x2559: 0x00d3, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
0x255a: 0x00c8, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x255b: 0x00be, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
0x255c: 0x00bd, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
0x255d: 0x00bc, # BOX DRAWINGS DOUBLE UP AND LEFT
0x255e: 0x00c6, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
0x255f: 0x00c7, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
0x2560: 0x00cc, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x2561: 0x00b5, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
0x2562: 0x00b6, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
0x2563: 0x00b9, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x2564: 0x00d1, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
0x2565: 0x00d2, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
0x2566: 0x00cb, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x2567: 0x00cf, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
0x2568: 0x00d0, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
0x2569: 0x00ca, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x256a: 0x00d8, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
0x256b: 0x00d7, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
0x256c: 0x00ce, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x2580: 0x00df, # UPPER HALF BLOCK
0x2584: 0x00dc, # LOWER HALF BLOCK
0x2588: 0x00db, # FULL BLOCK
0x258c: 0x00dd, # LEFT HALF BLOCK
0x2590: 0x00de, # RIGHT HALF BLOCK
0x2591: 0x00b0, # LIGHT SHADE
0x2592: 0x00b1, # MEDIUM SHADE
0x2593: 0x00b2, # DARK SHADE
0x25a0: 0x00fe, # BLACK SQUARE
}<|fim▁end|> | '\xe8' # 0x008a -> LATIN SMALL LETTER E WITH GRAVE
'\xef' # 0x008b -> LATIN SMALL LETTER I WITH DIAERESIS |
<|file_name|>second.py<|end_file_name|><|fim▁begin|>from flask import Flask
from flask.ext.admin import Admin, BaseView, expose<|fim▁hole|>
class MyView(BaseView):
@expose('/')
def index(self):
return self.render('index.html')
app = Flask(__name__)
admin = Admin(app)
admin.add_view(MyView(name='Hello'))
app.run()<|fim▁end|> | |
<|file_name|>basic_socket.hpp<|end_file_name|><|fim▁begin|>//
// basic_socket.hpp
// ~~~~~~~~~~~~~~~~
//
// Copyright (c) 2003-2015 Christopher M. Kohlhoff (chris at kohlhoff dot com)
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
#ifndef ASIO_BASIC_SOCKET_HPP
#define ASIO_BASIC_SOCKET_HPP
#if defined(_MSC_VER) && (_MSC_VER >= 1200)
# pragma once
#endif // defined(_MSC_VER) && (_MSC_VER >= 1200)
#include "asio/detail/config.hpp"
#include "asio/async_result.hpp"
#include "asio/basic_io_object.hpp"
#include "asio/detail/handler_type_requirements.hpp"
#include "asio/detail/throw_error.hpp"
#include "asio/detail/type_traits.hpp"
#include "asio/error.hpp"
#include "asio/socket_base.hpp"
#include "asio/detail/push_options.hpp"
namespace asio {
/// Provides socket functionality.
/**
* The basic_socket class template provides functionality that is common to both
* stream-oriented and datagram-oriented sockets.
*
* @par Thread Safety
* @e Distinct @e objects: Safe.@n
* @e Shared @e objects: Unsafe.
*/
template <typename Protocol, typename SocketService>
class basic_socket
: public basic_io_object<SocketService>,
public socket_base
{
public:
  /// (Deprecated: Use native_handle_type.) The native representation of a
  /// socket.
  typedef typename SocketService::native_handle_type native_type;
  /// The native representation of a socket, as defined by the service (e.g. a
  /// POSIX file descriptor or a Windows SOCKET handle).
  typedef typename SocketService::native_handle_type native_handle_type;
  /// The protocol type.
  typedef Protocol protocol_type;
  /// The endpoint type used to address peers for this protocol.
  typedef typename Protocol::endpoint endpoint_type;
  /// A basic_socket is always the lowest layer in a layered stack.
  typedef basic_socket<Protocol, SocketService> lowest_layer_type;
/// Construct a basic_socket without opening it.
/**
* This constructor creates a socket without opening it.
*
* @param io_service The io_service object that the socket will use to
* dispatch handlers for any asynchronous operations performed on the socket.
*/
explicit basic_socket(asio::io_service& io_service)
: basic_io_object<SocketService>(io_service)
{
}
/// Construct and open a basic_socket.
/**
* This constructor creates and opens a socket.
*
* @param io_service The io_service object that the socket will use to
* dispatch handlers for any asynchronous operations performed on the socket.
*
* @param protocol An object specifying protocol parameters to be used.
*
* @throws asio::system_error Thrown on failure.
*/
basic_socket(asio::io_service& io_service,
const protocol_type& protocol)
: basic_io_object<SocketService>(io_service)
{
asio::error_code ec;
this->get_service().open(this->get_implementation(), protocol, ec);
asio::detail::throw_error(ec, "open");
}
/// Construct a basic_socket, opening it and binding it to the given local
/// endpoint.
/**
* This constructor creates a socket and automatically opens it bound to the
* specified endpoint on the local machine. The protocol used is the protocol
* associated with the given endpoint.
*
* @param io_service The io_service object that the socket will use to
* dispatch handlers for any asynchronous operations performed on the socket.
*
* @param endpoint An endpoint on the local machine to which the socket will
* be bound.
*
* @throws asio::system_error Thrown on failure.
*/
basic_socket(asio::io_service& io_service,
const endpoint_type& endpoint)
: basic_io_object<SocketService>(io_service)
{
asio::error_code ec;
const protocol_type protocol = endpoint.protocol();
this->get_service().open(this->get_implementation(), protocol, ec);
asio::detail::throw_error(ec, "open");
this->get_service().bind(this->get_implementation(), endpoint, ec);
asio::detail::throw_error(ec, "bind");
}
/// Construct a basic_socket on an existing native socket.
/**
* This constructor creates a socket object to hold an existing native socket.
*
* @param io_service The io_service object that the socket will use to
* dispatch handlers for any asynchronous operations performed on the socket.
*
* @param protocol An object specifying protocol parameters to be used.
*
* @param native_socket A native socket.
*
* @throws asio::system_error Thrown on failure.
*/
basic_socket(asio::io_service& io_service,
const protocol_type& protocol, const native_handle_type& native_socket)
: basic_io_object<SocketService>(io_service)
{
asio::error_code ec;
this->get_service().assign(this->get_implementation(),
protocol, native_socket, ec);
asio::detail::throw_error(ec, "assign");
}
#if defined(ASIO_HAS_MOVE) || defined(GENERATING_DOCUMENTATION)
  /// Move-construct a basic_socket from another.
  /**
   * Transfers the underlying implementation from @c other into this object.
   *
   * @param other The other basic_socket object from which the move will
   * occur.
   *
   * @note Following the move, the moved-from object is in the same state as if
   * constructed using the @c basic_socket(io_service&) constructor.
   */
  basic_socket(basic_socket&& other)
    : basic_io_object<SocketService>(
        ASIO_MOVE_CAST(basic_socket)(other))
  {
  }
  /// Move-assign a basic_socket from another.
  /**
   * Transfers the underlying implementation from @c other into this object.
   *
   * @param other The other basic_socket object from which the move will
   * occur.
   *
   * @note Following the move, the moved-from object is in the same state as if
   * constructed using the @c basic_socket(io_service&) constructor.
   */
  basic_socket& operator=(basic_socket&& other)
  {
    basic_io_object<SocketService>::operator=(
        ASIO_MOVE_CAST(basic_socket)(other));
    return *this;
  }
  // All sockets befriend each other so that the converting move operations
  // below can reach the other socket's implementation object directly.
  template <typename Protocol1, typename SocketService1>
  friend class basic_socket;
  /// Move-construct a basic_socket from a socket of another protocol type.
  /**
   * Participates in overload resolution only when Protocol1 is convertible to
   * Protocol.
   *
   * @param other The other basic_socket object from which the move will
   * occur.
   *
   * @note Following the move, the moved-from object is in the same state as if
   * constructed using the @c basic_socket(io_service&) constructor.
   */
  template <typename Protocol1, typename SocketService1>
  basic_socket(basic_socket<Protocol1, SocketService1>&& other,
      typename enable_if<is_convertible<Protocol1, Protocol>::value>::type* = 0)
    : basic_io_object<SocketService>(other.get_io_service())
  {
    this->get_service().template converting_move_construct<Protocol1>(
        this->get_implementation(), other.get_implementation());
  }
  /// Move-assign a basic_socket from a socket of another protocol type.
  /**
   * Participates in overload resolution only when Protocol1 is convertible to
   * Protocol.
   *
   * @param other The other basic_socket object from which the move will
   * occur.
   *
   * @note Following the move, the moved-from object is in the same state as if
   * constructed using the @c basic_socket(io_service&) constructor.
   */
  template <typename Protocol1, typename SocketService1>
  typename enable_if<is_convertible<Protocol1, Protocol>::value,
      basic_socket>::type& operator=(
        basic_socket<Protocol1, SocketService1>&& other)
  {
    // Convert into a temporary of our own type first, then move-assign
    // from that temporary via the same-type overload.
    basic_socket converted(ASIO_MOVE_CAST2(basic_socket<
          Protocol1, SocketService1>)(other));
    basic_io_object<SocketService>::operator=(
        ASIO_MOVE_CAST(basic_socket)(converted));
    return *this;
  }
#endif // defined(ASIO_HAS_MOVE) || defined(GENERATING_DOCUMENTATION)
/// Get a reference to the lowest layer.
/**
* This function returns a reference to the lowest layer in a stack of
* layers. Since a basic_socket cannot contain any further layers, it simply
* returns a reference to itself.
*
* @return A reference to the lowest layer in the stack of layers. Ownership
* is not transferred to the caller.
*/
lowest_layer_type& lowest_layer()
{
return *this;
}
/// Get a const reference to the lowest layer.
/**
* This function returns a const reference to the lowest layer in a stack of
* layers. Since a basic_socket cannot contain any further layers, it simply
* returns a reference to itself.
*
* @return A const reference to the lowest layer in the stack of layers.
* Ownership is not transferred to the caller.
*/
const lowest_layer_type& lowest_layer() const
{
return *this;
}
/// Open the socket using the specified protocol.
/**
* This function opens the socket so that it will use the specified protocol.
*
* @param protocol An object specifying protocol parameters to be used.
*
* @throws asio::system_error Thrown on failure.
*
* @par Example
* @code
* asio::ip::tcp::socket socket(io_service);
* socket.open(asio::ip::tcp::v4());
* @endcode
*/
void open(const protocol_type& protocol = protocol_type())
{
asio::error_code ec;
this->get_service().open(this->get_implementation(), protocol, ec);
asio::detail::throw_error(ec, "open");
}
/// Open the socket using the specified protocol.
/**
* This function opens the socket so that it will use the specified protocol.
*
* @param protocol An object specifying which protocol is to be used.
*
* @param ec Set to indicate what error occurred, if any.
*
* @par Example
* @code
* asio::ip::tcp::socket socket(io_service);
* asio::error_code ec;
* socket.open(asio::ip::tcp::v4(), ec);
* if (ec)
* {
* // An error occurred.
* }
* @endcode
*/
asio::error_code open(const protocol_type& protocol,
asio::error_code& ec)
{
return this->get_service().open(this->get_implementation(), protocol, ec);
}
  /// Assign an existing native socket to the socket.
  /**
   * This function opens the socket to hold an existing native socket.
   *
   * @param protocol An object specifying which protocol is to be used.
   *
   * @param native_socket A native socket.
   *
   * @throws asio::system_error Thrown on failure.
   */
  void assign(const protocol_type& protocol,
      const native_handle_type& native_socket)
  {
    asio::error_code ec;
    this->get_service().assign(this->get_implementation(),
        protocol, native_socket, ec);
    asio::detail::throw_error(ec, "assign");
  }
  /// Assign an existing native socket to the socket.
  /**
   * This function opens the socket to hold an existing native socket.
   *
   * @param protocol An object specifying which protocol is to be used.
   *
   * @param native_socket A native socket.
   *
   * @param ec Set to indicate what error occurred, if any.
   */
  asio::error_code assign(const protocol_type& protocol,
      const native_handle_type& native_socket, asio::error_code& ec)
  {
    return this->get_service().assign(this->get_implementation(),
        protocol, native_socket, ec);
  }
  /// Determine whether the socket is open.
  /**
   * @return @c true if the socket has been opened and not yet closed.
   */
  bool is_open() const
  {
    return this->get_service().is_open(this->get_implementation());
  }
/// Close the socket.
/**
* This function is used to close the socket. Any asynchronous send, receive
* or connect operations will be cancelled immediately, and will complete
* with the asio::error::operation_aborted error.
*
* @throws asio::system_error Thrown on failure. Note that, even if
* the function indicates an error, the underlying descriptor is closed.
*
* @note For portable behaviour with respect to graceful closure of a
* connected socket, call shutdown() before closing the socket.
*/
void close()
{
asio::error_code ec;
this->get_service().close(this->get_implementation(), ec);
asio::detail::throw_error(ec, "close");
}
/// Close the socket.
/**
* This function is used to close the socket. Any asynchronous send, receive
* or connect operations will be cancelled immediately, and will complete
* with the asio::error::operation_aborted error.
*
* @param ec Set to indicate what error occurred, if any. Note that, even if
* the function indicates an error, the underlying descriptor is closed.
*
* @par Example
* @code
* asio::ip::tcp::socket socket(io_service);
* ...
* asio::error_code ec;
* socket.close(ec);
* if (ec)
* {
* // An error occurred.
* }
* @endcode
*
* @note For portable behaviour with respect to graceful closure of a
* connected socket, call shutdown() before closing the socket.
*/
asio::error_code close(asio::error_code& ec)
{
return this->get_service().close(this->get_implementation(), ec);
}
/// (Deprecated: Use native_handle().) Get the native socket representation.
/**
* This function may be used to obtain the underlying representation of the
* socket. This is intended to allow access to native socket functionality
* that is not otherwise provided.
*/
native_type native()
{
return this->get_service().native_handle(this->get_implementation());
}
/// Get the native socket representation.
/**
* This function may be used to obtain the underlying representation of the
* socket. This is intended to allow access to native socket functionality
* that is not otherwise provided.
*/
native_handle_type native_handle()
{
return this->get_service().native_handle(this->get_implementation());
}
/// Cancel all asynchronous operations associated with the socket.
/**
* This function causes all outstanding asynchronous connect, send and receive
* operations to finish immediately, and the handlers for cancelled operations
* will be passed the asio::error::operation_aborted error.
*
* @throws asio::system_error Thrown on failure.
*
* @note Calls to cancel() will always fail with
* asio::error::operation_not_supported when run on Windows XP, Windows
* Server 2003, and earlier versions of Windows, unless
* ASIO_ENABLE_CANCELIO is defined. However, the CancelIo function has
* two issues that should be considered before enabling its use:
*
* @li It will only cancel asynchronous operations that were initiated in the
* current thread.
*
* @li It can appear to complete without error, but the request to cancel the
* unfinished operations may be silently ignored by the operating system.
* Whether it works or not seems to depend on the drivers that are installed.
*
* For portable cancellation, consider using one of the following
* alternatives:
*
* @li Disable asio's I/O completion port backend by defining
* ASIO_DISABLE_IOCP.
*
* @li Use the close() function to simultaneously cancel the outstanding
* operations and close the socket.
*
* When running on Windows Vista, Windows Server 2008, and later, the
* CancelIoEx function is always used. This function does not have the
* problems described above.
*/
#if defined(ASIO_MSVC) && (ASIO_MSVC >= 1400) \
&& (!defined(_WIN32_WINNT) || _WIN32_WINNT < 0x0600) \
&& !defined(ASIO_ENABLE_CANCELIO)
__declspec(deprecated("By default, this function always fails with "
"operation_not_supported when used on Windows XP, Windows Server 2003, "
"or earlier. Consult documentation for details."))
#endif
void cancel()
{
asio::error_code ec;
this->get_service().cancel(this->get_implementation(), ec);
asio::detail::throw_error(ec, "cancel");
}
/// Cancel all asynchronous operations associated with the socket.
/**
* This function causes all outstanding asynchronous connect, send and receive
* operations to finish immediately, and the handlers for cancelled operations
* will be passed the asio::error::operation_aborted error.
*
* @param ec Set to indicate what error occurred, if any.
*
* @note Calls to cancel() will always fail with
* asio::error::operation_not_supported when run on Windows XP, Windows
* Server 2003, and earlier versions of Windows, unless
* ASIO_ENABLE_CANCELIO is defined. However, the CancelIo function has
* two issues that should be considered before enabling its use:
*
* @li It will only cancel asynchronous operations that were initiated in the
* current thread.
*
* @li It can appear to complete without error, but the request to cancel the
* unfinished operations may be silently ignored by the operating system.
* Whether it works or not seems to depend on the drivers that are installed.
*
* For portable cancellation, consider using one of the following
* alternatives:
*
* @li Disable asio's I/O completion port backend by defining
* ASIO_DISABLE_IOCP.
*
* @li Use the close() function to simultaneously cancel the outstanding
* operations and close the socket.
*
* When running on Windows Vista, Windows Server 2008, and later, the
* CancelIoEx function is always used. This function does not have the
* problems described above.
*/
#if defined(ASIO_MSVC) && (ASIO_MSVC >= 1400) \
&& (!defined(_WIN32_WINNT) || _WIN32_WINNT < 0x0600) \
&& !defined(ASIO_ENABLE_CANCELIO)
__declspec(deprecated("By default, this function always fails with "
"operation_not_supported when used on Windows XP, Windows Server 2003, "
"or earlier. Consult documentation for details."))
#endif
asio::error_code cancel(asio::error_code& ec)
{
return this->get_service().cancel(this->get_implementation(), ec);
}
/// Determine whether the socket is at the out-of-band data mark.
/**
* This function is used to check whether the socket input is currently
* positioned at the out-of-band data mark.
*
* @return A bool indicating whether the socket is at the out-of-band data
* mark.
*
* @throws asio::system_error Thrown on failure.
*/
bool at_mark() const
{
asio::error_code ec;
bool b = this->get_service().at_mark(this->get_implementation(), ec);
asio::detail::throw_error(ec, "at_mark");
return b;
}
/// Determine whether the socket is at the out-of-band data mark.
/**
* This function is used to check whether the socket input is currently
* positioned at the out-of-band data mark.
*
* @param ec Set to indicate what error occurred, if any.
*
* @return A bool indicating whether the socket is at the out-of-band data
* mark.
*/
bool at_mark(asio::error_code& ec) const
{
return this->get_service().at_mark(this->get_implementation(), ec);
}
/// Determine the number of bytes available for reading.
/**
* This function is used to determine the number of bytes that may be read
* without blocking.
*
* @return The number of bytes that may be read without blocking, or 0 if an
* error occurs.
*
* @throws asio::system_error Thrown on failure.
*/
std::size_t available() const
{
asio::error_code ec;
std::size_t s = this->get_service().available(
this->get_implementation(), ec);
asio::detail::throw_error(ec, "available");
return s;
}
/// Determine the number of bytes available for reading.
/**
* This function is used to determine the number of bytes that may be read
* without blocking.
*
* @param ec Set to indicate what error occurred, if any.
*
* @return The number of bytes that may be read without blocking, or 0 if an
* error occurs.
*/
std::size_t available(asio::error_code& ec) const
{
return this->get_service().available(this->get_implementation(), ec);
}
/// Bind the socket to the given local endpoint.
/**
* This function binds the socket to the specified endpoint on the local
* machine.
*
* @param endpoint An endpoint on the local machine to which the socket will
* be bound.
*
* @throws asio::system_error Thrown on failure.
*
* @par Example
* @code
* asio::ip::tcp::socket socket(io_service);
* socket.open(asio::ip::tcp::v4());
* socket.bind(asio::ip::tcp::endpoint(
* asio::ip::tcp::v4(), 12345));
* @endcode
*/
void bind(const endpoint_type& endpoint)
{
asio::error_code ec;
this->get_service().bind(this->get_implementation(), endpoint, ec);
asio::detail::throw_error(ec, "bind");
}
/// Bind the socket to the given local endpoint.
/**
* This function binds the socket to the specified endpoint on the local
* machine.
*
* @param endpoint An endpoint on the local machine to which the socket will
* be bound.
*
* @param ec Set to indicate what error occurred, if any.
*
* @par Example
* @code
* asio::ip::tcp::socket socket(io_service);
* socket.open(asio::ip::tcp::v4());
* asio::error_code ec;
* socket.bind(asio::ip::tcp::endpoint(
* asio::ip::tcp::v4(), 12345), ec);
* if (ec)
* {
* // An error occurred.
* }
* @endcode
*/
asio::error_code bind(const endpoint_type& endpoint,
asio::error_code& ec)
{
return this->get_service().bind(this->get_implementation(), endpoint, ec);
}
/// Connect the socket to the specified endpoint.
/**
* This function is used to connect a socket to the specified remote endpoint.
* The function call will block until the connection is successfully made or
* an error occurs.
*
* The socket is automatically opened if it is not already open. If the
* connect fails, and the socket was automatically opened, the socket is
* not returned to the closed state.
*
* @param peer_endpoint The remote endpoint to which the socket will be
* connected.
*
* @throws asio::system_error Thrown on failure.
*
* @par Example
* @code
* asio::ip::tcp::socket socket(io_service);
* asio::ip::tcp::endpoint endpoint(
* asio::ip::address::from_string("1.2.3.4"), 12345);
* socket.connect(endpoint);
* @endcode
*/
void connect(const endpoint_type& peer_endpoint)
{
asio::error_code ec;
if (!is_open())
{
this->get_service().open(this->get_implementation(),
peer_endpoint.protocol(), ec);
asio::detail::throw_error(ec, "connect");
}
this->get_service().connect(this->get_implementation(), peer_endpoint, ec);
asio::detail::throw_error(ec, "connect");
}
/// Connect the socket to the specified endpoint.
/**
* This function is used to connect a socket to the specified remote endpoint.
* The function call will block until the connection is successfully made or
* an error occurs.
*
* The socket is automatically opened if it is not already open. If the
* connect fails, and the socket was automatically opened, the socket is
* not returned to the closed state.
*
* @param peer_endpoint The remote endpoint to which the socket will be
* connected.
*
* @param ec Set to indicate what error occurred, if any.
*
* @par Example
* @code
* asio::ip::tcp::socket socket(io_service);
* asio::ip::tcp::endpoint endpoint(
* asio::ip::address::from_string("1.2.3.4"), 12345);
* asio::error_code ec;
* socket.connect(endpoint, ec);
* if (ec)
* {
* // An error occurred.
* }
* @endcode
*/
asio::error_code connect(const endpoint_type& peer_endpoint,
asio::error_code& ec)
{
if (!is_open())
{
if (this->get_service().open(this->get_implementation(),
peer_endpoint.protocol(), ec))
{
return ec;
}
}
return this->get_service().connect(
this->get_implementation(), peer_endpoint, ec);
}
  /// Start an asynchronous connect.
  /**
   * This function is used to asynchronously connect a socket to the specified
   * remote endpoint. The function call always returns immediately.
   *
   * The socket is automatically opened if it is not already open. If the
   * connect fails, and the socket was automatically opened, the socket is
   * not returned to the closed state.
   *
   * @param peer_endpoint The remote endpoint to which the socket will be
   * connected. Copies will be made of the endpoint object as required.
   *
   * @param handler The handler to be called when the connection operation
   * completes. Copies will be made of the handler as required. The function
   * signature of the handler must be:
   * @code void handler(
   *   const asio::error_code& error // Result of operation
   * ); @endcode
   * Regardless of whether the asynchronous operation completes immediately or
   * not, the handler will not be invoked from within this function. Invocation
   * of the handler will be performed in a manner equivalent to using
   * asio::io_service::post().
   *
   * @par Example
   * @code
   * void connect_handler(const asio::error_code& error)
   * {
   *   if (!error)
   *   {
   *     // Connect succeeded.
   *   }
   * }
   *
   * ...
   *
   * asio::ip::tcp::socket socket(io_service);
   * asio::ip::tcp::endpoint endpoint(
   *     asio::ip::address::from_string("1.2.3.4"), 12345);
   * socket.async_connect(endpoint, connect_handler);
   * @endcode
   */
  template <typename ConnectHandler>
  ASIO_INITFN_RESULT_TYPE(ConnectHandler,
      void (asio::error_code))
  async_connect(const endpoint_type& peer_endpoint,
      ASIO_MOVE_ARG(ConnectHandler) handler)
  {
    // If you get an error on the following line it means that your handler does
    // not meet the documented type requirements for a ConnectHandler.
    ASIO_CONNECT_HANDLER_CHECK(ConnectHandler, handler) type_check;
    if (!is_open())
    {
      asio::error_code ec;
      const protocol_type protocol = peer_endpoint.protocol();
      if (this->get_service().open(this->get_implementation(), protocol, ec))
      {
        // The implicit open failed. The handler must still be invoked, and
        // never from within this function, so post it to the io_service with
        // the open error as the result.
        detail::async_result_init<
          ConnectHandler, void (asio::error_code)> init(
            ASIO_MOVE_CAST(ConnectHandler)(handler));
        this->get_io_service().post(
            asio::detail::bind_handler(
              ASIO_MOVE_CAST(ASIO_HANDLER_TYPE(
                ConnectHandler, void (asio::error_code)))(
                  init.handler), ec));
        return init.result.get();
      }
    }
    // Socket is open: hand the operation to the service implementation.
    return this->get_service().async_connect(this->get_implementation(),
        peer_endpoint, ASIO_MOVE_CAST(ConnectHandler)(handler));
  }
/// Set an option on the socket.
/**
* This function is used to set an option on the socket.
*
* @param option The new option value to be set on the socket.
*
* @throws asio::system_error Thrown on failure.
*
* @sa SettableSocketOption @n
* asio::socket_base::broadcast @n
* asio::socket_base::do_not_route @n
* asio::socket_base::keep_alive @n
* asio::socket_base::linger @n
* asio::socket_base::receive_buffer_size @n
* asio::socket_base::receive_low_watermark @n
* asio::socket_base::reuse_address @n
* asio::socket_base::send_buffer_size @n
* asio::socket_base::send_low_watermark @n
* asio::ip::multicast::join_group @n
* asio::ip::multicast::leave_group @n
* asio::ip::multicast::enable_loopback @n
* asio::ip::multicast::outbound_interface @n
* asio::ip::multicast::hops @n
* asio::ip::tcp::no_delay
*
* @par Example
* Setting the IPPROTO_TCP/TCP_NODELAY option:
* @code
* asio::ip::tcp::socket socket(io_service);
* ...
* asio::ip::tcp::no_delay option(true);
* socket.set_option(option);
* @endcode
*/
template <typename SettableSocketOption>
void set_option(const SettableSocketOption& option)
{
asio::error_code ec;
this->get_service().set_option(this->get_implementation(), option, ec);
asio::detail::throw_error(ec, "set_option");
}
/// Set an option on the socket.
/**
* This function is used to set an option on the socket.
*
* @param option The new option value to be set on the socket.
*
* @param ec Set to indicate what error occurred, if any.
*
* @sa SettableSocketOption @n
* asio::socket_base::broadcast @n
* asio::socket_base::do_not_route @n
* asio::socket_base::keep_alive @n
* asio::socket_base::linger @n
* asio::socket_base::receive_buffer_size @n
* asio::socket_base::receive_low_watermark @n
* asio::socket_base::reuse_address @n
* asio::socket_base::send_buffer_size @n
* asio::socket_base::send_low_watermark @n
* asio::ip::multicast::join_group @n
* asio::ip::multicast::leave_group @n
* asio::ip::multicast::enable_loopback @n
* asio::ip::multicast::outbound_interface @n
* asio::ip::multicast::hops @n
* asio::ip::tcp::no_delay
*
* @par Example
* Setting the IPPROTO_TCP/TCP_NODELAY option:
* @code
* asio::ip::tcp::socket socket(io_service);
* ...
* asio::ip::tcp::no_delay option(true);
* asio::error_code ec;
* socket.set_option(option, ec);
* if (ec)
* {
* // An error occurred.
* }
* @endcode
*/
template <typename SettableSocketOption>
asio::error_code set_option(const SettableSocketOption& option,
asio::error_code& ec)
{
return this->get_service().set_option(
this->get_implementation(), option, ec);
}
/// Get an option from the socket.
/**
* This function is used to get the current value of an option on the socket.
*
* @param option The option value to be obtained from the socket.
*
* @throws asio::system_error Thrown on failure.
*
* @sa GettableSocketOption @n
* asio::socket_base::broadcast @n
* asio::socket_base::do_not_route @n
* asio::socket_base::keep_alive @n
* asio::socket_base::linger @n
* asio::socket_base::receive_buffer_size @n
* asio::socket_base::receive_low_watermark @n
* asio::socket_base::reuse_address @n
* asio::socket_base::send_buffer_size @n
* asio::socket_base::send_low_watermark @n
* asio::ip::multicast::join_group @n
* asio::ip::multicast::leave_group @n
* asio::ip::multicast::enable_loopback @n
* asio::ip::multicast::outbound_interface @n
* asio::ip::multicast::hops @n
* asio::ip::tcp::no_delay
*<|fim▁hole|> * asio::ip::tcp::socket socket(io_service);
* ...
* asio::ip::tcp::socket::keep_alive option;
* socket.get_option(option);
* bool is_set = option.value();
* @endcode
*/
template <typename GettableSocketOption>
void get_option(GettableSocketOption& option) const
{
asio::error_code ec;
this->get_service().get_option(this->get_implementation(), option, ec);
asio::detail::throw_error(ec, "get_option");
}
/// Get an option from the socket.
/**
* This function is used to get the current value of an option on the socket.
*
* @param option The option value to be obtained from the socket.
*
* @param ec Set to indicate what error occurred, if any.
*
* @sa GettableSocketOption @n
* asio::socket_base::broadcast @n
* asio::socket_base::do_not_route @n
* asio::socket_base::keep_alive @n
* asio::socket_base::linger @n
* asio::socket_base::receive_buffer_size @n
* asio::socket_base::receive_low_watermark @n
* asio::socket_base::reuse_address @n
* asio::socket_base::send_buffer_size @n
* asio::socket_base::send_low_watermark @n
* asio::ip::multicast::join_group @n
* asio::ip::multicast::leave_group @n
* asio::ip::multicast::enable_loopback @n
* asio::ip::multicast::outbound_interface @n
* asio::ip::multicast::hops @n
* asio::ip::tcp::no_delay
*
* @par Example
* Getting the value of the SOL_SOCKET/SO_KEEPALIVE option:
* @code
* asio::ip::tcp::socket socket(io_service);
* ...
* asio::ip::tcp::socket::keep_alive option;
* asio::error_code ec;
* socket.get_option(option, ec);
* if (ec)
* {
* // An error occurred.
* }
* bool is_set = option.value();
* @endcode
*/
template <typename GettableSocketOption>
asio::error_code get_option(GettableSocketOption& option,
asio::error_code& ec) const
{
return this->get_service().get_option(
this->get_implementation(), option, ec);
}
/// Perform an IO control command on the socket.
/**
* This function is used to execute an IO control command on the socket.
*
* @param command The IO control command to be performed on the socket.
*
* @throws asio::system_error Thrown on failure.
*
* @sa IoControlCommand @n
* asio::socket_base::bytes_readable @n
* asio::socket_base::non_blocking_io
*
* @par Example
* Getting the number of bytes ready to read:
* @code
* asio::ip::tcp::socket socket(io_service);
* ...
* asio::ip::tcp::socket::bytes_readable command;
* socket.io_control(command);
* std::size_t bytes_readable = command.get();
* @endcode
*/
template <typename IoControlCommand>
void io_control(IoControlCommand& command)
{
asio::error_code ec;
this->get_service().io_control(this->get_implementation(), command, ec);
asio::detail::throw_error(ec, "io_control");
}
/// Perform an IO control command on the socket.
/**
* This function is used to execute an IO control command on the socket.
*
* @param command The IO control command to be performed on the socket.
*
* @param ec Set to indicate what error occurred, if any.
*
* @sa IoControlCommand @n
* asio::socket_base::bytes_readable @n
* asio::socket_base::non_blocking_io
*
* @par Example
* Getting the number of bytes ready to read:
* @code
* asio::ip::tcp::socket socket(io_service);
* ...
* asio::ip::tcp::socket::bytes_readable command;
* asio::error_code ec;
* socket.io_control(command, ec);
* if (ec)
* {
* // An error occurred.
* }
* std::size_t bytes_readable = command.get();
* @endcode
*/
template <typename IoControlCommand>
asio::error_code io_control(IoControlCommand& command,
asio::error_code& ec)
{
return this->get_service().io_control(
this->get_implementation(), command, ec);
}
/// Gets the non-blocking mode of the socket.
/**
* @returns @c true if the socket's synchronous operations will fail with
* asio::error::would_block if they are unable to perform the requested
* operation immediately. If @c false, synchronous operations will block
* until complete.
*
* @note The non-blocking mode has no effect on the behaviour of asynchronous
* operations. Asynchronous operations will never fail with the error
* asio::error::would_block.
*/
bool non_blocking() const
{
return this->get_service().non_blocking(this->get_implementation());
}
/// Sets the non-blocking mode of the socket.
/**
* @param mode If @c true, the socket's synchronous operations will fail with
* asio::error::would_block if they are unable to perform the requested
* operation immediately. If @c false, synchronous operations will block
* until complete.
*
* @throws asio::system_error Thrown on failure.
*
* @note The non-blocking mode has no effect on the behaviour of asynchronous
* operations. Asynchronous operations will never fail with the error
* asio::error::would_block.
*/
void non_blocking(bool mode)
{
asio::error_code ec;
this->get_service().non_blocking(this->get_implementation(), mode, ec);
asio::detail::throw_error(ec, "non_blocking");
}
/// Sets the non-blocking mode of the socket.
/**
* @param mode If @c true, the socket's synchronous operations will fail with
* asio::error::would_block if they are unable to perform the requested
* operation immediately. If @c false, synchronous operations will block
* until complete.
*
* @param ec Set to indicate what error occurred, if any.
*
* @note The non-blocking mode has no effect on the behaviour of asynchronous
* operations. Asynchronous operations will never fail with the error
* asio::error::would_block.
*/
asio::error_code non_blocking(
bool mode, asio::error_code& ec)
{
return this->get_service().non_blocking(
this->get_implementation(), mode, ec);
}
/// Gets the non-blocking mode of the native socket implementation.
/**
* This function is used to retrieve the non-blocking mode of the underlying
* native socket. This mode has no effect on the behaviour of the socket
* object's synchronous operations.
*
* @returns @c true if the underlying socket is in non-blocking mode and
* direct system calls may fail with asio::error::would_block (or the
* equivalent system error).
*
* @note The current non-blocking mode is cached by the socket object.
* Consequently, the return value may be incorrect if the non-blocking mode
* was set directly on the native socket.
*
* @par Example
* This function is intended to allow the encapsulation of arbitrary
* non-blocking system calls as asynchronous operations, in a way that is
* transparent to the user of the socket object. The following example
* illustrates how Linux's @c sendfile system call might be encapsulated:
* @code template <typename Handler>
* struct sendfile_op
* {
* tcp::socket& sock_;
* int fd_;
* Handler handler_;
* off_t offset_;
* std::size_t total_bytes_transferred_;
*
* // Function call operator meeting WriteHandler requirements.
* // Used as the handler for the async_write_some operation.
* void operator()(asio::error_code ec, std::size_t)
* {
* // Put the underlying socket into non-blocking mode.
* if (!ec)
* if (!sock_.native_non_blocking())
* sock_.native_non_blocking(true, ec);
*
* if (!ec)
* {
* for (;;)
* {
* // Try the system call.
* errno = 0;
* int n = ::sendfile(sock_.native_handle(), fd_, &offset_, 65536);
* ec = asio::error_code(n < 0 ? errno : 0,
* asio::error::get_system_category());
* total_bytes_transferred_ += ec ? 0 : n;
*
* // Retry operation immediately if interrupted by signal.
* if (ec == asio::error::interrupted)
* continue;
*
* // Check if we need to run the operation again.
* if (ec == asio::error::would_block
* || ec == asio::error::try_again)
* {
* // We have to wait for the socket to become ready again.
* sock_.async_write_some(asio::null_buffers(), *this);
* return;
* }
*
* if (ec || n == 0)
* {
* // An error occurred, or we have reached the end of the file.
* // Either way we must exit the loop so we can call the handler.
* break;
* }
*
* // Loop around to try calling sendfile again.
* }
* }
*
* // Pass result back to user's handler.
* handler_(ec, total_bytes_transferred_);
* }
* };
*
* template <typename Handler>
* void async_sendfile(tcp::socket& sock, int fd, Handler h)
* {
* sendfile_op<Handler> op = { sock, fd, h, 0, 0 };
* sock.async_write_some(asio::null_buffers(), op);
* } @endcode
*/
bool native_non_blocking() const
{
return this->get_service().native_non_blocking(this->get_implementation());
}
/// Sets the non-blocking mode of the native socket implementation.
/**
* This function is used to modify the non-blocking mode of the underlying
* native socket. It has no effect on the behaviour of the socket object's
* synchronous operations.
*
* @param mode If @c true, the underlying socket is put into non-blocking
* mode and direct system calls may fail with asio::error::would_block
* (or the equivalent system error).
*
* @throws asio::system_error Thrown on failure. If the @c mode is
* @c false, but the current value of @c non_blocking() is @c true, this
* function fails with asio::error::invalid_argument, as the
* combination does not make sense.
*
* @par Example
* This function is intended to allow the encapsulation of arbitrary
* non-blocking system calls as asynchronous operations, in a way that is
* transparent to the user of the socket object. The following example
* illustrates how Linux's @c sendfile system call might be encapsulated:
* @code template <typename Handler>
* struct sendfile_op
* {
* tcp::socket& sock_;
* int fd_;
* Handler handler_;
* off_t offset_;
* std::size_t total_bytes_transferred_;
*
* // Function call operator meeting WriteHandler requirements.
* // Used as the handler for the async_write_some operation.
* void operator()(asio::error_code ec, std::size_t)
* {
* // Put the underlying socket into non-blocking mode.
* if (!ec)
* if (!sock_.native_non_blocking())
* sock_.native_non_blocking(true, ec);
*
* if (!ec)
* {
* for (;;)
* {
* // Try the system call.
* errno = 0;
* int n = ::sendfile(sock_.native_handle(), fd_, &offset_, 65536);
* ec = asio::error_code(n < 0 ? errno : 0,
* asio::error::get_system_category());
* total_bytes_transferred_ += ec ? 0 : n;
*
* // Retry operation immediately if interrupted by signal.
* if (ec == asio::error::interrupted)
* continue;
*
* // Check if we need to run the operation again.
* if (ec == asio::error::would_block
* || ec == asio::error::try_again)
* {
* // We have to wait for the socket to become ready again.
* sock_.async_write_some(asio::null_buffers(), *this);
* return;
* }
*
* if (ec || n == 0)
* {
* // An error occurred, or we have reached the end of the file.
* // Either way we must exit the loop so we can call the handler.
* break;
* }
*
* // Loop around to try calling sendfile again.
* }
* }
*
* // Pass result back to user's handler.
* handler_(ec, total_bytes_transferred_);
* }
* };
*
* template <typename Handler>
* void async_sendfile(tcp::socket& sock, int fd, Handler h)
* {
* sendfile_op<Handler> op = { sock, fd, h, 0, 0 };
* sock.async_write_some(asio::null_buffers(), op);
* } @endcode
*/
void native_non_blocking(bool mode)
{
asio::error_code ec;
this->get_service().native_non_blocking(
this->get_implementation(), mode, ec);
asio::detail::throw_error(ec, "native_non_blocking");
}
/// Sets the non-blocking mode of the native socket implementation.
/**
* This function is used to modify the non-blocking mode of the underlying
* native socket. It has no effect on the behaviour of the socket object's
* synchronous operations.
*
* @param mode If @c true, the underlying socket is put into non-blocking
* mode and direct system calls may fail with asio::error::would_block
* (or the equivalent system error).
*
* @param ec Set to indicate what error occurred, if any. If the @c mode is
* @c false, but the current value of @c non_blocking() is @c true, this
* function fails with asio::error::invalid_argument, as the
* combination does not make sense.
*
* @par Example
* This function is intended to allow the encapsulation of arbitrary
* non-blocking system calls as asynchronous operations, in a way that is
* transparent to the user of the socket object. The following example
* illustrates how Linux's @c sendfile system call might be encapsulated:
* @code template <typename Handler>
* struct sendfile_op
* {
* tcp::socket& sock_;
* int fd_;
* Handler handler_;
* off_t offset_;
* std::size_t total_bytes_transferred_;
*
* // Function call operator meeting WriteHandler requirements.
* // Used as the handler for the async_write_some operation.
* void operator()(asio::error_code ec, std::size_t)
* {
* // Put the underlying socket into non-blocking mode.
* if (!ec)
* if (!sock_.native_non_blocking())
* sock_.native_non_blocking(true, ec);
*
* if (!ec)
* {
* for (;;)
* {
* // Try the system call.
* errno = 0;
* int n = ::sendfile(sock_.native_handle(), fd_, &offset_, 65536);
* ec = asio::error_code(n < 0 ? errno : 0,
* asio::error::get_system_category());
* total_bytes_transferred_ += ec ? 0 : n;
*
* // Retry operation immediately if interrupted by signal.
* if (ec == asio::error::interrupted)
* continue;
*
* // Check if we need to run the operation again.
* if (ec == asio::error::would_block
* || ec == asio::error::try_again)
* {
* // We have to wait for the socket to become ready again.
* sock_.async_write_some(asio::null_buffers(), *this);
* return;
* }
*
* if (ec || n == 0)
* {
* // An error occurred, or we have reached the end of the file.
* // Either way we must exit the loop so we can call the handler.
* break;
* }
*
* // Loop around to try calling sendfile again.
* }
* }
*
* // Pass result back to user's handler.
* handler_(ec, total_bytes_transferred_);
* }
* };
*
* template <typename Handler>
* void async_sendfile(tcp::socket& sock, int fd, Handler h)
* {
* sendfile_op<Handler> op = { sock, fd, h, 0, 0 };
* sock.async_write_some(asio::null_buffers(), op);
* } @endcode
*/
asio::error_code native_non_blocking(
bool mode, asio::error_code& ec)
{
return this->get_service().native_non_blocking(
this->get_implementation(), mode, ec);
}
/// Get the local endpoint of the socket.
/**
* This function is used to obtain the locally bound endpoint of the socket.
*
* @returns An object that represents the local endpoint of the socket.
*
* @throws asio::system_error Thrown on failure.
*
* @par Example
* @code
* asio::ip::tcp::socket socket(io_service);
* ...
* asio::ip::tcp::endpoint endpoint = socket.local_endpoint();
* @endcode
*/
endpoint_type local_endpoint() const
{
asio::error_code ec;
endpoint_type ep = this->get_service().local_endpoint(
this->get_implementation(), ec);
asio::detail::throw_error(ec, "local_endpoint");
return ep;
}
/// Get the local endpoint of the socket.
/**
* This function is used to obtain the locally bound endpoint of the socket.
*
* @param ec Set to indicate what error occurred, if any.
*
* @returns An object that represents the local endpoint of the socket.
* Returns a default-constructed endpoint object if an error occurred.
*
* @par Example
* @code
* asio::ip::tcp::socket socket(io_service);
* ...
* asio::error_code ec;
* asio::ip::tcp::endpoint endpoint = socket.local_endpoint(ec);
* if (ec)
* {
* // An error occurred.
* }
* @endcode
*/
endpoint_type local_endpoint(asio::error_code& ec) const
{
return this->get_service().local_endpoint(this->get_implementation(), ec);
}
/// Get the remote endpoint of the socket.
/**
* This function is used to obtain the remote endpoint of the socket.
*
* @returns An object that represents the remote endpoint of the socket.
*
* @throws asio::system_error Thrown on failure.
*
* @par Example
* @code
* asio::ip::tcp::socket socket(io_service);
* ...
* asio::ip::tcp::endpoint endpoint = socket.remote_endpoint();
* @endcode
*/
endpoint_type remote_endpoint() const
{
asio::error_code ec;
endpoint_type ep = this->get_service().remote_endpoint(
this->get_implementation(), ec);
asio::detail::throw_error(ec, "remote_endpoint");
return ep;
}
/// Get the remote endpoint of the socket.
/**
* This function is used to obtain the remote endpoint of the socket.
*
* @param ec Set to indicate what error occurred, if any.
*
* @returns An object that represents the remote endpoint of the socket.
* Returns a default-constructed endpoint object if an error occurred.
*
* @par Example
* @code
* asio::ip::tcp::socket socket(io_service);
* ...
* asio::error_code ec;
* asio::ip::tcp::endpoint endpoint = socket.remote_endpoint(ec);
* if (ec)
* {
* // An error occurred.
* }
* @endcode
*/
endpoint_type remote_endpoint(asio::error_code& ec) const
{
return this->get_service().remote_endpoint(this->get_implementation(), ec);
}
/// Disable sends or receives on the socket.
/**
* This function is used to disable send operations, receive operations, or
* both.
*
* @param what Determines what types of operation will no longer be allowed.
*
* @throws asio::system_error Thrown on failure.
*
* @par Example
* Shutting down the send side of the socket:
* @code
* asio::ip::tcp::socket socket(io_service);
* ...
* socket.shutdown(asio::ip::tcp::socket::shutdown_send);
* @endcode
*/
void shutdown(shutdown_type what)
{
asio::error_code ec;
this->get_service().shutdown(this->get_implementation(), what, ec);
asio::detail::throw_error(ec, "shutdown");
}
/// Disable sends or receives on the socket.
/**
* This function is used to disable send operations, receive operations, or
* both.
*
* @param what Determines what types of operation will no longer be allowed.
*
* @param ec Set to indicate what error occurred, if any.
*
* @par Example
* Shutting down the send side of the socket:
* @code
* asio::ip::tcp::socket socket(io_service);
* ...
* asio::error_code ec;
* socket.shutdown(asio::ip::tcp::socket::shutdown_send, ec);
* if (ec)
* {
* // An error occurred.
* }
* @endcode
*/
asio::error_code shutdown(shutdown_type what,
asio::error_code& ec)
{
return this->get_service().shutdown(this->get_implementation(), what, ec);
}
protected:
/// Protected destructor to prevent deletion through this type.
~basic_socket()
{
}
};
} // namespace asio
#include "asio/detail/pop_options.hpp"
#endif // ASIO_BASIC_SOCKET_HPP<|fim▁end|> | * @par Example
* Getting the value of the SOL_SOCKET/SO_KEEPALIVE option:
* @code |
<|file_name|>Router.js<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2015-present, Pavel Aksonov
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*
*/
import React, {
Component,
PropTypes,
} from 'react';
import NavigationExperimental from 'react-native-experimental-navigation';
import Actions, { ActionMap } from './Actions';
import getInitialState from './State';
import Reducer, { findElement } from './Reducer';
import DefaultRenderer from './DefaultRenderer';
import Scene from './Scene';
import * as ActionConst from './ActionConst';
const {
RootContainer: NavigationRootContainer,
} = NavigationExperimental;<|fim▁hole|>};
class Router extends Component {
constructor(props) {
super(props);
this.state = {};
this.renderNavigation = this.renderNavigation.bind(this);
this.handleProps = this.handleProps.bind(this);
}
componentDidMount() {
this.handleProps(this.props);
}
componentWillReceiveProps(props) {
this.handleProps(props);
}
handleProps(props) {
let scenesMap;
if (props.scenes) {
scenesMap = props.scenes;
} else {
let scenes = props.children;
if (Array.isArray(props.children) || props.children.props.component) {
scenes = (
<Scene
key="__root"
hideNav
{...this.props}
>
{props.children}
</Scene>
);
}
scenesMap = Actions.create(scenes, props.wrapBy);
}
// eslint-disable-next-line no-unused-vars
const { children, styles, scenes, reducer, createReducer, ...parentProps } = props;
scenesMap.rootProps = parentProps;
const initialState = getInitialState(scenesMap);
const reducerCreator = props.createReducer || Reducer;
const routerReducer = props.reducer || (
reducerCreator({
initialState,
scenes: scenesMap,
}));
this.setState({ reducer: routerReducer });
}
renderNavigation(navigationState, onNavigate) {
if (!navigationState) {
return null;
}
Actions.get = key => findElement(navigationState, key, ActionConst.REFRESH);
Actions.callback = props => {
const constAction = (props.type && ActionMap[props.type] ? ActionMap[props.type] : null);
if (this.props.dispatch) {
if (constAction) {
this.props.dispatch({ ...props, type: constAction });
} else {
this.props.dispatch(props);
}
}
return (constAction ? onNavigate({ ...props, type: constAction }) : onNavigate(props));
};
return <DefaultRenderer onNavigate={onNavigate} navigationState={navigationState} />;
}
render() {
if (!this.state.reducer) return null;
return (
<NavigationRootContainer
reducer={this.state.reducer}
renderNavigation={this.renderNavigation}
/>
);
}
}
Router.propTypes = propTypes;
export default Router;<|fim▁end|> |
const propTypes = {
dispatch: PropTypes.func, |
<|file_name|>git.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*-
# Copyright (C) 2007, 2009, 2011-2012 J. David Ibáñez <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Import from the Standard Library
import os
from datetime import datetime, timedelta, time
from heapq import heappush, heappop
from multiprocessing import Process
from os.path import abspath, dirname
from uuid import uuid4
# Import from pygit2
from pygit2 import TreeBuilder, GIT_FILEMODE_TREE, init_repository
# Import from itools
from itools.database import Metadata
from itools.database.magic_ import magic_from_buffer
from itools.database.git import open_worktree
from itools.fs import lfs
# Import from here
from catalog import Catalog, _get_xquery, SearchResults, make_catalog
from patchs import PatchsBackend
from registry import register_backend
TEST_DB_WITHOUT_COMMITS = bool(int(os.environ.get('TEST_DB_WITHOUT_COMMITS') or 0))
TEST_DB_DESACTIVATE_GIT = bool(int(os.environ.get('TEST_DB_DESACTIVATE_GIT') or 0))
class Heap(object):
"""
This object behaves very much like a sorted dict, but for security only a
subset of the dict API is exposed:
>>> len(heap)
>>> heap[path] = value
>>> value = heap.get(path)
>>> path, value = heap.popitem()
The keys are relative paths as used in Git trees, like 'a/b/c' (and '' for
the root).
The dictionary is sorted so deeper paths are considered smaller, and so
returned first by 'popitem'. The order relation between two paths of equal
depth is undefined.
This data structure is used by RWDatabase._save_changes to build the tree
objects before commit.
"""
def __init__(self):
self._dict = {}
self._heap = []
def __len__(self):
return len(self._dict)
def get(self, path):
return self._dict.get(path)
def __setitem__(self, path, value):
if path not in self._dict:
n = -path.count('/') if path else 1
heappush(self._heap, (n, path))
self._dict[path] = value
def popitem(self):
key = heappop(self._heap)
path = key[1]
return path, self._dict.pop(path)
class GitBackend(object):
def __init__(self, path, fields, read_only=False):
self.nb_transactions = 0
self.last_transaction_dtime = None
self.path = abspath(path) + '/'
self.fields = fields
self.read_only = read_only
# Open database
self.path_data = '%s/database/' % self.path
# Check if is a folder
self.path_data = '%s/database/' % self.path
if not lfs.is_folder(self.path_data):
error = '"{0}" should be a folder, but it is not'.format(self.path_data)
raise ValueError(error)
# New interface to Git
self.worktree = open_worktree(self.path_data)
# Initialize the database, but chrooted
self.fs = lfs.open(self.path_data)
# Static FS
database_static_path = '{0}/database_static'.format(path)
if not lfs.exists(database_static_path):
self.init_backend_static(path)
self.static_fs = lfs.open(database_static_path)
# Patchs backend
self.patchs_backend = PatchsBackend(path, self.fs, read_only)
# Catalog
self.catalog = self.get_catalog()
@classmethod
def init_backend(cls, path, fields, init=False, soft=False):
# Metadata database
init_repository('{0}/database'.format(path), bare=False)
# Init backend static
cls.init_backend_static(path)
# Make catalog
make_catalog('{0}/catalog'.format(path), fields)
@classmethod
def init_backend_static(cls, path):
# Static database
lfs.make_folder('{0}/database_static'.format(path))
lfs.make_folder('{0}/database_static/.history'.format(path))
#######################################################################
# Database API
#######################################################################
def normalize_key(self, path, __root=None):
# Performance is critical so assume the path is already relative to
# the repository.
key = __root.resolve(path)
if key and key[0] == '.git':
err = "bad '{0}' path, access to the '.git' folder is denied"
raise ValueError(err.format(path))
return '/'.join(key)
def handler_exists(self, key):
fs = self.get_handler_fs_by_key(key)
return fs.exists(key)
def get_handler_names(self, key):
return self.fs.get_names(key)
def get_handler_data(self, key):
if not key:
return None
fs = self.get_handler_fs_by_key(key)
with fs.open(key) as f:
return f.read()
def get_handler_mimetype(self, key):
data = self.get_handler_data(key)
return magic_from_buffer(data)
def handler_is_file(self, key):
fs = self.get_handler_fs_by_key(key)
return fs.is_file(key)
def handler_is_folder(self, key):
fs = self.get_handler_fs_by_key(key)
return fs.is_folder(key)
def get_handler_mtime(self, key):
fs = self.get_handler_fs_by_key(key)
return fs.get_mtime(key)
def save_handler(self, key, handler):
data = handler.to_str()
# Save the file
fs = self.get_handler_fs(handler)
# Write and truncate (calls to "_save_state" must be done with the
# pointer pointing to the beginning)
if not fs.exists(key):
with fs.make_file(key) as f:
f.write(data)
f.truncate(f.tell())
else:
with fs.open(key, 'w') as f:
f.write(data)
f.truncate(f.tell())
# Set dirty = None
handler.timestamp = self.get_handler_mtime(key)
handler.dirty = None
def traverse_resources(self):
raise NotImplementedError
def get_handler_fs(self, handler):
if isinstance(handler, Metadata):
return self.fs
return self.static_fs
def get_handler_fs_by_key(self, key):
if key.endswith('metadata'):
return self.fs
return self.static_fs
def add_handler_into_static_history(self, key):
the_time = datetime.now().strftime('%Y%m%d%H%M%S')
new_key = '.history/{0}.{1}.{2}'.format(key, the_time, uuid4())
parent_path = dirname(new_key)
if not self.static_fs.exists(parent_path):
self.static_fs.make_folder(parent_path)
self.static_fs.copy(key, new_key)
def do_transaction(self, commit_message, data, added, changed, removed, handlers,
docs_to_index, docs_to_unindex):
git_author, git_date, git_msg, docs_to_index, docs_to_unindex = data
# Statistics
self.nb_transactions += 1
# Add static changed & removed files to ~/database_static/.history/
changed_and_removed = list(changed) + list(removed)
for key in changed_and_removed:
if not key.endswith('metadata'):
self.add_handler_into_static_history(key)
# Create patch if there's changed
if added or changed or removed:
self.patchs_backend.create_patch(added, changed, removed, handlers, git_author)
else:
# it's a catalog transaction, we have to do nothing
pass
# Added and changed
added_and_changed = list(added) + list(changed)
for key in added_and_changed:
handler = handlers.get(key)
parent_path = dirname(key)
fs = self.get_handler_fs(handler)
if not fs.exists(parent_path):
fs.make_folder(parent_path)
self.save_handler(key, handler)
# Remove files (if not removed via git-rm)<|fim▁hole|> fs = self.get_handler_fs_by_key(key)
fs.remove(key)
# Do git transaction for metadata
if not TEST_DB_WITHOUT_COMMITS:
self.do_git_transaction(commit_message, data, added, changed, removed, handlers)
else:
# Commit at start
if not self.last_transaction_dtime:
self.do_git_big_commit()
else:
now = datetime.now()
t = now.time()
is_night = time(21, 00) < t or t < time(06, 00)
done_recently = now - self.last_transaction_dtime < timedelta(minutes=120)
if is_night and not done_recently:
self.do_git_big_commit()
# Catalog
for path in docs_to_unindex:
self.catalog.unindex_document(path)
for resource, values in docs_to_index:
self.catalog.index_document(values)
self.catalog.save_changes()
def do_git_big_commit(self):
""" Some databases are really bigs (1 millions files). GIT is too slow in this cases.
So we don't commit at each transaction, but at each N transactions.
"""
if TEST_DB_DESACTIVATE_GIT is True:
return
p1 = Process(target=self._do_git_big_commit)
p1.start()
self.last_transaction_dtime = datetime.now()
def _do_git_big_commit(self):
worktree = self.worktree
worktree._call(['git', 'add', '-A'])
worktree._call(['git', 'commit', '-m', 'Autocommit'])
def do_git_transaction(self, commit_message, data, added, changed, removed, handlers):
worktree = self.worktree
# 3. Git add
git_add = list(added) + list(changed)
git_add = [x for x in git_add if x.endswith('metadata')]
worktree.git_add(*git_add)
# 3. Git rm
git_rm = list(removed)
git_rm = [x for x in git_rm if x.endswith('metadata')]
worktree.git_rm(*git_rm)
# 2. Build the 'git commit' command
git_author, git_date, git_msg, docs_to_index, docs_to_unindex = data
git_msg = git_msg or 'no comment'
# 4. Create the tree
repo = worktree.repo
index = repo.index
try:
head = repo.revparse_single('HEAD')
except KeyError:
git_tree = None
else:
root = head.tree
# Initialize the heap
heap = Heap()
heap[''] = repo.TreeBuilder(root)
for key in git_add:
entry = index[key]
heap[key] = (entry.oid, entry.mode)
for key in git_rm:
heap[key] = None
while heap:
path, value = heap.popitem()
# Stop condition
if path == '':
git_tree = value.write()
break
if type(value) is TreeBuilder:
if len(value) == 0:
value = None
else:
oid = value.write()
value = (oid, GIT_FILEMODE_TREE)
# Split the path
if '/' in path:
parent, name = path.rsplit('/', 1)
else:
parent = ''
name = path
# Get the tree builder
tb = heap.get(parent)
if tb is None:
try:
tentry = root[parent]
except KeyError:
tb = repo.TreeBuilder()
else:
tree = repo[tentry.oid]
tb = repo.TreeBuilder(tree)
heap[parent] = tb
# Modify
if value is None:
# Sometimes there are empty folders left in the
# filesystem, but not in the tree, then we get a
# "Failed to remove entry" error. Be robust.
if tb.get(name) is not None:
tb.remove(name)
else:
tb.insert(name, value[0], value[1])
# 5. Git commit
worktree.git_commit(git_msg, git_author, git_date, tree=git_tree)
def abort_transaction(self):
self.catalog.abort_changes()
#from pygit2 import GIT_CHECKOUT_FORCE, GIT_CHECKOUT_REMOVE_UNTRACKED
# Don't need to abort since git add is made à last minute
#strategy = GIT_CHECKOUT_FORCE | GIT_CHECKOUT_REMOVE_UNTRACKED
#if pygit2.__version__ >= '0.21.1':
# self.worktree.repo.checkout_head(strategy=strategy)
#else:
# self.worktree.repo.checkout_head(strategy)
def flush_catalog(self, docs_to_unindex, docs_to_index):
for path in docs_to_unindex:
self.catalog.unindex_document(path)
for resource, values in docs_to_index:
self.catalog.index_document(values)
def get_catalog(self):
path = '{0}/catalog'.format(self.path)
if not lfs.is_folder(path):
return None
return Catalog(path, self.fields, read_only=self.read_only)
def search(self, query=None, **kw):
"""Launch a search in the catalog.
"""
catalog = self.catalog
xquery = _get_xquery(catalog, query, **kw)
return SearchResults(catalog, xquery)
def close(self):
self.catalog.close()
register_backend('git', GitBackend)<|fim▁end|> | for key in removed:
if not key.endswith('metadata') or TEST_DB_WITHOUT_COMMITS: |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-08-01 07:59
from __future__ import unicode_literals
<|fim▁hole|>from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import proso.django.models
class Migration(migrations.Migration):
initial = True
dependencies = [
('proso_user', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('proso_common', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Answer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('time', models.DateTimeField(default=datetime.datetime.now)),
('response_time', models.IntegerField()),
('guess', models.FloatField(default=0)),
('type', models.CharField(max_length=10)),
('lang', models.CharField(blank=True, default=None, max_length=2, null=True)),
('config', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='proso_common.Config')),
],
),
migrations.CreateModel(
name='AnswerMeta',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', models.TextField()),
('content_hash', models.CharField(db_index=True, max_length=40, unique=True)),
],
),
migrations.CreateModel(
name='Audit',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('key', models.CharField(max_length=50)),
('value', models.FloatField()),
('time', models.DateTimeField(default=datetime.datetime.now)),
('answer', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='proso_models.Answer')),
],
),
migrations.CreateModel(
name='EnvironmentInfo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('status', models.IntegerField(choices=[(0, 'disabled'), (1, 'loading'), (2, 'enabled'), (3, 'active')], default=1)),
('revision', models.IntegerField()),
('load_progress', models.IntegerField(default=0)),
('updated', models.DateTimeField(auto_now=True)),
('created', models.DateTimeField(auto_now_add=True)),
('config', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='proso_common.Config')),
],
),
migrations.CreateModel(
name='Item',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('active', models.BooleanField(default=True)),
],
bases=(models.Model, proso.django.models.ModelDiffMixin),
),
migrations.CreateModel(
name='ItemRelation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('visible', models.BooleanField(default=True)),
('active', models.BooleanField(default=True)),
('child', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='child_relations', to='proso_models.Item')),
('parent', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='parent_relations', to='proso_models.Item')),
],
bases=(models.Model, proso.django.models.ModelDiffMixin),
),
migrations.CreateModel(
name='ItemType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('model', models.CharField(max_length=100)),
('table', models.CharField(max_length=100)),
('foreign_key', models.CharField(max_length=100)),
('language', models.CharField(blank=True, default=None, max_length=100, null=True)),
('valid', models.BooleanField(default=True)),
],
),
migrations.CreateModel(
name='PracticeContext',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', models.TextField()),
('content_hash', models.CharField(db_index=True, max_length=40, unique=True)),
],
),
migrations.CreateModel(
name='PracticeSet',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('finished', models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name='Variable',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('permanent', models.BooleanField(default=False)),
('key', models.CharField(max_length=50)),
('value', models.FloatField()),
('audit', models.BooleanField(default=True)),
('updated', models.DateTimeField(default=datetime.datetime.now)),
('answer', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='proso_models.Answer')),
('info', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='proso_models.EnvironmentInfo')),
('item_primary', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='item_primary_variables', to='proso_models.Item')),
('item_secondary', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='item_secondary_variables', to='proso_models.Item')),
('user', models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AlterUniqueTogether(
name='itemtype',
unique_together=set([('model', 'foreign_key'), ('table', 'foreign_key')]),
),
migrations.AddField(
model_name='item',
name='children',
field=models.ManyToManyField(related_name='parents', through='proso_models.ItemRelation', to='proso_models.Item'),
),
migrations.AddField(
model_name='item',
name='item_type',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='proso_models.ItemType'),
),
migrations.AddField(
model_name='audit',
name='info',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='proso_models.EnvironmentInfo'),
),
migrations.AddField(
model_name='audit',
name='item_primary',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='item_primary_audits', to='proso_models.Item'),
),
migrations.AddField(
model_name='audit',
name='item_secondary',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='item_secondary_audits', to='proso_models.Item'),
),
migrations.AddField(
model_name='audit',
name='user',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='answer',
name='context',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='proso_models.PracticeContext'),
),
migrations.AddField(
model_name='answer',
name='item',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='item_answers', to='proso_models.Item'),
),
migrations.AddField(
model_name='answer',
name='item_answered',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='item_answered_answers', to='proso_models.Item'),
),
migrations.AddField(
model_name='answer',
name='item_asked',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='item_asked_answers', to='proso_models.Item'),
),
migrations.AddField(
model_name='answer',
name='metainfo',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='proso_models.AnswerMeta'),
),
migrations.AddField(
model_name='answer',
name='practice_set',
field=models.ForeignKey(blank=None, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='proso_models.PracticeSet'),
),
migrations.AddField(
model_name='answer',
name='session',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='proso_user.Session'),
),
migrations.AddField(
model_name='answer',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterUniqueTogether(
name='variable',
unique_together=set([('info', 'key', 'user', 'item_primary', 'item_secondary')]),
),
migrations.AlterIndexTogether(
name='variable',
index_together=set([('info', 'key', 'user'), ('info', 'key', 'user', 'item_primary'), ('info', 'key', 'user', 'item_primary', 'item_secondary'), ('info', 'key', 'item_primary'), ('info', 'key')]),
),
migrations.AlterUniqueTogether(
name='environmentinfo',
unique_together=set([('config', 'revision')]),
),
migrations.AlterIndexTogether(
name='audit',
index_together=set([('info', 'key', 'user'), ('info', 'key', 'user', 'item_primary'), ('info', 'key', 'user', 'item_primary', 'item_secondary'), ('info', 'key', 'item_primary'), ('info', 'key')]),
),
migrations.AlterIndexTogether(
name='answer',
index_together=set([('user', 'context')]),
),
]<|fim▁end|> | import datetime |
<|file_name|>VariantStoreVariant.java<|end_file_name|><|fim▁begin|>/*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see http://www.gnu.org/licenses/
*/
package org.phenotips.variantStoreIntegration;
import org.phenotips.data.similarity.internal.AbstractVariant;
import org.phenotips.variantstore.shared.GACallInfoFields;
import org.phenotips.variantstore.shared.GAVariantInfoFields;
import org.phenotips.variantstore.shared.VariantUtils;
import java.text.DecimalFormat;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.ga4gh.GACall;
import org.ga4gh.GAVariant;
/**
* A variant from the variant store. Annotated by Exomiser.
*
* @version $Id$
*/
public class VariantStoreVariant extends AbstractVariant
{
private static DecimalFormat df = new DecimalFormat("#.####");
/**
* Create a {@link Variant} from a {@link GAVariant} returned by a {@link
* org.phenotips.variantstore.VariantStoreInterface}.
*
* @param gaVariant a {@link GAVariant}
* @param totIndividuals number of individuals stored in the variant store<|fim▁hole|> */
public VariantStoreVariant(GAVariant gaVariant, Integer totIndividuals) {
setChrom(gaVariant.getReferenceName());
setPosition((int) (gaVariant.getStart() + 1));
GACall call = gaVariant.getCalls().get(0);
List<Integer> genotype = call.getGenotype();
setGenotype(gaVariant.getReferenceBases(),
StringUtils.join(gaVariant.getAlternateBases(), ','),
StringUtils.join(genotype, '/'));
setEffect(VariantUtils.getInfo(gaVariant, GAVariantInfoFields.GENE_EFFECT));
String value = VariantUtils.getInfo(call, GACallInfoFields.EXOMISER_VARIANT_SCORE);
if (value == null || "null".equals(value)) {
setScore(null);
} else {
setScore(Double.valueOf(value));
}
setAnnotation("geneScore", VariantUtils.getInfo(call, GACallInfoFields.EXOMISER_GENE_COMBINED_SCORE));
setAnnotation("geneSymbol", VariantUtils.getInfo(gaVariant, GAVariantInfoFields.GENE));
setAnnotation("hgvs", VariantUtils.getInfo(gaVariant, GAVariantInfoFields.GENE_HGVS));
value = VariantUtils.getInfo(gaVariant, GAVariantInfoFields.EXAC_AF);
setAnnotation("exacAF", df.format(Double.valueOf(value)));
setAnnotation("gtHet", VariantUtils.getInfo(gaVariant, GAVariantInfoFields.GT_HET));
setAnnotation("gtHom", VariantUtils.getInfo(gaVariant, GAVariantInfoFields.GT_HOM));
if (totIndividuals != null) {
value = VariantUtils.getInfo(gaVariant, GAVariantInfoFields.AC_TOT);
Double pcAF = Double.valueOf(value) / (totIndividuals * 2);
setAnnotation("pcAF", df.format(pcAF));
}
}
}<|fim▁end|> | |
<|file_name|>main.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import aaargh
from app import Negi
app = aaargh.App(description="Jinja2+JSON powered static HTML build tool")<|fim▁hole|>
@app.cmd(help='Parse JSON and build HTML')
@app.cmd_arg('-d','--data_dir',default='./data',help='JSON data dirctory(default:./data')
@app.cmd_arg('-t','--tmpl_dir',default='./templates',help='Jinja2 template dirctory(default:./templates')
@app.cmd_arg('-o','--out_dir',default='./dist',help='Output dirctory(default:./dist')
@app.cmd_arg('-v','--verbose',nargs='?',const=True,default=False)
def build(data_dir,tmpl_dir,out_dir,verbose):
builder = Negi(
data_dir= data_dir,
tmpl_dir = tmpl_dir,
out_dir = out_dir,
verbose = verbose
)
builder.build()
def main():
app.run()
if __name__ == '__main__':
main()<|fim▁end|> | |
<|file_name|>storage_management_client.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator 1.1.0.0
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
<|fim▁hole|>from msrest.service_client import ServiceClient
from msrest import Serializer, Deserializer
from msrestazure import AzureConfiguration
from .version import VERSION
from .operations.storage_accounts_operations import StorageAccountsOperations
from .operations.usage_operations import UsageOperations
from . import models
class StorageManagementClientConfiguration(AzureConfiguration):
"""Configuration for StorageManagementClient
Note that all parameters used to create this instance are saved as instance
attributes.
:param credentials: Credentials needed for the client to connect to Azure.
:type credentials: :mod:`A msrestazure Credentials
object<msrestazure.azure_active_directory>`
:param subscription_id: Gets subscription credentials which uniquely
identify Microsoft Azure subscription. The subscription ID forms part of
the URI for every service call.
:type subscription_id: str
:param str base_url: Service URL
"""
def __init__(
self, credentials, subscription_id, base_url=None):
if credentials is None:
raise ValueError("Parameter 'credentials' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
if not isinstance(subscription_id, str):
raise TypeError("Parameter 'subscription_id' must be str.")
if not base_url:
base_url = 'https://management.azure.com'
super(StorageManagementClientConfiguration, self).__init__(base_url)
self.add_user_agent('storagemanagementclient/{}'.format(VERSION))
self.add_user_agent('Azure-SDK-For-Python')
self.credentials = credentials
self.subscription_id = subscription_id
class StorageManagementClient(object):
"""The Storage Management Client.
:ivar config: Configuration for client.
:vartype config: StorageManagementClientConfiguration
:ivar storage_accounts: StorageAccounts operations
:vartype storage_accounts: storage.operations.StorageAccountsOperations
:ivar usage: Usage operations
:vartype usage: storage.operations.UsageOperations
:param credentials: Credentials needed for the client to connect to Azure.
:type credentials: :mod:`A msrestazure Credentials
object<msrestazure.azure_active_directory>`
:param subscription_id: Gets subscription credentials which uniquely
identify Microsoft Azure subscription. The subscription ID forms part of
the URI for every service call.
:type subscription_id: str
:param str base_url: Service URL
"""
def __init__(
self, credentials, subscription_id, base_url=None):
self.config = StorageManagementClientConfiguration(credentials, subscription_id, base_url)
self._client = ServiceClient(self.config.credentials, self.config)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self.api_version = '2015-06-15'
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self.storage_accounts = StorageAccountsOperations(
self._client, self.config, self._serialize, self._deserialize)
self.usage = UsageOperations(
self._client, self.config, self._serialize, self._deserialize)<|fim▁end|> | |
<|file_name|>unreachable_asm_2.rs<|end_file_name|><|fim▁begin|>#![feature(llvm_asm)]
enum Empty {}
fn empty() -> Option<Empty> {
None
}
// EMIT_MIR unreachable_asm_2.main.UnreachablePropagation.diff
fn main() {
if let Some(_x) = empty() {
let mut _y;
if true {
// asm instruction stops unreachable propagation to block bb3.
unsafe { llvm_asm!("NOP"); }
_y = 21;
} else {<|fim▁hole|>
match _x { }
}
}<|fim▁end|> | // asm instruction stops unreachable propagation to block bb3.
unsafe { llvm_asm!("NOP"); }
_y = 42;
} |
<|file_name|>process_message.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public<|fim▁hole|>use types::cef_string_t;
cef_stub_static_method_impls! {
fn cef_process_message_create(name: *const cef_string_t) -> *mut cef_process_message_t;
}<|fim▁end|> | * License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use interfaces::cef_process_message_t; |
<|file_name|>empty.py<|end_file_name|><|fim▁begin|># vim:ts=4:et
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
from ..utils import strip_nnn
from . import attachnode
from . import export
def is_group_root(obj, objects):<|fim▁hole|>def collect_objects(collection):
objects = {}
def collect(col):
for o in col.objects:
objects[o.name] = o
for c in col.children:
collect(c)
collect(collection)
return objects
def export_collection(obj, muobj, mu):
saved_exported_objects = set(export.exported_objects)
group = obj.instance_collection
objects = collect_objects(group)
for n in objects:
o = objects[n]
# while KSP models (part/prop/internal) will have only one root
# object, grouping might be used for other purposes (eg, greeble)
# so support multiple group root objects
if o.hide_render or not is_group_root(o, objects):
continue
child = export.make_obj(mu, o, mu.path)
if child:
muobj.children.append(child)
export.exported_objects = saved_exported_objects
def handle_empty(obj, muobj, mu):
if obj.instance_collection:
if obj.instance_type != 'COLLECTION':
#FIXME flag an error? figure out something else to do?
return None
export_collection(obj, muobj, mu)
name = strip_nnn(obj.name)
if name[:5] == "node_":
n = attachnode.AttachNode(obj, mu.inverse)
mu.nodes.append(n)
if not n.keep_transform() and not obj.children:
return None
muobj.transform.localRotation @= attachnode.rotation_correction
elif name == "thrustTransform":
muobj.transform.localRotation @= attachnode.rotation_correction
elif name in ["CoMOffset", "CoPOffset", "CoLOffset"]:
setattr(mu, name, (mu.inverse @ obj.matrix_world.col[3])[:3])
if not obj.children:
return None
return muobj
type_handlers = {
type(None): handle_empty
}<|fim▁end|> | if not obj.parent:
return True
return obj.parent.name not in objects
|
<|file_name|>registry.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2013, 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA
"""Implement registries for formatter."""
import os
from flask_registry import (<|fim▁hole|>)
from invenio.ext.registry import ModuleAutoDiscoverySubRegistry
from invenio.utils.datastructures import LazyDict
import yaml
format_templates_directories = RegistryProxy(
'format_templates_directories',
ModuleAutoDiscoveryRegistry,
'format_templates'
)
format_templates = RegistryProxy(
'format_templates',
PkgResourcesDirDiscoveryRegistry,
'.', registry_namespace=format_templates_directories
)
output_formats_directories = RegistryProxy(
'output_formats_directories',
ModuleAutoDiscoveryRegistry,
'output_formats'
)
output_formats_files = RegistryProxy(
'output_formats_files',
PkgResourcesDirDiscoveryRegistry,
'.', registry_namespace=output_formats_directories
)
template_context_functions = RegistryProxy(
'template_context_functions',
ModuleAutoDiscoverySubRegistry,
'template_context_functions'
)
def create_format_templates_lookup():
"""Create format templates."""
out = {}
def _register(path, level=1):
if level > 4:
return
normpath = os.path.normpath(path)
if os.path.isdir(normpath):
for p in os.listdir(normpath):
_register(os.path.join(normpath, p), level=level+1)
else:
parts = normpath.split(os.path.sep)
out[os.path.sep.join(parts[-level:])] = normpath
for t in reversed(format_templates):
_register(t)
return out
format_templates_lookup = LazyDict(create_format_templates_lookup)
def create_output_formats_lookup():
"""Create output formats."""
out = {}
for f in output_formats_files:
of = os.path.basename(f).lower()
data = {'names': {}}
if of.endswith('.yml'):
of = of[:-4]
with open(f, 'r') as f:
data.update(yaml.load(f) or {})
data['code'] = of
else:
continue # unknown filetype
if of in out:
continue
out[of] = data
return out
output_formats = LazyDict(create_output_formats_lookup)
export_formats = LazyDict(lambda: dict(
(code, of) for code, of in output_formats.items()
if of.get('content_type', '') != 'text/html' and of.get('visibility', 0)
))<|fim▁end|> | ModuleAutoDiscoveryRegistry,
PkgResourcesDirDiscoveryRegistry,
RegistryProxy, |
<|file_name|>Consumer.java<|end_file_name|><|fim▁begin|>package com.salesforce.dva.argus.service.mq.kafka;
import com.fasterxml.jackson.databind.JavaType;
<|fim▁hole|>import java.io.Serializable;
import java.util.List;
public interface Consumer {
<T extends Serializable> List<T> dequeueFromBuffer(String topic, Class<T> type, int timeout, int limit);
<T extends Serializable> List<T> dequeueFromBuffer(String topic, JavaType type, int timeout, int limit);
void shutdown();
}<|fim▁end|> | |
<|file_name|>test_sns_operations.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import unittest
from mock import Mock, sentinel
import botocore.session
class TestSNSOperations(unittest.TestCase):
def setUp(self):
self.session = botocore.session.get_session()
self.sns = self.session.get_service('sns')
def test_subscribe_with_endpoint(self):
op = self.sns.get_operation('Subscribe')
params = op.build_parameters(topic_arn='topic_arn',
protocol='http',
notification_endpoint='http://example.org')
self.assertEqual(params['Endpoint'], 'http://example.org')
def test_sns_pre_send_event(self):
op = self.sns.get_operation('Subscribe')
calls = []
self.session.register('before-call.sns.Subscribe',
lambda **kwargs: calls.append(kwargs))
endpoint = Mock()
endpoint.make_request.return_value = (sentinel.RESPONSE,
sentinel.PARSED)
op.call(endpoint=endpoint, topic_arn='topic_arn', protocol='http',
notification_endpoint='http://example.org')
self.assertEqual(len(calls), 1)
kwargs = calls[0]<|fim▁hole|>
def test_sns_post_send_event_is_invoked(self):
op = self.sns.get_operation('Subscribe')
calls = []
self.session.register('after-call.sns.Subscribe',
lambda **kwargs: calls.append(kwargs))
endpoint = Mock()
endpoint.make_request.return_value = (sentinel.RESPONSE,
sentinel.PARSED)
op.call(endpoint=endpoint, topic_arn='topic_arn', protocol='http',
notification_endpoint='http://example.org')
self.assertEqual(len(calls), 1)
self.assertEqual(calls[0]['operation'], op)
self.assertEqual(calls[0]['http_response'], sentinel.RESPONSE)
self.assertEqual(calls[0]['parsed'], sentinel.PARSED)
if __name__ == "__main__":
unittest.main()<|fim▁end|> | self.assertEqual(kwargs['operation'], op)
self.assertEqual(kwargs['endpoint'], endpoint)
self.assertEqual(kwargs['params']['TopicArn'], 'topic_arn') |
<|file_name|>misc_test.go<|end_file_name|><|fim▁begin|>package gofakeit
import (
"fmt"
"reflect"
"sort"
"testing"
"github.com/brianvoe/gofakeit/v6/data"
)
func ExampleBool() {
Seed(11)
fmt.Println(Bool())
// Output: true
}
func ExampleFaker_Bool() {
f := New(11)
fmt.Println(f.Bool())
// Output: true
}
func BenchmarkBool(b *testing.B) {
b.Run("package", func(b *testing.B) {
for i := 0; i < b.N; i++ {
Bool()
}
})
b.Run("Faker math", func(b *testing.B) {
f := New(0)
for i := 0; i < b.N; i++ {
f.Bool()
}
})
b.Run("Faker crypto", func(b *testing.B) {
f := NewCrypto()
for i := 0; i < b.N; i++ {
f.Bool()
}
})
}
func TestUUID(t *testing.T) {
id := UUID()
if len(id) != 36 {
t.Error("unique length does not equal requested length")
}
}
func ExampleUUID() {
Seed(11)
fmt.Println(UUID())
// Output: 590c1440-9888-45b0-bd51-a817ee07c3f2
}
func ExampleFaker_UUID() {
f := New(11)
fmt.Println(f.UUID())
// Output: 590c1440-9888-45b0-bd51-a817ee07c3f2
}
func BenchmarkUUID(b *testing.B) {
b.Run("package", func(b *testing.B) {
for i := 0; i < b.N; i++ {
UUID()
}
})
b.Run("Faker math", func(b *testing.B) {
f := New(0)
for i := 0; i < b.N; i++ {
f.UUID()
}
})
b.Run("Faker crypto", func(b *testing.B) {
f := NewCrypto()
for i := 0; i < b.N; i++ {
f.UUID()
}
})
}
func TestShuffleAnySlice(t *testing.T) {
ShuffleAnySlice(nil) // Should do nothing
ShuffleAnySlice("b") // Should do nothing
ShuffleAnySlice([]string{"b"}) // If single value should do nothing
a := []string{"a", "b", "c", "d", "e", "f", "g", "h"}
b := make([]string, len(a))
copy(b, a)
ShuffleAnySlice(a)
if equalSliceString(a, b) {
t.Errorf("shuffle strings resulted in the same permutation, the odds are slim")
}<|fim▁hole|> n := []int{1, 2, 3, 4, 5, 6, 7, 8, 9, 0}
m := make([]int, len(n))
copy(m, n)
ShuffleAnySlice(n)
if equalSliceInt(n, m) {
t.Errorf("shuffle ints resulted in the same permutation, the odds are slim")
}
i := []interface{}{"a", 1, "c", 3, []string{"a", "b", "c"}, -555, []byte{1, 5}, "h"}
ii := make([]interface{}, len(i))
copy(ii, i)
ShuffleAnySlice(i)
if equalSliceInterface(i, ii) {
t.Errorf("shuffle interface resulted in the same permutation, the odds are slim")
}
}
func ExampleShuffleAnySlice() {
Seed(11)
strings := []string{"happy", "times", "for", "everyone", "have", "a", "good", "day"}
ShuffleAnySlice(strings)
fmt.Println(strings)
ints := []int{52, 854, 941, 74125, 8413, 777, 89416, 841657}
ShuffleAnySlice(ints)
fmt.Println(ints)
// Output:
// [good everyone have for times a day happy]
// [777 74125 941 854 89416 52 8413 841657]
}
func ExampleFaker_ShuffleAnySlice() {
f := New(11)
strings := []string{"happy", "times", "for", "everyone", "have", "a", "good", "day"}
f.ShuffleAnySlice(strings)
fmt.Println(strings)
ints := []int{52, 854, 941, 74125, 8413, 777, 89416, 841657}
f.ShuffleAnySlice(ints)
fmt.Println(ints)
// Output:
// [good everyone have for times a day happy]
// [777 74125 941 854 89416 52 8413 841657]
}
func BenchmarkShuffleAnySlice(b *testing.B) {
b.Run("package", func(b *testing.B) {
a := []interface{}{"a", 1, "c", 3, []string{"a", "b", "c"}, -555, []byte{1, 5}, "h"}
for i := 0; i < b.N; i++ {
ShuffleAnySlice(a)
}
})
b.Run("Faker math", func(b *testing.B) {
a := []interface{}{"a", 1, "c", 3, []string{"a", "b", "c"}, -555, []byte{1, 5}, "h"}
f := New(0)
for i := 0; i < b.N; i++ {
f.ShuffleAnySlice(a)
}
})
b.Run("Faker crypto", func(b *testing.B) {
a := []interface{}{"a", 1, "c", 3, []string{"a", "b", "c"}, -555, []byte{1, 5}, "h"}
f := NewCrypto()
for i := 0; i < b.N; i++ {
f.ShuffleAnySlice(a)
}
})
}
func ExampleFlipACoin() {
Seed(11)
fmt.Println(FlipACoin())
// Output: Heads
}
func ExampleFaker_FlipACoin() {
f := New(11)
fmt.Println(f.FlipACoin())
// Output: Heads
}
func TestFlipACoin(t *testing.T) {
for i := 0; i < 100; i++ {
FlipACoin()
}
}
func BenchmarkFlipACoin(b *testing.B) {
b.Run("package", func(b *testing.B) {
for i := 0; i < b.N; i++ {
FlipACoin()
}
})
b.Run("Faker math", func(b *testing.B) {
f := New(0)
for i := 0; i < b.N; i++ {
f.FlipACoin()
}
})
b.Run("Faker crypto", func(b *testing.B) {
f := NewCrypto()
for i := 0; i < b.N; i++ {
f.FlipACoin()
}
})
}
func TestRandomMapKey(t *testing.T) {
mStr := map[string]int{
"a": 1,
"b": 2,
"c": 3,
}
for i := 0; i < 100; i++ {
key := RandomMapKey(mStr)
if _, ok := mStr[key.(string)]; !ok {
t.Errorf("key %s not found in map", key)
}
}
mInt := map[int]string{
1: "a",
2: "b",
3: "c",
}
for i := 0; i < 100; i++ {
f := New(11)
key := f.RandomMapKey(mInt)
if _, ok := mInt[key.(int)]; !ok {
t.Errorf("key %d not found in map", key)
}
}
}
func TestCategories(t *testing.T) {
var got, expected []string
for k := range Categories() {
got = append(got, k)
}
for k := range data.Data {
expected = append(expected, k)
}
sort.Strings(got)
sort.Strings(expected)
if !reflect.DeepEqual(got, expected) {
t.Error("Type arrays are not the same.\nExpected: ", expected, "\nGot: ", got)
}
}<|fim▁end|> | |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>const LdapStrategy = require('./LdapStrategy');
const MoodleStrategy = require('./MoodleStrategy');
const IservStrategy = require('./IservStrategy');
const TSPStrategy = require('./TSPStrategy');
const ApiKeyStrategy = require('./ApiKeyStrategy');
module.exports = {
LdapStrategy,
MoodleStrategy,
IservStrategy,
TSPStrategy,
ApiKeyStrategy,<|fim▁hole|><|fim▁end|> | }; |
<|file_name|>messagehandler.py<|end_file_name|><|fim▁begin|>from functions import logger, config
import asyncio
class MessageHandler:
def __init__(self, client, message, command, args):
''' Create a new messagehandler which handles the required parts for the commands.
disabling this module will fuck up the whole bot.'''
self.client = client
self.message = message
self.command = command
self.channel = message.channel
self.access_level = 0
self.needed_level = 6
self.args = args
async def sendMessage(self, text, channel=None):
'''
Sends a text message to a channel.
Arguments:
(str) text: The message you want to send
(Optional) channel: The channel you want the message to be sent in
Returns:<|fim▁hole|> if len(text)==0:
raise ValueError("The message needs at least one character.")
if len(text)>2000:
raise ValueError("The message can\'t be more than 2000 chars")
if channel is None:
message = await self.client.send_message(self.channel, "\u200B{}".format(text))
else:
message = await self.client.send_message(channel, "\u200B{}".format(text))
return message<|fim▁end|> | An messageobject if the message has been sent, None otherwise.'''
message = None
text = str(text) |
<|file_name|>burst_tagger_cc_base.cpp<|end_file_name|><|fim▁begin|>/*
* This file is protected by Copyright. Please refer to the COPYRIGHT file
* distributed with this source distribution.
*
* This file is part of GNUHAWK.
*
* GNUHAWK is free software: you can redistribute it and/or modify is under the
* terms of the GNU General Public License as published by the Free Software
* Foundation, either version 3 of the License, or (at your option) any later
* version.
*
* GNUHAWK is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with
* this program. If not, see http://www.gnu.org/licenses/.
*/
#include "burst_tagger_cc_base.h"
/*******************************************************************************************
AUTO-GENERATED CODE. DO NOT MODIFY
The following class functions are for the base class for the component class. To
customize any of these functions, do not modify them here. Instead, overload them
on the child class
******************************************************************************************/
burst_tagger_cc_base::burst_tagger_cc_base(const char *uuid, const char *label) :
GnuHawkBlock(uuid, label),
serviceThread(0),
noutput_items(0),
_maintainTimeStamp(false),
_throttle(false)
{
construct();
}
void burst_tagger_cc_base::construct()
{
Resource_impl::_started = false;
loadProperties();
serviceThread = 0;
sentEOS = false;
inputPortOrder.resize(0);;
outputPortOrder.resize(0);
PortableServer::ObjectId_var oid;
complex_in = new bulkio::InFloatPort("complex_in");
complex_in->setNewStreamListener(this, &burst_tagger_cc_base::complex_in_newStreamCallback);
oid = ossie::corba::RootPOA()->activate_object(complex_in);
trigger_in = new bulkio::InShortPort("trigger_in");
trigger_in->setNewStreamListener(this, &burst_tagger_cc_base::trigger_in_newStreamCallback);
oid = ossie::corba::RootPOA()->activate_object(trigger_in);
complex_out = new bulkio::OutFloatPort("complex_out");
oid = ossie::corba::RootPOA()->activate_object(complex_out);
registerInPort(complex_in);
inputPortOrder.push_back("complex_in");
registerInPort(trigger_in);
inputPortOrder.push_back("trigger_in");
registerOutPort(complex_out, complex_out->_this());
outputPortOrder.push_back("complex_out");
}
/*******************************************************************************************
Framework-level functions
These functions are generally called by the framework to perform housekeeping.
*******************************************************************************************/
void burst_tagger_cc_base::initialize() throw (CF::LifeCycle::InitializeError, CORBA::SystemException)
{
}
void burst_tagger_cc_base::start() throw (CORBA::SystemException, CF::Resource::StartError)
{
boost::mutex::scoped_lock lock(serviceThreadLock);
if (serviceThread == 0) {
complex_in->unblock();
trigger_in->unblock();
serviceThread = new ProcessThread<burst_tagger_cc_base>(this, 0.1);
serviceThread->start();
}
if (!Resource_impl::started()) {
Resource_impl::start();
}
}
void burst_tagger_cc_base::stop() throw (CORBA::SystemException, CF::Resource::StopError)
{
if ( complex_in ) complex_in->block();
if ( trigger_in ) trigger_in->block();
{
boost::mutex::scoped_lock lock(_sriMutex);
_sriQueue.clear();
}
// release the child thread (if it exists)
if (serviceThread != 0) {
{
boost::mutex::scoped_lock lock(serviceThreadLock);
LOG_TRACE( burst_tagger_cc_base, "Stopping Service Function" );
serviceThread->stop();
}
if ( !serviceThread->release()) {
throw CF::Resource::StopError(CF::CF_NOTSET, "Processing thread did not die");
}
boost::mutex::scoped_lock lock(serviceThreadLock);
if ( serviceThread ) {
delete serviceThread;
}
}
serviceThread = 0;
if (Resource_impl::started()) {
Resource_impl::stop();
}
LOG_TRACE( burst_tagger_cc_base, "COMPLETED STOP REQUEST" );
}
CORBA::Object_ptr burst_tagger_cc_base::getPort(const char* _id) throw (CORBA::SystemException, CF::PortSupplier::UnknownPort)
{
std::map<std::string, Port_Provides_base_impl *>::iterator p_in = inPorts.find(std::string(_id));
if (p_in != inPorts.end()) {
if (!strcmp(_id,"complex_in")) {
bulkio::InFloatPort *ptr = dynamic_cast<bulkio::InFloatPort *>(p_in->second);
if (ptr) {
return ptr->_this();
}
}
if (!strcmp(_id,"trigger_in")) {
bulkio::InShortPort *ptr = dynamic_cast<bulkio::InShortPort *>(p_in->second);
if (ptr) {
return ptr->_this();
}
}
}
std::map<std::string, CF::Port_var>::iterator p_out = outPorts_var.find(std::string(_id));
if (p_out != outPorts_var.end()) {
return CF::Port::_duplicate(p_out->second);
}
throw (CF::PortSupplier::UnknownPort());
}
// Release the component: stop processing, deactivate and free the ports,
// then delegate to Resource_impl::releaseObject() so main() shuts down.
void burst_tagger_cc_base::releaseObject() throw (CORBA::SystemException, CF::LifeCycle::ReleaseError)
{
    // This function clears the component running condition so main shuts down everything
    try {
        stop();
    } catch (CF::Resource::StopError& ex) {
        // Log (rather than silently ignore) a failed stop; release continues
        // regardless so the component can still be torn down.
        LOG_WARN( burst_tagger_cc_base, "releaseObject: stop() raised StopError: " << ex.msg );
    }
    // deactivate ports
    releaseInPorts();
    releaseOutPorts();
    // Null the pointers after delete so later accesses fail fast instead of
    // dereferencing freed memory.
    delete complex_in;
    complex_in = 0;
    delete trigger_in;
    trigger_in = 0;
    delete complex_out;
    complex_out = 0;
    Resource_impl::releaseObject();
}
// Register this component's properties with the REDHAWK property framework.
// Argument order follows the addProperty convention:
// (member, initial value, id, name, mode, units, action, kinds).
void burst_tagger_cc_base::loadProperties()
{
    // Size in bytes of one stream item; read-only to clients.
    addProperty(itemsize,
                8,
                "itemsize",
                "",
                "readonly",
                "",
                "external",
                "configure");
    // Struct property describing the tag emitted when the trigger is asserted.
    addProperty(true_tag,
                true_tag_struct(),
                "true_tag",
                "",
                "readwrite",
                "",
                "external",
                "configure");
    // Struct property describing the tag emitted when the trigger is deasserted.
    addProperty(false_tag,
                false_tag_struct(),
                "false_tag",
                "",
                "readwrite",
                "",
                "external",
                "configure");
}
// Destructor: release any input/output stream wrappers still owned by the
// component.
burst_tagger_cc_base::~burst_tagger_cc_base()
{
    // Free input streams
    IStreamList::iterator in = _istreams.begin();
    while (in != _istreams.end()) {
        delete *in;
        ++in;
    }
    // Free output streams
    OStreamList::iterator out = _ostreams.begin();
    while (out != _ostreams.end()) {
        delete *out;
        ++out;
    }
}
//
// Allow for logging
//
PREPARE_LOGGING(burst_tagger_cc_base);
inline static unsigned int
round_up (unsigned int n, unsigned int multiple)
{<|fim▁hole|>inline static unsigned int
round_down (unsigned int n, unsigned int multiple)
{
return (n / multiple) * multiple;
}
// Always returns 0 in this generated base class; the output stream mapping
// is established in setupIOMappings() instead.
uint32_t burst_tagger_cc_base::getNOutputStreams() {
    return 0;
}
// Map REDHAWK ports onto the GNU Radio block's input/output streams.
// On first call, wraps each BULKIO port in a gr_istream/gr_ostream and
// records the index mapping; on later calls (after a block reset) it
// re-associates the existing stream wrappers with the new block.
//
// FIX: the reset path previously iterated with *uninitialized* iterators
// (istream/ostream were declared but never set to begin()), which is
// undefined behavior; both are now initialized before the loops.
void burst_tagger_cc_base::setupIOMappings( )
{
    int ninput_streams = 0;
    int noutput_streams = 0;
    std::string sid("");
    int inMode=RealMode;
    if ( !validGRBlock() ) return;
    ninput_streams = gr_sptr->get_max_input_streams();
    gr_io_signature_sptr g_isig = gr_sptr->input_signature();
    noutput_streams = gr_sptr->get_max_output_streams();
    gr_io_signature_sptr g_osig = gr_sptr->output_signature();
    LOG_DEBUG( burst_tagger_cc_base, "GNUHAWK IO MAPPINGS IN/OUT " << ninput_streams << "/" << noutput_streams );
    //
    // Someone reset the GR Block so we need to clean up old mappings if they exists
    // we need to reset the io signatures and check the vlens
    //
    if ( _istreams.size() > 0 || _ostreams.size() > 0 ) {
        LOG_DEBUG( burst_tagger_cc_base, "RESET INPUT SIGNATURE SIZE:" << _istreams.size() );
        // FIX: initialize the iterator (was uninitialized -> UB).
        IStreamList::iterator istream = _istreams.begin();
        for ( int idx=0 ; istream != _istreams.end(); idx++, istream++ ) {
            // re-add existing stream definitons
            LOG_DEBUG( burst_tagger_cc_base, "ADD READ INDEX TO GNU RADIO BLOCK");
            if ( ninput_streams == -1 ) gr_sptr->add_read_index();
            // setup io signature
            (*istream)->associate( gr_sptr );
        }
        LOG_DEBUG( burst_tagger_cc_base, "RESET OUTPUT SIGNATURE SIZE:" << _ostreams.size() );
        // FIX: initialize the iterator (was uninitialized -> UB).
        OStreamList::iterator ostream = _ostreams.begin();
        for ( int idx=0 ; ostream != _ostreams.end(); idx++, ostream++ ) {
            // need to evaluate new settings...???
            (*ostream)->associate( gr_sptr );
        }
        return;
    }
    int i = 0;
    //
    // Setup mapping of RH port to GNU RADIO Block input streams
    // For version 1, we are ignoring the GNU Radio input stream -1 case that allows multiple data
    // streams over a single connection. We are mapping a single RH Port to a single GNU Radio stream.
    // Stream Identifiers will be pass along as they are received
    //
    LOG_TRACE( burst_tagger_cc_base, "setupIOMappings INPUT PORTS: " << inPorts.size() );
    RH_ProvidesPortMap::iterator p_in;
    i = 0;
    // grab ports based on their order in the scd.xml file
    p_in = inPorts.find("complex_in");
    if ( p_in != inPorts.end() ) {
        bulkio::InFloatPort *port = dynamic_cast< bulkio::InFloatPort * >(p_in->second);
        int mode = inMode;
        sid = "";
        // need to add read index to GNU Radio Block for processing streams when max_input == -1
        if ( ninput_streams == -1 ) gr_sptr->add_read_index();
        // check if we received SRI during setup
        BULKIO::StreamSRISequence_var sris = port->activeSRIs();
        if ( sris->length() > 0 ) {
            BULKIO::StreamSRI sri = sris[sris->length()-1];
            mode = sri.mode;
        }
        std::vector<int> in;
        io_mapping.push_back( in );
        _istreams.push_back( new gr_istream< bulkio::InFloatPort > ( port, gr_sptr, i, mode, sid ));
        LOG_DEBUG( burst_tagger_cc_base, "ADDING INPUT MAP IDX:" << i << " SID:" << sid );
        // increment port counter
        i++;
    }
    // grab ports based on their order in the scd.xml file
    p_in = inPorts.find("trigger_in");
    if ( p_in != inPorts.end() ) {
        bulkio::InShortPort *port = dynamic_cast< bulkio::InShortPort * >(p_in->second);
        int mode = inMode;
        sid = "";
        // need to add read index to GNU Radio Block for processing streams when max_input == -1
        if ( ninput_streams == -1 ) gr_sptr->add_read_index();
        // check if we received SRI during setup
        BULKIO::StreamSRISequence_var sris = port->activeSRIs();
        if ( sris->length() > 0 ) {
            BULKIO::StreamSRI sri = sris[sris->length()-1];
            mode = sri.mode;
        }
        std::vector<int> in;
        io_mapping.push_back( in );
        _istreams.push_back( new gr_istream< bulkio::InShortPort > ( port, gr_sptr, i, mode, sid ));
        LOG_DEBUG( burst_tagger_cc_base, "ADDING INPUT MAP IDX:" << i << " SID:" << sid );
        // increment port counter
        i++;
    }
    //
    // Setup mapping of RH port to GNU RADIO Block output streams
    // For version 1, we are ignoring the GNU Radio output stream -1 case that allows multiple data
    // streams over a single connection. We are mapping a single RH Port to a single GNU Radio stream.
    //
    LOG_TRACE( burst_tagger_cc_base, "setupIOMappings OutputPorts: " << outPorts.size() );
    RH_UsesPortMap::iterator p_out;
    i = 0;
    // grab ports based on their order in the scd.xml file
    p_out = outPorts.find("complex_out");
    if ( p_out != outPorts.end() ) {
        bulkio::OutFloatPort *port = dynamic_cast< bulkio::OutFloatPort * >(p_out->second);
        int idx = -1;
        BULKIO::StreamSRI sri = createOutputSRI( i, idx );
        if (idx == -1) idx = i;
        if(idx < (int)io_mapping.size()) io_mapping[idx].push_back(i);
        int mode = sri.mode;
        sid = sri.streamID;
        _ostreams.push_back( new gr_ostream< bulkio::OutFloatPort > ( port, gr_sptr, i, mode, sid ));
        LOG_DEBUG( burst_tagger_cc_base, "ADDING OUTPUT MAP IDX:" << i << " SID:" << sid );
        _ostreams[i]->setSRI(sri, i );
        // increment port counter
        i++;
    }
}
// Callback fired when new SRI arrives on the complex_in port.  The (port, SRI)
// pair is queued under _sriMutex; the service thread later applies it in
// processStreamIdChanges().
void burst_tagger_cc_base::complex_in_newStreamCallback( BULKIO::StreamSRI &sri )
{
    LOG_TRACE( burst_tagger_cc_base, "START NotifySRI port:stream " << complex_in->getName() << "/" << sri.streamID);
    boost::mutex::scoped_lock lock(_sriMutex);
    _sriQueue.push_back( std::make_pair( complex_in, sri ) );
    LOG_TRACE( burst_tagger_cc_base, "END NotifySRI QUEUE " << _sriQueue.size() << " port:stream " << complex_in->getName() << "/" << sri.streamID);
}
// Callback fired when new SRI arrives on the trigger_in port.  Mirrors
// complex_in_newStreamCallback(): queue the update for the service thread.
void burst_tagger_cc_base::trigger_in_newStreamCallback( BULKIO::StreamSRI &sri )
{
    LOG_TRACE( burst_tagger_cc_base, "START NotifySRI port:stream " << trigger_in->getName() << "/" << sri.streamID);
    boost::mutex::scoped_lock lock(_sriMutex);
    _sriQueue.push_back( std::make_pair( trigger_in, sri ) );
    LOG_TRACE( burst_tagger_cc_base, "END NotifySRI QUEUE " << _sriQueue.size() << " port:stream " << trigger_in->getName() << "/" << sri.streamID);
}
// Apply queued SRI/stream-ID updates to the matching input streams and
// propagate them to the corresponding output streams.  Runs under _sriMutex;
// the queue is drained only when a GNU Radio block exists, otherwise the
// updates remain queued and a warning is logged.
void burst_tagger_cc_base::processStreamIdChanges()
{
    boost::mutex::scoped_lock lock(_sriMutex);
    LOG_TRACE( burst_tagger_cc_base, "processStreamIDChanges QUEUE: " << _sriQueue.size() );
    if ( _sriQueue.size() == 0 ) return;
    // (removed an unused outer `std::string sid` that shadowed the one below)
    if ( validGRBlock() ) {
        IStreamList::iterator istream;
        int idx=0;
        std::string sid("");
        int mode=0;
        SRIQueue::iterator item = _sriQueue.begin();
        for ( ; item != _sriQueue.end(); item++ ) {
            idx = 0;
            sid = "";
            mode= item->second.mode;
            sid = item->second.streamID;
            // Find the input stream wrapper owning the port this SRI came from.
            istream = _istreams.begin();
            for ( ; istream != _istreams.end(); idx++, istream++ ) {
                if ( (*istream)->getPort() == item->first ) {
                    LOG_DEBUG( burst_tagger_cc_base, " SETTING IN_STREAM ID/STREAM_ID :" << idx << "/" << sid );
                    (*istream)->sri(true);
                    (*istream)->spe(mode);
                    LOG_DEBUG( burst_tagger_cc_base, " SETTING OUT_STREAM ID/STREAM_ID :" << idx << "/" << sid );
                    setOutputStreamSRI( idx, item->second );
                }
            }
        }
        _sriQueue.clear();
    } else {
        LOG_WARN( burst_tagger_cc_base, " NEW STREAM ID, NO GNU RADIO BLOCK DEFINED, SRI QUEUE SIZE:" << _sriQueue.size() );
    }
}
// Build the default SRI for output stream `oidx`: one-dimensional real data
// with unit sample spacing, and a stream ID of "<component name>_<oidx>".
BULKIO::StreamSRI burst_tagger_cc_base::createOutputSRI( int32_t oidx ) {
    // for each output stream set the SRI context
    BULKIO::StreamSRI sri = BULKIO::StreamSRI();
    sri.hversion = 1;
    sri.xstart = 0.0;
    sri.xdelta = 1;
    sri.xunits = BULKIO::UNITS_TIME;
    sri.subsize = 0;          // 0 => one-dimensional data
    sri.ystart = 0.0;
    sri.ydelta = 0.0;
    sri.yunits = BULKIO::UNITS_NONE;
    sri.mode = 0;             // 0 => real (scalar) samples
    // Stream ID is derived from the component instance name and stream index.
    std::ostringstream t;
    t << naming_service_name.c_str() << "_" << oidx;
    std::string sid = t.str();
    sri.streamID = CORBA::string_dup(sid.c_str());
    return sri;
}
// Overload used by setupIOMappings(); `in_idx` is an out-parameter a subclass
// may set to tie this output to a specific input stream.  This default
// implementation leaves in_idx untouched and returns the default SRI.
BULKIO::StreamSRI burst_tagger_cc_base::createOutputSRI( int32_t oidx, int32_t &in_idx)
{
    return createOutputSRI( oidx );
}
// Scale the SRI's xdelta by the GNU Radio block's relative rate so the output
// sample spacing reflects any resampling performed by the block.  No-op when
// no block is attached.
void burst_tagger_cc_base::adjustOutputRate(BULKIO::StreamSRI &sri )
{
    if ( validGRBlock() ) {
        double ret=sri.xdelta*gr_sptr->relative_rate();
        LOG_TRACE( burst_tagger_cc_base, "ADJUSTING SRI.XDELTA FROM/TO: " << sri.xdelta << "/" << ret );
        sri.xdelta = ret;
    }
}
// Compute the wall-clock duration represented by the data currently held in
// the first output stream (nelems * xdelta seconds), used for throttling.
// Returns a zero-length duration when there are no output streams.
burst_tagger_cc_base::TimeDuration burst_tagger_cc_base::getTargetDuration()
{
    TimeDuration t_drate;   // fixed stray double semicolon here
    uint64_t samps=0;
    double xdelta=1.0;
    double trate=1.0;
    if ( _ostreams.size() > 0 ) {
        samps= _ostreams[0]->nelems();
        xdelta= _ostreams[0]->sri.xdelta;
    }
    // Duration in seconds, split into whole seconds and microseconds.
    trate = samps*xdelta;
    uint64_t sec = (uint64_t)trunc(trate);
    uint64_t usec = (uint64_t)((trate-sec)*1e6);
    t_drate = boost::posix_time::seconds(sec) +
        boost::posix_time::microseconds(usec);
    LOG_TRACE( burst_tagger_cc_base, " SEC/USEC " << sec << "/" << usec << "\n" <<
               " target_duration " << t_drate );
    return t_drate;
}
// Compute a throttling delay: one quarter of (target duration - elapsed time
// of the last processing pass).  Returns a default-constructed (zero-length)
// duration when start_time has not been set yet.
burst_tagger_cc_base::TimeDuration burst_tagger_cc_base::calcThrottle( TimeMark &start_time,
                                                                       TimeMark &end_time )
{
    TimeDuration delta;
    TimeDuration target_duration = getTargetDuration();
    if ( start_time.is_not_a_date_time() == false ) {
        TimeDuration s_dtime= end_time - start_time;
        delta = target_duration - s_dtime;
        // Damp the correction to a quarter of the difference per pass.
        delta /= 4;
        LOG_TRACE( burst_tagger_cc_base, " s_time/t_dime " << s_dtime << "/" << target_duration << "\n" <<
                   " delta " << delta );
    }
    return delta;
}
// Main service loop body: pull data from the input streams, repeatedly run
// the GNU Radio block's work function via _forecastAndProcess(), advance the
// read pointers by what work consumed, and close streams on end-of-stream.
// Returns NOOP when no work was possible, NORMAL otherwise.
int burst_tagger_cc_base::_transformerServiceFunction( std::vector< gr_istream_base * > &istreams ,
                                                       std::vector< gr_ostream_base * > &ostreams )
{
    typedef std::vector< gr_istream_base * > _IStreamList;
    typedef std::vector< gr_ostream_base * > _OStreamList;
    boost::mutex::scoped_lock lock(serviceThreadLock);
    if ( validGRBlock() == false ) {
        // create our processing block, and setup property notifiers
        createBlock();
        LOG_DEBUG( burst_tagger_cc_base, " FINISHED BUILDING GNU RADIO BLOCK");
    }
    //process any Stream ID changes this could affect number of io streams
    processStreamIdChanges();
    if ( !validGRBlock() || istreams.size() == 0 || ostreams.size() == 0 ) {
        LOG_WARN( burst_tagger_cc_base, "NO STREAMS ATTACHED TO BLOCK..." );
        return NOOP;
    }
    // Size the scratch vectors to the current stream counts.
    _input_ready.resize( istreams.size() );
    _ninput_items_required.resize( istreams.size() );
    _ninput_items.resize( istreams.size() );
    _input_items.resize( istreams.size() );
    _output_items.resize( ostreams.size() );
    //
    // RESOLVE: need to look at forecast strategy,
    // 1) see how many read items are necessary for N number of outputs
    // 2) read input data and see how much output we can produce
    //
    //
    // Grab available data from input streams
    //
    _OStreamList::iterator ostream;
    _IStreamList::iterator istream = istreams.begin();
    int nitems=0;
    for ( int idx=0 ; istream != istreams.end() && serviceThread->threadRunning() ; idx++, istream++ ) {
        // note this a blocking read that can cause deadlocks
        nitems = (*istream)->read();
        if ( (*istream)->overrun() ) {
            LOG_WARN( burst_tagger_cc_base, " NOT KEEPING UP WITH STREAM ID:" << (*istream)->streamID );
        }
        if ( (*istream)->sriChanged() ) {
            // RESOLVE - need to look at how SRI changes can affect Gnu Radio BLOCK state
            LOG_DEBUG( burst_tagger_cc_base, "SRI CHANGED, STREAMD IDX/ID: "
                       << idx << "/" << (*istream)->getPktStreamId() );
            setOutputStreamSRI( idx, (*istream)->getPktSri() );
        }
    }
    LOG_TRACE( burst_tagger_cc_base, "READ NITEMS: " << nitems );
    // NOTE(review): nitems holds only the LAST stream's read count here, and
    // the eos check uses the member _istreams rather than the parameter.
    if ( nitems <= 0 && !_istreams[0]->eos() ) {
        return NOOP;
    }
    bool eos = false;
    int nout = 0;
    bool workDone = false;
    // Keep invoking work until it reports it cannot produce more (-1).
    while ( nout > -1 && serviceThread->threadRunning() ) {
        eos = false;
        nout = _forecastAndProcess( eos, istreams, ostreams );
        if ( nout > -1 ) {
            workDone = true;
            // we chunked on data so move read pointer..
            istream = istreams.begin();
            for ( ; istream != istreams.end(); istream++ ) {
                int idx=std::distance( istreams.begin(), istream );
                // if we processed data for this stream
                if ( _input_ready[idx] ) {
                    size_t nitems = 0;
                    try {
                        nitems = gr_sptr->nitems_read( idx );
                    } catch(...){}
                    if ( nitems > (*istream)->nitems() ) {
                        LOG_WARN( burst_tagger_cc_base, "WORK CONSUMED MORE DATA THAN AVAILABLE, READ/AVAILABLE "
                                  << nitems << "/" << (*istream)->nitems() );
                        nitems = (*istream)->nitems();
                    }
                    (*istream)->consume( nitems );
                    LOG_TRACE( burst_tagger_cc_base, " CONSUME READ DATA ITEMS/REMAIN " << nitems << "/" << (*istream)->nitems());
                }
            }
            gr_sptr->reset_read_index();
        }
        // check for not enough data return
        if ( nout == -1 ) {
            // check for end of stream
            istream = istreams.begin();
            for ( ; istream != istreams.end() ; istream++) {
                if ( (*istream)->eos() ) {
                    eos=true;
                }
            }
            if ( eos ) {
                LOG_TRACE( burst_tagger_cc_base, "EOS SEEN, SENDING DOWNSTREAM " );
                _forecastAndProcess( eos, istreams, ostreams);
            }
        }
    }
    if ( eos ) {
        istream = istreams.begin();
        for ( ; istream != istreams.end() ; istream++ ) {
            int idx=std::distance( istreams.begin(), istream );
            LOG_DEBUG( burst_tagger_cc_base, " CLOSING INPUT STREAM IDX:" << idx );
            (*istream)->close();
        }
        // close remaining output streams
        ostream = ostreams.begin();
        for ( ; eos && ostream != ostreams.end(); ostream++ ) {
            int idx=std::distance( ostreams.begin(), ostream );
            LOG_DEBUG( burst_tagger_cc_base, " CLOSING OUTPUT STREAM IDX:" << idx );
            (*ostream)->close();
        }
    }
    //
    // set the read pointers of the GNU Radio Block to start at the beginning of the
    // supplied buffers
    //
    gr_sptr->reset_read_index();
    LOG_TRACE( burst_tagger_cc_base, " END OF TRANSFORM SERVICE FUNCTION....." << noutput_items );
    if ( nout == -1 && eos == false && !workDone ) {
        return NOOP;
    } else {
        return NORMAL;
    }
}
// Forecast how much output the GNU Radio block can produce from the buffered
// input, run general_work() once, and push the produced samples out of the
// output streams with appropriate time stamps.
// Returns: nout (>0) items produced, -1 when not enough data, -2 on a missing
// input stream.  Sets eos when work signals WORK_DONE; when called with eos
// already true, skips processing and just flushes/closes downstream.
int burst_tagger_cc_base::_forecastAndProcess( bool &eos, std::vector< gr_istream_base * > &istreams ,
                                               std::vector< gr_ostream_base * > &ostreams )
{
    typedef std::vector< gr_istream_base * > _IStreamList;
    typedef std::vector< gr_ostream_base * > _OStreamList;
    _OStreamList::iterator ostream;
    _IStreamList::iterator istream = istreams.begin();
    int nout = 0;
    bool dataReady = false;
    if ( !eos ) {
        uint64_t max_items_avail = 0;
        for ( int idx=0 ; istream != istreams.end() && serviceThread->threadRunning() ; idx++, istream++ ) {
            LOG_TRACE( burst_tagger_cc_base, "GET MAX ITEMS: STREAM:"<< idx << " NITEMS/SCALARS:" <<
                       (*istream)->nitems() << "/" << (*istream)->nelems() );
            max_items_avail = std::max( (*istream)->nitems(), max_items_avail );
        }
        if ( max_items_avail == 0 ) {
            LOG_TRACE( burst_tagger_cc_base, "DATA CHECK - MAX ITEMS NOUTPUT/MAX_ITEMS:" << noutput_items << "/" << max_items_avail);
            return -1;
        }
        //
        // calc number of output elements based on input items available
        //
        noutput_items = 0;
        if ( !gr_sptr->fixed_rate() ) {
            noutput_items = round_down((int32_t) (max_items_avail * gr_sptr->relative_rate()), gr_sptr->output_multiple());
            LOG_TRACE( burst_tagger_cc_base, " VARIABLE FORECAST NOUTPUT == " << noutput_items );
        } else {
            // Fixed-rate block: take the smallest positive per-stream estimate.
            istream = istreams.begin();
            for ( int i=0; istream != istreams.end(); i++, istream++ ) {
                int t_noutput_items = gr_sptr->fixed_rate_ninput_to_noutput( (*istream)->nitems() );
                if ( gr_sptr->output_multiple_set() ) {
                    t_noutput_items = round_up(t_noutput_items, gr_sptr->output_multiple());
                }
                if ( t_noutput_items > 0 ) {
                    if ( noutput_items == 0 ) {
                        noutput_items = t_noutput_items;
                    }
                    if ( t_noutput_items <= noutput_items ) {
                        noutput_items = t_noutput_items;
                    }
                }
            }
            LOG_TRACE( burst_tagger_cc_base, " FIXED FORECAST NOUTPUT/output_multiple == " <<
                       noutput_items << "/" << gr_sptr->output_multiple());
        }
        //
        // ask the block how much input they need to produce noutput_items...
        // if enough data is available to process then set the dataReady flag
        //
        int32_t outMultiple = gr_sptr->output_multiple();
        while ( !dataReady && noutput_items >= outMultiple ) {
            //
            // ask the block how much input they need to produce noutput_items...
            //
            gr_sptr->forecast(noutput_items, _ninput_items_required);
            LOG_TRACE( burst_tagger_cc_base, "--> FORECAST IN/OUT " << _ninput_items_required[0] << "/" << noutput_items );
            istream = istreams.begin();
            uint32_t dr_cnt=0;
            for ( int idx=0 ; noutput_items > 0 && istream != istreams.end(); idx++, istream++ ) {
                // check if buffer has enough elements
                _input_ready[idx] = false;
                if ( (*istream)->nitems() >= (uint64_t)_ninput_items_required[idx] ) {
                    _input_ready[idx] = true;
                    dr_cnt++;
                }
                LOG_TRACE( burst_tagger_cc_base, "ISTREAM DATACHECK NELMS/NITEMS/REQ/READY:" << (*istream)->nelems() <<
                           "/" << (*istream)->nitems() << "/" << _ninput_items_required[idx] << "/" << _input_ready[idx]);
            }
            // Not every stream ready: shrink the output request and retry.
            if ( dr_cnt < istreams.size() ) {
                if ( outMultiple > 1 ) {
                    noutput_items -= outMultiple;
                } else {
                    noutput_items /= 2;
                }
            } else {
                dataReady = true;
            }
            LOG_TRACE( burst_tagger_cc_base, " TRIM FORECAST NOUTPUT/READY " << noutput_items << "/" << dataReady );
        }
        // check if data is ready...
        if ( !dataReady ) {
            LOG_TRACE( burst_tagger_cc_base, "DATA CHECK - NOT ENOUGH DATA AVAIL/REQ:" << _istreams[0]->nitems() <<
                       "/" << _ninput_items_required[0] );
            return -1;
        }
        // reset looping variables
        int ritems = 0;
        int nitems = 0;
        // reset caching vectors
        _output_items.clear();
        _input_items.clear();
        _ninput_items.clear();
        istream = istreams.begin();
        for ( int idx=0 ; istream != istreams.end(); idx++, istream++ ) {
            // check if the stream is ready
            if ( !_input_ready[idx] ) {
                continue;
            }
            // get number of items remaining
            try {
                ritems = gr_sptr->nitems_read( idx );
            } catch(...){
                // something bad has happened, we are missing an input stream
                LOG_ERROR( burst_tagger_cc_base, "MISSING INPUT STREAM FOR GR BLOCK, STREAM ID:" << (*istream)->streamID );
                return -2;
            }
            nitems = (*istream)->nitems() - ritems;
            LOG_TRACE( burst_tagger_cc_base, " ISTREAM: IDX:" << idx << " ITEMS AVAIL/READ/REQ " << nitems << "/"
                       << ritems << "/" << _ninput_items_required[idx] );
            if ( nitems >= _ninput_items_required[idx] && nitems > 0 ) {
                //remove eos checks ...if ( nitems < _ninput_items_required[idx] ) nitems=0;
                _ninput_items.push_back( nitems );
                _input_items.push_back( (*istream)->read_pointer(ritems) );
            }
        }
        //
        // setup output buffer vector based on noutput..
        //
        ostream = ostreams.begin();
        for( ; ostream != ostreams.end(); ostream++ ) {
            (*ostream)->resize(noutput_items);
            _output_items.push_back( (*ostream)->write_pointer() );
        }
        nout=0;
        if ( _input_items.size() != 0 && serviceThread->threadRunning() ) {
            LOG_TRACE( burst_tagger_cc_base, " CALLING WORK.....N_OUT:" << noutput_items << " N_IN:" << nitems
                       << " ISTREAMS:" << _input_items.size() << " OSTREAMS:" << _output_items.size());
            nout = gr_sptr->general_work( noutput_items, _ninput_items, _input_items, _output_items);
            LOG_TRACE( burst_tagger_cc_base, "RETURN WORK ..... N_OUT:" << nout);
        }
        // check for stop condition from work method
        if ( nout < gr_block::WORK_DONE ) {
            LOG_WARN( burst_tagger_cc_base, "WORK RETURNED STOP CONDITION..." << nout );
            nout=0;
            eos = true;
        }
    }
    if (nout != 0 or eos ) {
        noutput_items = nout;
        LOG_TRACE( burst_tagger_cc_base, " WORK RETURNED: NOUT : " << nout << " EOS:" << eos);
        // Push the produced samples out of every output stream, choosing a
        // time stamp source: maintained local time line, the matching input
        // packet's time stamp, or time-of-day as a fallback.
        ostream = ostreams.begin();
        for ( int idx=0 ; ostream != ostreams.end(); idx++, ostream++ ) {
            bool gotPkt = false;
            TimeStamp pktTs;
            int inputIdx = idx;
            if ( (size_t)(inputIdx) >= istreams.size() ) {
                // More outputs than inputs: borrow the last non-null input packet.
                for ( inputIdx= istreams.size()-1; inputIdx > -1; inputIdx--) {
                    if ( not istreams[inputIdx]->pktNull() ) {
                        gotPkt = true;
                        pktTs = istreams[inputIdx]->getPktTimeStamp();
                        break;
                    }
                }
            } else {
                pktTs = istreams[inputIdx]->getPktTimeStamp();
                if ( not istreams[inputIdx]->pktNull() ){
                    gotPkt = true;
                }
            }
            LOG_TRACE( burst_tagger_cc_base, "PUSHING DATA ITEMS/STREAM_ID " << (*ostream)->nitems() << "/" << (*ostream)->streamID );
            if ( _maintainTimeStamp ) {
                // set time stamp for output samples based on input time stamp
                if ( (*ostream)->nelems() == 0 ) {
#ifdef TEST_TIME_STAMP
// NOTE(review): these debug lines use `ostream->...` on an iterator over
// pointers; `(*ostream)->...` looks intended — verify if this ifdef is enabled.
LOG_DEBUG( burst_tagger_cc_base, "SEED - TS SRI: xdelta:" << std::setprecision(12) << ostream->sri.xdelta );
LOG_DEBUG( burst_tagger_cc_base, "OSTREAM WRITE: maint:" << _maintainTimeStamp );
LOG_DEBUG( burst_tagger_cc_base, " mode:" << ostream->tstamp.tcmode );
LOG_DEBUG( burst_tagger_cc_base, " status:" << ostream->tstamp.tcstatus );
LOG_DEBUG( burst_tagger_cc_base, " offset:" << ostream->tstamp.toff );
LOG_DEBUG( burst_tagger_cc_base, " whole:" << std::setprecision(10) << ostream->tstamp.twsec );
LOG_DEBUG( burst_tagger_cc_base, "SEED - TS frac:" << std::setprecision(12) << ostream->tstamp.tfsec );
#endif
                    (*ostream)->setTimeStamp( pktTs, _maintainTimeStamp );
                }
                // write out samples, and set next time stamp based on xdelta and noutput_items
                (*ostream)->write ( noutput_items, eos );
            } else {
                // use incoming packet's time stamp to forward
                if ( gotPkt ) {
#ifdef TEST_TIME_STAMP
LOG_DEBUG( burst_tagger_cc_base, "OSTREAM SRI: items/xdelta:" << noutput_items << "/" << std::setprecision(12) << ostream->sri.xdelta );
LOG_DEBUG( burst_tagger_cc_base, "PKT - TS maint:" << _maintainTimeStamp );
LOG_DEBUG( burst_tagger_cc_base, " mode:" << pktTs.tcmode );
LOG_DEBUG( burst_tagger_cc_base, " status:" << pktTs.tcstatus );
LOG_DEBUG( burst_tagger_cc_base, " offset:" << pktTs.toff );
LOG_DEBUG( burst_tagger_cc_base, " whole:" << std::setprecision(10) << pktTs.twsec );
LOG_DEBUG( burst_tagger_cc_base, "PKT - TS frac:" << std::setprecision(12) << pktTs.tfsec );
#endif
                    (*ostream)->write( noutput_items, eos, pktTs );
                } else {
#ifdef TEST_TIME_STAMP
LOG_DEBUG( burst_tagger_cc_base, "OSTREAM SRI: items/xdelta:" << noutput_items << "/" << std::setprecision(12) << ostream->sri.xdelta );
LOG_DEBUG( burst_tagger_cc_base, "OSTREAM TOD maint:" << _maintainTimeStamp );
LOG_DEBUG( burst_tagger_cc_base, " mode:" << ostream->tstamp.tcmode );
LOG_DEBUG( burst_tagger_cc_base, " status:" << ostream->tstamp.tcstatus );
LOG_DEBUG( burst_tagger_cc_base, " offset:" << ostream->tstamp.toff );
LOG_DEBUG( burst_tagger_cc_base, " whole:" << std::setprecision(10) << ostream->tstamp.twsec );
LOG_DEBUG( burst_tagger_cc_base, "OSTREAM TOD frac:" << std::setprecision(12) << ostream->tstamp.tfsec );
#endif
                    // use time of day as time stamp
                    (*ostream)->write( noutput_items, eos, _maintainTimeStamp );
                }
            }
        } // for ostreams
    }
    return nout;
}
}
|
<|file_name|>borrowck-loan-of-static-data-issue-27616.rs<|end_file_name|><|fim▁begin|>use std::mem;<|fim▁hole|> let inner = &mut *b as *mut _;
mem::forget(b);
unsafe { &mut *inner }
}
fn evil(mut s: &'static mut String)
{
// create alias
let alias: &'static mut String = s;
let inner: &str = &alias;
// free value
*s = String::new(); //~ ERROR cannot assign
let _spray = "0wned".to_owned();
// ... and then use it
println!("{}", inner);
}
fn main() {
evil(leak(Box::new("hello".to_owned())));
}<|fim▁end|> |
fn leak<T>(mut b: Box<T>) -> &'static mut T {
// isn't this supposed to be safe? |
<|file_name|>gitrepository.cpp<|end_file_name|><|fim▁begin|>#include "gitrepository.h"
#include <qtcacheexception.h>
#include <git2.h>
inline void git_eval(int err){
if (err) {
const git_error* err = giterr_last();
throw QtC::QtCacheException(err->message);
}
}
// RAII holder for libgit2 objects: frees the wrapped pointer with the
// type-appropriate *_free() function when it leaves scope.  operator&
// exposes the underlying storage so instances can be passed directly to
// libgit2 "out" parameters; operator T* lets them be used as plain handles.
template<typename T> class git_auto
{
public:
    git_auto(T* object = NULL)
        : ref(object)
    {}
    ~git_auto();            // defined per-type below
    operator T*() const { return ref; }
    T** operator &() {return &ref; }
private:
    T* ref;
};
// Per-type destructor specializations mapping to the libgit2 free functions.
git_auto<git_repository>::~git_auto() { git_repository_free(this->ref); }
git_auto<git_signature>::~git_auto() { git_signature_free(this->ref); }
git_auto<git_index>::~git_auto() { git_index_free(this->ref); }
git_auto<git_tree>::~git_auto() { git_tree_free(this->ref); }
git_auto<git_reference>::~git_auto() { git_reference_free(this->ref); }
git_auto<git_commit>::~git_auto() { git_commit_free(this->ref); }
git_auto<git_status_list>::~git_auto() { git_status_list_free(this->ref); }
// Remotes must be disconnected before being freed.
git_auto<git_remote>::~git_auto() {
    if (this->ref && git_remote_connected(this->ref)) {
        git_remote_disconnect(this->ref);
    }
    git_remote_free(this->ref);
}
// Construct a repository wrapper rooted at localDirPath.  Initializes the
// libgit2 library; the matching shutdown happens in the destructor.  The
// on-disk repository itself is opened/created lazily by repository().
GitRepository::GitRepository(const QString &localDirPath)
    : m_local_dir_path(localDirPath),
      m_repo(NULL),
      m_signature(NULL)
{
    git_libgit2_init();
}
// Release the cached signature and repository handle, then balance the
// git_libgit2_init() call made in the constructor.
GitRepository::~GitRepository()
{
    if (NULL != m_signature) {
        git_signature_free(m_signature);
    }
    if (NULL != m_repo) {
        git_repository_free(m_repo);
    }
    git_libgit2_shutdown();
}
// True once a repository handle has been acquired (via open/init/clone).
bool GitRepository::isOpen()
{
    return m_repo != NULL;
}
// Take ownership of a new repository handle, freeing any previous one.
void GitRepository::setRepository(git_repository* repo)
{
    if (NULL != m_repo){
        git_repository_free(m_repo);
    }
    m_repo = repo;
}
// Lazily obtain the repository handle: try to open the existing repository
// at m_local_dir_path; if that fails, create a fresh one via init().  A
// failure of both open() and init() propagates the init() exception.
git_repository* GitRepository::repository()
{
    if (NULL == m_repo){
        try{
            open();
        }catch(...){
            try{
                init();
            }catch(...){
                throw;
            }
        }
    }
    return m_repo;
}
// Open the existing repository at the configured local path; throws
// QtCacheException (via git_eval) if it cannot be opened.
void GitRepository::open()
{
    git_repository* repo = NULL;
    git_eval(git_repository_open(&repo, m_local_dir_path.absolutePath().toLocal8Bit()));
    setRepository(repo);
}
// Create a new repository at the configured local path (creating the
// directory if needed) and record an empty "Initial commit" on HEAD.
void GitRepository::init()
{
    git_repository* repo = NULL;
    git_repository_init_options initopts = GIT_REPOSITORY_INIT_OPTIONS_INIT;
    initopts.flags = GIT_REPOSITORY_INIT_MKPATH;
    git_eval(git_repository_init_ext(&repo, m_local_dir_path.absolutePath().toLocal8Bit(), &initopts));
    // Write the (empty) index as a tree and commit it with no parents.
    git_auto<git_index> index;
    git_eval(git_repository_index(&index, repo));
    git_oid tree_id;
    git_eval(git_index_write_tree(&tree_id, index));
    git_auto<git_tree> tree;
    git_eval(git_tree_lookup(&tree, repo, &tree_id));
    git_oid commit_id;
    git_eval(git_commit_create_v(&commit_id, repo, "HEAD", signature(), signature(), NULL, "Initial commit", tree, 0));
    setRepository(repo);
}
// Switch to the named local branch, creating it from the current HEAD commit
// if it does not exist, then force-checkout the new HEAD.
void GitRepository::branch(const QString& name)
{
    git_repository* repo = repository();
    git_auto<git_reference> branch;
    int err = git_branch_lookup(&branch, repo, name.toLatin1(), GIT_BRANCH_LOCAL);
    if (err == GIT_ENOTFOUND){
        // Branch does not exist yet: create it at the current HEAD commit
        // (force flag = 1 overwrites a stale ref of the same name).
        git_oid parent_id;
        git_auto<git_commit> parent;
        git_eval(git_reference_name_to_id(&parent_id, repo, "HEAD"));
        git_eval(git_commit_lookup(&parent, repo, &parent_id));
        git_eval(git_branch_create(&branch, repo, name.toLocal8Bit(), parent, 1));
    }else{
        // Any other non-zero code is a real error (0 is a no-op here).
        git_eval(err);
    }
    git_eval(git_repository_set_head(repo, git_reference_name(branch)));
    git_checkout_options opts = GIT_CHECKOUT_OPTIONS_INIT;
    opts.checkout_strategy = GIT_CHECKOUT_FORCE;
    git_eval(git_checkout_head(repo, &opts));
}
// Stage a single file (path relative to the repository root) in the index
// and write the index back to disk.
// FIX: the index was freed twice — once explicitly via git_index_free() and
// again by the git_auto destructor.  The explicit free is removed; the
// git_index_write() result is now also checked like every other call.
void GitRepository::add(const QString& filepath)
{
    git_repository* repo = repository();
    git_auto<git_index> index;
    git_eval(git_repository_index(&index, repo));
    git_eval(git_index_add_bypath(index, filepath.toLatin1()));
    git_eval(git_index_write(index));
    // index is released by the git_auto destructor.
}
// Commit the current index on HEAD with the given message.  Does nothing
// when the status list reports no pending changes.
void GitRepository::commit(const QString& message)
{
    git_repository* repo = repository();
    {
        // Skip the commit entirely if the working tree/index is clean.
        git_auto<git_status_list> changes;
        git_eval(git_status_list_new(&changes, repo, NULL));
        if (git_status_list_entrycount(changes) == 0) { return; }
    }
    // Snapshot the index as a tree object.
    git_auto<git_index> index;
    git_eval(git_repository_index(&index, repo));
    git_oid tree_id;
    git_eval(git_index_write_tree(&tree_id, index));
    git_auto<git_tree> tree;
    git_eval(git_tree_lookup(&tree, repo, &tree_id));
    // Current HEAD commit becomes the single parent.
    git_oid parent_id;
    git_eval(git_reference_name_to_id(&parent_id, repo, "HEAD"));
    git_auto<git_commit> parent;
    git_eval(git_commit_lookup(&parent, repo, &parent_id));
    git_oid commit_id;
    git_signature* sig = signature();
    git_eval(git_commit_create_v(&commit_id, repo, "HEAD", sig, sig, NULL, message.toLocal8Bit(), tree, 1, parent));
}
// Clone the remote repository at `url` (branch "master") into the configured
// local directory and adopt the resulting handle.
void GitRepository::clone(const QString& url)
{
    git_repository* repo = NULL;
    git_clone_options opts = GIT_CLONE_OPTIONS_INIT;
    opts.checkout_branch = "master";
    git_eval(git_clone(&repo, url.toLatin1(), m_local_dir_path.absolutePath().toLocal8Bit(), &opts));
    setRepository(repo);
}
// Force-push the current HEAD branch to the "origin" remote.
// The refspec "+<ref>:<ref>" makes the update non-fast-forward safe on the
// remote (the leading '+' forces the update).
void GitRepository::push()
{
    git_repository* repo = repository();
    const char* remote_name = "origin";
    git_auto<git_remote> remote;
    git_eval(git_remote_lookup(&remote, repo, remote_name));
    git_eval(git_remote_connect(remote, GIT_DIRECTION_PUSH, NULL, NULL));
    git_auto<git_reference> head;
    git_eval(git_repository_head(&head, repo));
    QString refname = QString("+%1:%1").arg(git_reference_name(head));
    git_eval(git_remote_add_push(repo, remote_name, refname.toLatin1()));
    git_eval(git_remote_upload(remote, NULL, NULL));
}
void GitRepository::fetch()
{
git_repository* repo = repository();
const char* remote_name = "origin";
git_auto<git_remote> remote;
git_eval(git_remote_lookup(&remote, repo, remote_name));
git_eval(git_remote_connect(remote, GIT_DIRECTION_FETCH, NULL, NULL));
<|fim▁hole|> git_eval(git_remote_fetch(remote, NULL, NULL, NULL));
}
// Replace the cached author/committer signature with one stamped "now" for
// the given name and e-mail.  (Defaults for the arguments are declared in
// the header, which is not visible here.)
void GitRepository::setSignature(const QString& authorName, const QString& authorEmail)
{
    git_signature* sig = m_signature;
    if (sig) { git_signature_free(sig); }
    git_eval(git_signature_now(&sig, authorName.toLocal8Bit(), authorEmail.toLocal8Bit()));
    m_signature = sig;
}
// Return the cached signature, creating a default one on first use.
git_signature* GitRepository::signature()
{
    if (!m_signature) { setSignature(); }
    return m_signature;
}
<|file_name|>column.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
<%namespace name="helpers" file="/helpers.mako.rs" />
<% data.new_style_struct("Column", inherited=False) %>
// FIXME: This prop should be animatable.
${helpers.predefined_type("column-width",
"length::LengthOrAuto",
"Either::Second(Auto)",
initial_specified_value="Either::Second(Auto)",
parse_method="parse_non_negative_length",
extra_prefixes="moz",
boxed=True,
animation_type="none",
experimental=True,
spec="https://drafts.csswg.org/css-multicol/#propdef-column-width")}
// FIXME: This prop should be animatable.
${helpers.predefined_type("column-count", "IntegerOrAuto",
"Either::Second(Auto)",
parse_method="parse_positive",
initial_specified_value="Either::Second(Auto)",
experimental="True",
animation_type="none",
extra_prefixes="moz",
spec="https://drafts.csswg.org/css-multicol/#propdef-column-count")}
// FIXME: This prop should be animatable.
${helpers.predefined_type("column-gap",
"length::LengthOrNormal",
"Either::Second(Normal)",
parse_method='parse_non_negative_length',
extra_prefixes="moz",
experimental=True,
boxed=True,
animation_type="none",
spec="https://drafts.csswg.org/css-multicol/#propdef-column-gap")}
${helpers.single_keyword("column-fill", "balance auto", extra_prefixes="moz",
products="gecko", animation_type="none",
spec="https://drafts.csswg.org/css-multicol/#propdef-column-fill")}
// https://drafts.csswg.org/css-multicol-1/#propdef-column-rule-width
<%helpers:longhand name="column-rule-width" products="gecko" boxed="True" animation_type="normal" extra_prefixes="moz"
spec="https://drafts.csswg.org/css-multicol/#propdef-column-rule-width">
use app_units::Au;
use std::fmt;
use style_traits::ToCss;
use values::HasViewportPercentage;
use values::specified::BorderWidth;
pub mod computed_value {
use app_units::Au;
pub type T = Au;
}
pub type SpecifiedValue = BorderWidth;
#[inline]
pub fn get_initial_value() -> computed_value::T {
Au::from_px(3) // medium
}
#[inline]
pub fn get_initial_specified_value() -> SpecifiedValue {<|fim▁hole|> }
pub fn parse(context: &ParserContext, input: &mut Parser) -> Result<SpecifiedValue, ()> {
BorderWidth::parse(context, input)
}
</%helpers:longhand>
// https://drafts.csswg.org/css-multicol-1/#crc
${helpers.predefined_type("column-rule-color", "CSSColor",
"::cssparser::Color::CurrentColor",
initial_specified_value="specified::CSSColor::currentcolor()",
products="gecko", animation_type="normal", extra_prefixes="moz",
complex_color=True, need_clone=True,
spec="https://drafts.csswg.org/css-multicol/#propdef-column-rule-color")}
// It's not implemented in servo or gecko yet.
${helpers.single_keyword("column-span", "none all",
products="none", animation_type="none",
spec="https://drafts.csswg.org/css-multicol/#propdef-column-span")}
${helpers.single_keyword("column-rule-style",
"none hidden dotted dashed solid double groove ridge inset outset",
products="gecko", extra_prefixes="moz",
gecko_constant_prefix="NS_STYLE_BORDER_STYLE",
animation_type="none",
spec="https://drafts.csswg.org/css-multicol/#propdef-column-rule-style")}<|fim▁end|> | BorderWidth::Medium |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import './accounts-config.js';
import './i18n.js';<|fim▁hole|><|fim▁end|> | import './routes.js';
import '../../ui/iso3d/phaser-plugin-isometric.min.js'; |
<|file_name|>Binding.cpp<|end_file_name|><|fim▁begin|>/* This file is part of the KDE project
Copyright 2007 Stefan Nikolaus <[email protected]>
Copyright (C) 2008 Thomas Zander <[email protected]>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Library General Public
License as published by the Free Software Foundation; either
version 2 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details.
You should have received a copy of the GNU Library General Public License
along with this library; see the file COPYING.LIB. If not, write to
the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.
*/
#include "Binding.h"
#include "BindingModel.h"
#include <QRect>
#include <kdebug.h>
#include "CellStorage.h"
#include "Map.h"
#include "Sheet.h"
#include "Value.h"
using namespace Calligra::Sheets;
// Private, implicitly shared data of a Binding: owns the item model that
// exposes the bound cell region to views/consumers.
class Q_DECL_HIDDEN Binding::Private : public QSharedData
{
public:
    BindingModel* model;
    Private(Binding *q) : model(new BindingModel(q)) {}
    ~Private() { delete model; }
};

// Creates an empty binding (no cell region bound yet).
Binding::Binding()
    : d(new Private(this))
{
}

// Creates a binding for @p region; the region must be valid.
Binding::Binding(const Region& region)
    : d(new Private(this))
{
    Q_ASSERT(region.isValid());
    d->model->setRegion(region);
}

// Shallow copy: the private data (and thus the model) is shared.
Binding::Binding(const Binding& other)
    : d(other.d)
{
}

Binding::~Binding()
{
}

// True when no cell region is bound.
bool Binding::isEmpty() const
{
    return d->model->region().isEmpty();
}

// The item model exposing the bound cells; owned by this binding.
QAbstractItemModel* Binding::model() const
{
    return d->model;
}

// The currently bound cell region.
const Calligra::Sheets::Region& Binding::region() const
{
    return d->model->region();
}

// Rebinds to @p region (delegates to the model).
void Binding::setRegion(const Region& region)
{
    d->model->setRegion(region);
}
/**
 * Propagates cell changes in @p region into the model.
 *
 * Each element of @p region that lies on the bound sheet is intersected
 * with the bound range (only the first range/sheet of the model's region
 * is considered), the overlap is translated into model-local coordinates
 * and dataChanged() is emitted for it; finally changed() is emitted with
 * the union of all overlaps.
 */
void Binding::update(const Region& region)
{
    QRect rect;
    Region changedRegion;
    // Top-left of the bound range: used to translate sheet coordinates
    // into model-local (0-based) coordinates.
    const QPoint offset = d->model->region().firstRange().topLeft();
    const QRect range = d->model->region().firstRange();
    const Sheet* sheet = d->model->region().firstSheet();
    Region::ConstIterator end(region.constEnd());
    for (Region::ConstIterator it = region.constBegin(); it != end; ++it) {
        // Skip elements on other sheets.
        if (sheet != (*it)->sheet())
            continue;
        rect = range & (*it)->rect();              // overlap in sheet coords
        rect.translate(-offset.x(), -offset.y());  // -> model coords
        if (rect.isValid()) {
            d->model->emitDataChanged(rect);
            changedRegion.add(rect, (*it)->sheet());
        }
    }
    d->model->emitChanged(changedRegion);
}

// Shallow assignment: shares the other binding's data.
void Binding::operator=(const Binding & other)
{
    d = other.d;
}

// NOTE(review): compares the shared data pointers, i.e. object identity
// rather than region equality -- two independent bindings on the same
// region compare unequal. Confirm this is intended before changing.
bool Binding::operator==(const Binding& other) const
{
    return d == other.d;
}
bool Binding::operator<(const Binding& other) const
{<|fim▁hole|> return d < other.d;
}
// Returns the bound ranges keyed by each region element's name.
QHash<QString, QVector<QRect> > BindingModel::cellRegion() const
{
    QHash<QString, QVector<QRect> > answer;
    Region::ConstIterator end = m_region.constEnd();
    for (Region::ConstIterator it = m_region.constBegin(); it != end; ++it) {
        // Invalid elements contribute nothing.
        if (!(*it)->isValid()) {
            continue;
        }
        answer[(*it)->name()].append((*it)->rect());
    }
    return answer;
}

/**
 * Rebinds this model to the cells described by @p regionName.
 *
 * Returns false (leaving the current binding untouched) when
 * @p regionName does not parse to a valid region; otherwise the old
 * ranges are unbound in the cell storage and the new ones are bound
 * to this model's binding.
 */
bool BindingModel::setCellRegion(const QString& regionName)
{
    Q_ASSERT(m_region.isValid());
    Q_ASSERT(m_region.firstSheet());
    const Map* const map = m_region.firstSheet()->map();
    const Region region = Region(regionName, map);
    if (!region.isValid()) {
        kDebug() << qPrintable(regionName) << "is not a valid region.";
        return false;
    }
    // Clear the old binding.
    Region::ConstIterator end = m_region.constEnd();
    for (Region::ConstIterator it = m_region.constBegin(); it != end; ++it) {
        if (!(*it)->isValid()) {
            continue;
        }
        // FIXME Stefan: This may also clear other bindings!
        (*it)->sheet()->cellStorage()->setBinding(Region((*it)->rect(), (*it)->sheet()), Binding());
    }
    // Set the new region
    m_region = region;
    end = m_region.constEnd();
    for (Region::ConstIterator it = m_region.constBegin(); it != end; ++it) {
        if (!(*it)->isValid()) {
            continue;
        }
        (*it)->sheet()->cellStorage()->setBinding(Region((*it)->rect(), (*it)->sheet()), *m_binding);
    }
    return true;
}
/////// BindingModel

// Table model over the bound cell region; @p binding is the owner.
BindingModel::BindingModel(Binding* binding, QObject *parent)
    : QAbstractTableModel(parent)
    , m_binding(binding)
{
}

// True when @p regionName parses to a valid region on this model's map.
bool BindingModel::isCellRegionValid(const QString& regionName) const
{
    Q_CHECK_PTR(m_region.firstSheet());
    Q_CHECK_PTR(m_region.firstSheet()->map());
    return Region(regionName, m_region.firstSheet()->map()).isValid();
}

// Forwards the changed() signal for @p region.
void BindingModel::emitChanged(const Region& region)
{
    emit changed(region);
}

// Emits dataChanged() for the model-local rectangle @p rect.
void BindingModel::emitDataChanged(const QRect& rect)
{
    const QPoint tl = rect.topLeft();
    const QPoint br = rect.bottomRight();
    //kDebug(36005) << "emit QAbstractItemModel::dataChanged" << QString("%1:%2").arg(tl).arg(br);
    emit dataChanged(index(tl.y(), tl.x()), index(br.y(), br.x()));
}
/**
 * Returns the cell content for @p index.
 *
 * Qt::DisplayRole yields the formatted display text of the cell;
 * Qt::EditRole yields the raw value converted to a QVariant holding a
 * double, QString, QDateTime or nothing (see the comment below).
 * Other roles return an invalid QVariant.
 */
QVariant BindingModel::data(const QModelIndex& index, int role) const
{
    if ((m_region.isEmpty()) || (role != Qt::EditRole && role != Qt::DisplayRole))
        return QVariant();
    // Model coordinates are relative to the top-left of the bound range.
    const QPoint offset = m_region.firstRange().topLeft();
    const Sheet* sheet = m_region.firstSheet();
    int row = offset.y() + index.row();
    int column = offset.x() + index.column();
    Value value = sheet->cellStorage()->value(column, row);

    switch (role) {
    case Qt::DisplayRole: {
        // return the text displayed in the cell
        Cell c(sheet, column, row);
        bool showFormula = false;
        return c.displayText(Style(), &value, &showFormula);
    }
    case Qt::EditRole: {
        // return the actual cell value
        // KoChart::Value is either:
        //  - a double (interpreted as a value)
        //  - a QString (interpreted as a label)
        //  - a QDateTime (interpreted as a date/time value)
        //  - Invalid (interpreted as empty)
        QVariant variant;
        switch (value.type()) {
        case Value::Float:
        case Value::Integer:
            if (value.format() == Value::fmt_DateTime ||
                    value.format() == Value::fmt_Date ||
                    value.format() == Value::fmt_Time) {
                variant.setValue<QDateTime>(value.asDateTime(sheet->map()->calculationSettings()));
                break;
            } // fall through
        case Value::Boolean:
        case Value::Complex:
        case Value::Array:
            variant.setValue<double>(numToDouble(value.asFloat()));
            break;
        case Value::String:
        case Value::Error:
            variant.setValue<QString>(value.asString());
            break;
        case Value::Empty:
        case Value::CellRange:
        default:
            break;
        }
        return variant;
    }
    }
    //kDebug() << index.column() <<"," << index.row() <<"," << variant;
    return QVariant();
}
// The currently bound cell region.
const Calligra::Sheets::Region& BindingModel::region() const
{
    return m_region;
}

// Header values come straight from the sheet: the first row of the bound
// range for horizontal headers, the first column for vertical headers.
QVariant BindingModel::headerData(int section, Qt::Orientation orientation, int role) const
{
    if ((m_region.isEmpty()) || (role != Qt::EditRole && role != Qt::DisplayRole))
        return QVariant();
    const QPoint offset = m_region.firstRange().topLeft();
    const int col = (orientation == Qt::Vertical) ? offset.x() : offset.x() + section;
    const int row = (orientation == Qt::Vertical) ? offset.y() + section : offset.y();
    const Sheet* sheet = m_region.firstSheet();
    const Value value = sheet->cellStorage()->value(col, row);
    return value.asVariant();
}
// Number of rows: the height of the first bound range, 0 when unbound.
int BindingModel::rowCount(const QModelIndex& parent) const
{
    Q_UNUSED(parent);
    if (m_region.isEmpty()) {
        return 0;
    }
    return m_region.firstRange().height();
}
// Number of columns: the width of the first bound range, 0 when unbound.
int BindingModel::columnCount(const QModelIndex& parent) const
{
    Q_UNUSED(parent);
    if (m_region.isEmpty()) {
        return 0;
    }
    return m_region.firstRange().width();
}
// Sets the bound region directly, without touching cell-storage bindings
// (contrast with setCellRegion(), which does).
void BindingModel::setRegion(const Region& region)
{
    m_region = region;
}
<|file_name|>mul_add.rs<|end_file_name|><|fim▁begin|>//! Implements vertical (lane-wise) floating-point `mul_add`.
macro_rules! impl_math_float_mul_add {
([$elem_ty:ident; $elem_count:expr]: $id:ident | $test_tt:tt) => {
impl $id {
/// Fused multiply add: `self * y + z`
#[inline]
pub fn mul_add(self, y: Self, z: Self) -> Self {
use crate::codegen::math::float::mul_add::MulAdd;
MulAdd::mul_add(self, y, z)
}
}
test_if!{
$test_tt:
paste::item! {
pub mod [<$id _math_mul_add>] {
use super::*;
#[cfg_attr(not(target_arch = "wasm32"), test)] #[cfg_attr(target_arch = "wasm32", wasm_bindgen_test)]
fn mul_add() {
let z = $id::splat(0 as $elem_ty);
let o = $id::splat(1 as $elem_ty);
let t = $id::splat(2 as $elem_ty);
let t3 = $id::splat(3 as $elem_ty);
let f = $id::splat(4 as $elem_ty);
assert_eq!(z, z.mul_add(z, z));
assert_eq!(o, o.mul_add(o, z));
assert_eq!(o, o.mul_add(z, o));
assert_eq!(o, z.mul_add(o, o));<|fim▁hole|> assert_eq!(t, t.mul_add(o, z));
assert_eq!(f, t.mul_add(t, z));
assert_eq!(f, t.mul_add(o, t));
assert_eq!(t3, t.mul_add(o, o));
}
}
}
}
};
}<|fim▁end|> |
assert_eq!(t, o.mul_add(o, o));
assert_eq!(t, o.mul_add(t, z)); |
<|file_name|>format.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import signal
import subprocess
import io
import os
import re
import locale
import tempfile
import warnings
from luigi import six
class FileWrapper(object):
    """
    Wrap `file` in a "real" so stuff can be added to it after creation.
    """
    # Fix: the opening triple-quote of this docstring was left unterminated
    # in the file, which swallowed the method definitions below as string
    # content; the docstring is restored and properly closed here.

    def __init__(self, file_object):
        self._subpipe = file_object

    def __getattr__(self, name):
        # forward calls to 'write', 'close' and other methods not defined below
        return getattr(self._subpipe, name)

    def __enter__(self, *args, **kwargs):
        # instead of returning whatever is returned by __enter__ on the subpipe
        # this returns self, so whatever custom injected methods are still available
        # this might cause problems with custom file_objects, but seems to work
        # fine with standard python `file` objects which is the only default use
        return self

    def __exit__(self, *args, **kwargs):
        return self._subpipe.__exit__(*args, **kwargs)

    def __iter__(self):
        return iter(self._subpipe)
class InputPipeProcessWrapper(object):
    """
    Runs a command as a subprocess and exposes its stdout for reading.

    The wrapped ``input_pipe`` (if given) feeds the command's stdin; when
    the pipe has no real file descriptor its contents are first copied to
    a temporary file, since subprocess needs a fileno.
    """

    def __init__(self, command, input_pipe=None):
        """
        Initializes a InputPipeProcessWrapper instance.

        :param command: a subprocess.Popen instance with stdin=input_pipe and
                        stdout=subprocess.PIPE.
                        Alternatively, just its args argument as a convenience.
        """
        self._command = command
        self._input_pipe = input_pipe
        # False when the input had to be spooled to a temp file first.
        self._original_input = True

        if input_pipe is not None:
            try:
                input_pipe.fileno()
            except AttributeError:
                # subprocess require a fileno to work, if not present we copy to disk first
                self._original_input = False
                f = tempfile.NamedTemporaryFile('wb', prefix='luigi-process_tmp', delete=False)
                self._tmp_file = f.name
                f.write(input_pipe.read())
                input_pipe.close()
                f.close()
                self._input_pipe = FileWrapper(io.BufferedReader(io.FileIO(self._tmp_file, 'r')))

        self._process = command if isinstance(command, subprocess.Popen) else self.create_subprocess(command)
        # we want to keep a circular reference to avoid garbage collection
        # when the object is used in, e.g., pipe.read()
        self._process._selfref = self

    def create_subprocess(self, command):
        """
        http://www.chiark.greenend.org.uk/ucgi/~cjwatson/blosxom/2009-07-02-python-sigpipe.html
        """

        def subprocess_setup():
            # Python installs a SIGPIPE handler by default. This is usually not what
            # non-Python subprocesses expect.
            signal.signal(signal.SIGPIPE, signal.SIG_DFL)

        return subprocess.Popen(command,
                                stdin=self._input_pipe,
                                stdout=subprocess.PIPE,
                                preexec_fn=subprocess_setup,
                                close_fds=True)

    def _finish(self):
        # Need to close this before input_pipe to get all SIGPIPE messages correctly
        self._process.stdout.close()
        if not self._original_input and os.path.exists(self._tmp_file):
            # Clean up the temp file we spooled the input to.
            os.remove(self._tmp_file)
        if self._input_pipe is not None:
            self._input_pipe.close()
        self._process.wait()  # deadlock?
        if self._process.returncode not in (0, 141, 128 - 141):
            # 141 == 128 + 13 == 128 + SIGPIPE - normally processes exit with 128 + {reiceived SIG}
            # 128 - 141 == -13 == -SIGPIPE, sometimes python receives -13 for some subprocesses
            raise RuntimeError('Error reading from pipe. Subcommand exited with non-zero exit status %s.' % self._process.returncode)

    def close(self):
        self._finish()

    def __del__(self):
        self._finish()

    def __enter__(self):
        return self

    def _abort(self):
        """
        Call _finish, but eat the exception (if any).
        """
        try:
            self._finish()
        except KeyboardInterrupt:
            raise
        except BaseException:
            pass

    def __exit__(self, type, value, traceback):
        # On error, tear down quietly; on success, propagate finish errors.
        if type:
            self._abort()
        else:
            self._finish()

    def __getattr__(self, name):
        # Delegate unknown attributes to the process stdout, then the input pipe.
        if name == '_process':
            raise AttributeError(name)
        try:
            return getattr(self._process.stdout, name)
        except AttributeError:
            return getattr(self._input_pipe, name)

    def __iter__(self):
        for line in self._process.stdout:
            yield line
        self._finish()

    def readable(self):
        return True

    def writable(self):
        return False

    def seekable(self):
        return False
class OutputPipeProcessWrapper(object):
    """
    Runs a command as a subprocess and exposes its stdin for writing.

    Data written to this wrapper is piped into ``command``; the command's
    stdout goes to ``output_pipe`` (or is inherited when None). stdin is
    flushed every WRITES_BEFORE_FLUSH writes.
    """

    # Number of write() calls between explicit stdin flushes.
    WRITES_BEFORE_FLUSH = 10000

    def __init__(self, command, output_pipe=None):
        self.closed = False
        self._command = command
        self._output_pipe = output_pipe
        self._process = subprocess.Popen(command,
                                         stdin=subprocess.PIPE,
                                         stdout=output_pipe,
                                         close_fds=True)
        self._flushcount = 0

    def write(self, *args, **kwargs):
        self._process.stdin.write(*args, **kwargs)
        # Periodic flush so long-running writers make steady progress.
        self._flushcount += 1
        if self._flushcount == self.WRITES_BEFORE_FLUSH:
            self._process.stdin.flush()
            self._flushcount = 0

    def writeLine(self, line):
        assert '\n' not in line
        self.write(line + '\n')

    def _finish(self):
        """
        Closes and waits for subprocess to exit.
        """
        if self._process.returncode is None:
            self._process.stdin.flush()
            self._process.stdin.close()
            self._process.wait()
            self.closed = True

    def __del__(self):
        if not self.closed:
            self.abort()

    def __exit__(self, type, value, traceback):
        # Success: close (raises on bad exit code). Error: just abort.
        if type is None:
            self.close()
        else:
            self.abort()

    def __enter__(self):
        return self

    def close(self):
        self._finish()
        if self._process.returncode == 0:
            if self._output_pipe is not None:
                self._output_pipe.close()
        else:
            raise RuntimeError('Error when executing command %s' % self._command)

    def abort(self):
        self._finish()

    def __getattr__(self, name):
        # Delegate unknown attributes to the process stdin, then the output pipe.
        if name == '_process':
            raise AttributeError(name)
        try:
            return getattr(self._process.stdin, name)
        except AttributeError:
            return getattr(self._output_pipe, name)

    def readable(self):
        return False

    def writable(self):
        return True

    def seekable(self):
        return False
class BaseWrapper(object):
    """
    Common delegation base for stream wrappers.

    Stores the wrapped stream and forwards unknown attribute access, the
    context-manager protocol and iteration to it. Iteration closes the
    wrapper when the underlying stream is exhausted.
    """

    def __init__(self, stream, *args, **kwargs):
        self._stream = stream
        try:
            # Cooperate with multiple-inheritance bases that take the stream.
            super(BaseWrapper, self).__init__(stream, *args, **kwargs)
        except TypeError:
            pass

    def __getattr__(self, name):
        # Guard against infinite recursion before _stream is assigned.
        if name == '_stream':
            raise AttributeError(name)
        return getattr(self._stream, name)

    def __enter__(self):
        self._stream.__enter__()
        return self

    def __exit__(self, *args):
        self._stream.__exit__(*args)

    def __iter__(self):
        try:
            for line in self._stream:
                yield line
        finally:
            self.close()
class NewlineWrapper(BaseWrapper):
    """
    Byte-stream wrapper that normalizes line endings.

    ``newline`` may be None (translate to LF on read, os.linesep on
    write), an empty string (return data untouched on read) or one of
    LF, CRLF, CR (translate every line ending to that sequence).
    """

    def __init__(self, stream, newline=None):
        if newline is None:
            self.newline = newline
        else:
            self.newline = newline.encode('ascii')

        if self.newline not in (b'', b'\r\n', b'\n', b'\r', None):
            raise ValueError("newline need to be one of {b'', b'\r\n', b'\n', b'\r', None}")
        super(NewlineWrapper, self).__init__(stream)

    def read(self, n=-1):
        b = self._stream.read(n)

        if self.newline == b'':
            return b

        if self.newline is None:
            newline = b'\n'
        else:
            # Fix: `newline` used to be left unbound here whenever an
            # explicit newline (b'\n', b'\r\n' or b'\r') was configured,
            # raising NameError on the first read.
            newline = self.newline

        return re.sub(b'(\n|\r\n|\r)', newline, b)

    def writelines(self, lines):
        # NOTE(review): `self.newline == ''` compares bytes with str and is
        # never true on Python 3; presumably a Python 2 leftover -- confirm.
        if self.newline is None or self.newline == '':
            newline = os.linesep.encode('ascii')
        else:
            newline = self.newline

        self._stream.writelines(
            (re.sub(b'(\n|\r\n|\r)', newline, line) for line in lines)
        )

    def write(self, b):
        if self.newline is None or self.newline == '':
            newline = os.linesep.encode('ascii')
        else:
            newline = self.newline

        self._stream.write(re.sub(b'(\n|\r\n|\r)', newline, b))
class MixedUnicodeBytesWrapper(BaseWrapper):
    """
    Byte-stream wrapper that tolerates unicode writes by encoding them.

    Text passed to :meth:`write`/:meth:`writelines` is encoded with
    ``encoding`` (default: the locale's preferred encoding) and a warning
    is emitted, since callers should really be writing bytes.
    """

    def __init__(self, stream, encoding=None):
        # Fall back to the platform/locale default encoding.
        if encoding is None:
            encoding = locale.getpreferredencoding()
        self.encoding = encoding
        super(MixedUnicodeBytesWrapper, self).__init__(stream)

    def write(self, b):
        self._stream.write(self._convert(b))

    def writelines(self, lines):
        self._stream.writelines((self._convert(line) for line in lines))

    def _convert(self, b):
        # Encode text to bytes and warn so the caller can be fixed.
        if isinstance(b, six.text_type):
            b = b.encode(self.encoding)
            warnings.warn('Writing unicode to byte stream', stacklevel=2)
        return b
class Format(object):
    """
    Interface for format specifications.

    A format knows how to wrap a raw input pipe for reading and a raw
    output pipe for writing; concrete subclasses override both hooks.
    Formats compose left-to-right with the ``>>`` operator.
    """

    @classmethod
    def pipe_reader(cls, input_pipe):
        # Subclasses must return a wrapped, readable stream.
        raise NotImplementedError()

    @classmethod
    def pipe_writer(cls, output_pipe):
        # Subclasses must return a wrapped, writable stream.
        raise NotImplementedError()

    def __rshift__(self, other):
        return ChainFormat(self, other)
class ChainFormat(Format):
    """
    Composition of several formats applied in sequence.

    ``input`` is taken from the first format, ``output`` from the last
    (when those attributes exist). Unless ``check_consistency=False`` is
    passed, adjacent formats are checked so each one's output type
    matches the next one's input type.
    """

    def __init__(self, *args, **kwargs):
        self.args = args

        try:
            self.input = args[0].input
        except AttributeError:
            pass
        try:
            self.output = args[-1].output
        except AttributeError:
            pass
        if not kwargs.get('check_consistency', True):
            return
        for x in range(len(args) - 1):
            try:
                if args[x].output != args[x + 1].input:
                    raise TypeError(
                        'The format chaining is not valid, %s expect %s'
                        'but %s provide %s' % (
                            args[x].__class__.__name__,
                            args[x].input,
                            args[x + 1].__class__.__name__,
                            args[x + 1].output,
                        )
                    )
            except AttributeError:
                # Formats without declared input/output are not checked.
                pass

    def pipe_reader(self, input_pipe):
        # Wrap from the last format outwards so reads unwind the chain.
        for x in reversed(self.args):
            input_pipe = x.pipe_reader(input_pipe)
        return input_pipe

    def pipe_writer(self, output_pipe):
        for x in reversed(self.args):
            output_pipe = x.pipe_writer(output_pipe)
        return output_pipe
class TextWrapper(io.TextIOWrapper):
    """
    TextIOWrapper variant that defers closing to the wrapped stream.

    io.TextIOWrapper normally closes the underlying buffer on __exit__
    and __del__; here we only flush and delegate shutdown to the wrapped
    stream, which may have its own teardown protocol (e.g. a process pipe).
    """

    def __exit__(self, *args):
        # io.TextIOWrapper close the file on __exit__, let the underlying file decide
        if not self.closed and self.writable():
            super(TextWrapper, self).flush()

        self._stream.__exit__(*args)

    def __del__(self, *args):
        # io.TextIOWrapper close the file on __del__, let the underlying file decide
        if not self.closed and self.writable():
            super(TextWrapper, self).flush()

        try:
            self._stream.__del__(*args)
        except AttributeError:
            pass

    def __init__(self, stream, *args, **kwargs):
        self._stream = stream
        try:
            super(TextWrapper, self).__init__(stream, *args, **kwargs)
        except TypeError:
            pass

    def __getattr__(self, name):
        # Guard against infinite recursion before _stream is assigned.
        if name == '_stream':
            raise AttributeError(name)
        return getattr(self._stream, name)

    def __enter__(self):
        self._stream.__enter__()
        return self
class NopFormat(Format):
    """Identity format: hands pipes back unchanged."""

    def pipe_reader(self, input_pipe):
        return input_pipe

    def pipe_writer(self, output_pipe):
        return output_pipe
class WrappedFormat(Format):
    """
    Format backed by a wrapper class (``wrapper_cls`` set on subclasses).

    Constructor arguments are stored and forwarded to the wrapper when a
    pipe is wrapped for reading or writing.
    """

    def __init__(self, *args, **kwargs):
        self.args = args
        self.kwargs = kwargs

    def pipe_reader(self, input_pipe):
        return self.wrapper_cls(input_pipe, *self.args, **self.kwargs)

    def pipe_writer(self, output_pipe):
        return self.wrapper_cls(output_pipe, *self.args, **self.kwargs)
class TextFormat(WrappedFormat):
    # Text wrapped over a byte stream (decode on read, encode on write).
    input = 'unicode'
    output = 'bytes'
    wrapper_cls = TextWrapper


class MixedUnicodeBytesFormat(WrappedFormat):
    # Byte stream that also accepts (and encodes) unicode writes.
    output = 'bytes'
    wrapper_cls = MixedUnicodeBytesWrapper


class NewlineFormat(WrappedFormat):
    # Byte stream with newline normalization.
    input = 'bytes'
    output = 'bytes'
    wrapper_cls = NewlineWrapper
class GzipFormat(Format):
    """
    Byte format that pipes data through external ``gzip``/``gunzip``.

    ``compression_level``, when given, is passed to gzip as ``-N``.
    """

    input = 'bytes'
    output = 'bytes'

    def __init__(self, compression_level=None):
        self.compression_level = compression_level

    def pipe_reader(self, input_pipe):
        return InputPipeProcessWrapper(['gunzip'], input_pipe)

    def pipe_writer(self, output_pipe):
        args = ['gzip']
        if self.compression_level is not None:
            args.append('-' + str(int(self.compression_level)))
        return OutputPipeProcessWrapper(args, output_pipe)
class Bzip2Format(Format):
    """Byte format that pipes data through external ``bzip2``/``bzcat``."""

    input = 'bytes'
    output = 'bytes'

    def pipe_reader(self, input_pipe):
        return InputPipeProcessWrapper(['bzcat'], input_pipe)

    def pipe_writer(self, output_pipe):
        return OutputPipeProcessWrapper(['bzip2'], output_pipe)
# Canonical, shared format instances.
Text = TextFormat()                 # text with the default encoding
UTF8 = TextFormat(encoding='utf8')  # text, forced UTF-8
Nop = NopFormat()                   # bytes passed through untouched
SysNewLine = NewlineFormat()        # bytes with normalized newlines
Gzip = GzipFormat()
Bzip2 = Bzip2Format()
MixedUnicodeBytes = MixedUnicodeBytesFormat()
def get_default_format():
    """
    Returns the default format for the running interpreter/platform:
    ``Text`` on Python 3, ``Nop`` on Python 2 where os.linesep is LF,
    ``SysNewLine`` otherwise.
    """
    if six.PY3:
        return Text
    elif os.linesep == '\n':
        return Nop
    else:
        return SysNewLine
"""
|
<|file_name|>tls.rs<|end_file_name|><|fim▁begin|>#[macro_use] extern crate log;
extern crate env_logger;
extern crate futures;
extern crate lapin_futures as lapin;
extern crate rustls;<|fim▁hole|>extern crate tokio_core;
extern crate tokio_rustls;
extern crate webpki_roots;
use futures::future::Future;
use lapin::client::ConnectionOptions;
use lapin::channel::ConfirmSelectOptions;
use rustls::ClientConfig;
use std::sync::Arc;
use tokio_core::reactor::Core;
use tokio_core::net::TcpStream;
use tokio_rustls::ClientConfigExt;
/// Example: connect to a TLS-enabled AMQP broker with lapin over
/// rustls/tokio, then open a single confirm channel and log its id.
fn main() {
    env_logger::init().unwrap();

    // Connection parameters for the TLS-enabled AMQP endpoint.
    let host = "localhost";
    let port = 5671;
    let username = "guest";
    let password = "guest";

    // TLS client config trusting the bundled Mozilla root store.
    let mut config = ClientConfig::new();
    config.root_store.add_trust_anchors(&webpki_roots::ROOTS);
    let config = Arc::new(config);

    let mut core = Core::new().unwrap();
    let handle = core.handle();

    // Blocking connect, then hand the socket to the tokio reactor.
    let raw_stream = std::net::TcpStream::connect((host, port)).unwrap();

    core.run(
        // Pipeline: TCP -> TLS handshake -> AMQP connect -> confirm channel.
        TcpStream::from_stream(raw_stream, &handle).map(|stream| futures::future::ok(stream)).unwrap().and_then(|stream| {
            config.connect_async(host, stream)
        }).and_then(|stream| {
            lapin::client::Client::connect(stream, &ConnectionOptions {
                username: username.to_string(),
                password: password.to_string(),
                ..Default::default()
            })
        }).and_then(|(client, _)| {
            client.create_confirm_channel(ConfirmSelectOptions::default()).and_then(|channel| {
                let id = channel.id;
                info!("created channel with id: {}", id);
                Ok(())
            })
        })
    ).unwrap();
}
<|file_name|>wmb.comment.js<|end_file_name|><|fim▁begin|>/**************************************************
* Funkcje związane z geolokalizacją GPS
**************************************************/
WMB.Comment = {
/**
* Funkcja dodająca nowy komentarz do zgłoszenia
*
* @method onAddSubmit
*/
onAddSubmit: function(marker_id) {
if (WMB.User.isLoggedIn()) {
if ($('#add-comment-form')[0].checkValidity()) {
var post_data = new FormData();
post_data.append('user_id', parseInt(getCookie('user_id')));
post_data.append('marker_id', parseInt(marker_id));
post_data.append('comment', $('#comment').val());
$.ajax({
url: REST_API_URL + 'comment',
beforeSend: function(request) {
request.setRequestHeader('Authorization', 'Token ' + getCookie('token'));
},
type: 'POST',
cache: false,
processData: false,
contentType: false,
data: post_data,
success: function(s) {
$('#add-comment-modal').modal('hide');
bootbox.alert('Pomyślnie dodaną komentarz.', function() {
location.reload();
});
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
$('#add-comment-modal').modal('hide');
var response = JSON.parse(XMLHttpRequest.responseText);
bootbox.alert(response.message);
}
});
}
} else {
bootbox.alert('Aby móc dodawać komentarze musisz być zalogowany.');
}
},
onEditClick: function(id) {
// Pobranie danych o komentarzu
$.ajax({
url: REST_API_URL + 'comment/id/' + id,
type: 'GET',
success: function(data) {
// Wstawienie danych do formularza
$('#comment_id').val(id);
$('#comment').val(data.comment);
$('#edit-comment-modal').modal('show');
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
var response = JSON.parse(XMLHttpRequest.responseText);
bootbox.alert(response.message, function() {
window.location.href = WEBSITE_URL;
});
}
});
},
onEditSubmit: function() {
if ($('#edit-comment-form')[0].checkValidity()) {
var comment_data = new FormData(),
comment_id = parseInt($('#comment_id').val()),
comment = $('#comment').val();
comment_data.append('comment', comment);
$.ajax({
url: REST_API_URL + 'comment/id/' + comment_id + '/edit',
beforeSend: function (request) {
request.setRequestHeader('Authorization', 'Token ' + getCookie('token'));
},
type: 'POST',
cache: false,
processData: false,
contentType: false,
data: comment_data,
success: function(s) {
$('#edit-comment-modal').modal('hide');
bootbox.alert('Pomyślnie zedytowano komentarz.', function() {<|fim▁hole|> });
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
var response = JSON.parse(XMLHttpRequest.responseText);
bootbox.alert(response.message);
}
});
}
},
onDeleteClick: function(id) {
$.ajax({
url: REST_API_URL + 'comment/id/' + id,
beforeSend: function (request) {
request.setRequestHeader('Authorization', 'Token ' + getCookie('token'));
},
type: 'DELETE',
cache: false,
processData: false,
contentType: false,
success: function(data) {
$('#edit-comment-modal').modal('hide');
bootbox.alert('Pomyślnie usunięto komentarz.', function() {
location.reload();
});
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
var response = JSON.parse(XMLHttpRequest.responseText);
bootbox.alert(response.message, function() {
window.location.href = WEBSITE_URL;
});
}
});
},
/**
* Funkcja służąca do zmiany statusu komentarza
*
* @method changeStatus
* @param {Integer} id Identyfikator komentarza
* @param {Integer} status_id Identyfikator statusu
*/
changeStatus: function(id, status_id) {
if (WMB.User.isLoggedIn()) {
$.ajax({
url: REST_API_URL + 'comment/id/' + id + '/status/' + status_id,
beforeSend: function (request) {
request.setRequestHeader('Authorization', 'Token ' + getCookie('token'));
},
type: 'PUT',
cache: false,
processData: false,
contentType: false,
success: function(data) {
if (status_id == 0) {
bootbox.alert('Pomyślnie zgłoszono nadużycie w komentarzu.', function() {
location.reload();
});
} else {
bootbox.alert('Pomyślnie zmieniono status komentarza na ' + comment_statuses[status_id] + '.', function() {
location.reload();
});
}
},
error: function(XMLHttpRequest, textStatus, errorThrown) {
var response = JSON.parse(XMLHttpRequest.responseText);
bootbox.alert(response.message, function() {
window.location.href = WEBSITE_URL;
});
}
});
} else {
bootbox.alert('Aby zgłosić nadużycie musisz być zalogowany/a.');
}
},
/**
* Funkcja pobierająca wszystkie komentarze dla danego zgłoszenia
*
* @method getAll
* @param {Integer} marker_id Identyfikator zgłoszenia
*/
getAll: function(marker_id) {
$.ajax({
url: REST_API_URL + 'comment/marker/' + marker_id,
type: 'GET',
cache: false,
dataType: 'json',
success: function(data) {
$.each(data.comments, function(i) {
if (data.comments[i].status_id != 0 && data.comments[i].user_id == parseInt(getCookie('user_id'))) {
$('#marker-comment-list').append(
'<tr>' +
'<td>' + data.comments[i].login + '</td>' +
'<td>' + data.comments[i].date + '</td>' +
'<td>' + data.comments[i].comment + '</td>' +
'<td>brak</td>' +
'</tr>');
} else if (data.comments[i].status_id != 0) {
$('#marker-comment-list').append(
'<tr>' +
'<td>' + data.comments[i].login + '</td>' +
'<td>' + data.comments[i].date + '</td>' +
'<td>' + data.comments[i].comment + '</td>' +
'<td><a href="#" onclick="WMB.Comment.changeStatus(' + data.comments[i].comment_id + ', 0)">Zgłoś nadużycie</a></td>' +
'</tr>');
}
});
}
});
}
}<|fim▁end|> | window.location.href = WEBSITE_URL + 'comments'; |
<|file_name|>line.js<|end_file_name|><|fim▁begin|>uv.LineGraph = function (graphdef, config) {
var self = this;
uv.Graph.call(self, graphdef, config).setDefaults().init();
self.linegroups = {};
self.dataset = uv.util.getDataArray(self.graphdef);
var linegroup, linepath, linefunc, idx, len = self.categories.length,
domainData = self.labels;
self.axes[self.config.graph.orientation === 'Horizontal' ? 'ver' : 'hor'].scale.domain(domainData);
for (idx = 0; idx < len; idx = idx + 1) {
linepath = self.chart.append('g').classed('cg-' + uv.util.formatClassName(self.categories[idx]), true)
.append('g').classed('cge-' + uv.util.formatClassName(self.categories[idx]), true).datum(self.dataset[idx]);
linegroup = {
path: linepath,
func: undefined
};
self['draw' + self.config.graph.orientation + 'Lines'](linegroup, idx);
self.linegroups[self.categories[idx]] = linegroup;
}
self.finalize();
};
// Inherit shared graph behaviour from uv.Graph.
uv.LineGraph.prototype = uv.util.inherits(uv.Graph);

/**
 * Applies line-graph specific defaults before rendering: sets the
 * graphdef's stepup mode to 'normal' and the scale ordinality to 0.
 * @returns {uv.LineGraph} this, for chaining
 */
uv.LineGraph.prototype.setDefaults = function () {
    var self = this;
    self.graphdef.stepup = 'normal';
    self.config.scale.ordinality = 0;
    return this;
};
uv.LineGraph.prototype.drawHorizontalLines = function (linegroup, idx) {
var self = this,
axes = self.axes,
config = self.config,
color = uv.util.getColorBand(self.config, idx);
self.axes.ver.scale.rangePoints([0, self.height()]);
<|fim▁hole|> linegroup.func = d3.svg.line()
.x(function (d) { return axes.hor.scale(d.value); })
.y(function (d) { return axes.ver.scale(d.name) + axes.ver.scale.rangeBand() / 2; })
.interpolate(uv.config.line.interpolation);
linegroup.path.append('path')
.classed('cr-' + uv.util.formatClassName(self.categories[idx]), true)
.attr('d', linegroup.func)
.style('fill', 'none')
.style('stroke', color)
.style('stroke-width', self.config.line.strokewidth)
.style('stroke-opacity', self.config.line.strokeopacity)
.transition()
.duration(3 * self.config.effects.duration)
.delay(2 * idx * self.config.effects.duration)
.style('stroke-opacity', 1)
.call(uv.util.endAll, function (d,i){
d3.select(this.parentNode.parentNode).selectAll('path').on('mouseover', uv.effects.line.mouseover(self, idx));
d3.select(this.parentNode.parentNode).selectAll('path').on('mouseout', uv.effects.line.mouseout(self, idx));
d3.select(this.parentNode.parentNode).selectAll('circle').on('mouseover', uv.effects.line.mouseover(self, idx));
d3.select(this.parentNode.parentNode).selectAll('circle').on('mouseout', uv.effects.line.mouseout(self, idx));
});
if (self.config.line.showcircles) {
linegroup.path.selectAll('circle')
.data(self.dataset[idx])
.enter().append('circle')
.classed('cr-' + uv.util.formatClassName(self.categories[idx]), true)
.attr('cx', linegroup.func.x())
.attr('cy', linegroup.func.y())
.attr('r', self.config.line.circleradius)
.style('fill', color)
.style('fill-opacity', self.config.line.circleopacity)
.style('stroke', '#fff')
.append('svg:title')
.text( function (d, i) { return uv.util.getTooltipText(self, self.categories[idx], self.labels[i], d);});
}
linegroup.path.selectAll('text')
.data(self.dataset[idx])
.enter().append('text')
.attr('x', function (d) { return axes.hor.scale(d.value); })
.attr('y', function(d) { return axes.ver.scale(d.name) + axes.ver.scale.rangeBand()/2; })
.attr('dx', 10)
.attr('dy', '.35em')
.attr('text-anchor', 'start')
.style('opacity', 0)
.classed('cr-' + uv.util.formatClassName(self.categories[idx]), true)
.style('fill', self.config.label.showlabel ? uv.util.getColorBand(self.config, idx) : 'none')
.style('font-family', self.config.line.fontfamily)
.style('font-size', self.config.line.fontsize)
.style('font-weight', self.config.line.fontweight)
.text(function(d) { return uv.util.getLabelValue(self, d); })
.transition()
.duration(3 * self.config.effects.duration)
.delay(2 * idx * self.config.effects.duration)
.style('opacity', 1);
return this;
};
// Draws one data series of a vertical line graph (categories on the horizontal
// axis, values on the vertical axis): the connecting path, optional data-point
// circles with tooltips, and animated value labels above each point.
//
// Params:
//   linegroup - object with a `path` d3 selection to draw into; this function
//               also assigns `linegroup.func`, the d3 line generator.
//   idx       - index of the series in self.dataset / self.categories.
// Returns: this (the LineGraph instance), for chaining.
uv.LineGraph.prototype.drawVerticalLines = function (linegroup, idx) {
    var self = this,
        axes = self.axes,
        config = self.config,
        color = uv.util.getColorBand(self.config, idx);
    self.axes.hor.scale.rangePoints([0, self.width()]);
    // NOTE(review): the scale is configured with rangePoints, but rangeBand()
    // is used below for centering. In d3 v3 rangeBand() returns 0 for a point
    // scale, making the "+ rangeBand() / 2" offset a no-op — confirm intended.
    linegroup.func = d3.svg.line()
        .x(function (d) { return axes.hor.scale(d.name) + axes.hor.scale.rangeBand() / 2; })
        .y(function (d) { return axes.ver.scale(d.value); })
        // Fix: read interpolation from this chart's own config (local alias
        // declared above) rather than the global uv.config defaults, so a
        // per-instance interpolation setting is actually honored.
        .interpolate(config.line.interpolation);
    // Main series path: starts transparent and fades in, then wires up the
    // hover effects once the entry transition has finished (endAll).
    linegroup.path.append('path')
        .attr('d', linegroup.func)
        .classed('cr-' + uv.util.formatClassName(self.categories[idx]), true)
        .style('fill', 'none')
        .style('stroke', color)
        .style('stroke-width', self.config.line.strokewidth)
        .style('stroke-opacity', self.config.line.strokeopacity)
        .transition()
        .duration(self.config.effects.duration)
        .delay(2 * idx * self.config.effects.duration)
        .style('stroke-opacity', 1)
        .call(uv.util.endAll, function (d,i){
            // Attach hover handlers only after the transition so effects do
            // not fire mid-animation.
            d3.select(this.parentNode.parentNode).selectAll('path').on('mouseover', uv.effects.line.mouseover(self, idx));
            d3.select(this.parentNode.parentNode).selectAll('path').on('mouseout', uv.effects.line.mouseout(self, idx));
            d3.select(this.parentNode.parentNode).selectAll('circle').on('mouseover', uv.effects.line.mouseover(self, idx));
            d3.select(this.parentNode.parentNode).selectAll('circle').on('mouseout', uv.effects.line.mouseout(self, idx));
        });
    // Optional circles marking each data point, each with an SVG tooltip.
    if (self.config.line.showcircles) {
        linegroup.path.selectAll('circle')
            .data(self.dataset[idx])
            .enter().append('circle')
            .attr('cx', linegroup.func.x())
            .attr('cy', linegroup.func.y())
            .attr('r', self.config.line.circleradius)
            .classed('cr-' + uv.util.formatClassName(self.categories[idx]), true)
            .style('fill', color)
            .style('fill-opacity', self.config.line.circleopacity)
            .style('stroke', '#fff')
            .append('svg:title')
            .text( function (d, i) { return uv.util.getTooltipText(self, self.categories[idx], self.labels[i], d);});
    }
    // Value labels, centered above each point (y - 20), fading in after the
    // path animation; hidden via fill 'none' when labels are disabled.
    linegroup.path.selectAll('text')
        .data(self.dataset[idx])
        .enter().append('text')
        .attr('x', function (d) { return axes.hor.scale(d.name) + axes.hor.scale.rangeBand() / 2; })
        .attr('y', function (d) { return axes.ver.scale(d.value) - 20; })
        .attr('dx', 0)
        .attr('dy', '.71em')
        .attr('text-anchor', 'middle')
        .classed('cr-' + uv.util.formatClassName(self.categories[idx]), true)
        .style('fill', self.config.label.showlabel ? uv.util.getColorBand(self.config, idx) : 'none')
        .style('font-family', self.config.line.fontfamily)
        .style('font-size', self.config.line.fontsize)
        .style('font-weight', self.config.line.fontweight)
        .style('opacity', 0)
        .text(function(d) { return uv.util.getLabelValue(self, d); })
        .transition()
        .duration(3 * self.config.effects.duration)
        .delay(2 * idx * self.config.effects.duration)
        .style('opacity', 1);
    return this;
};
<|file_name|>task.go<|end_file_name|><|fim▁begin|>// Copyright 2018 The OpenPitrix Authors. All rights reserved.
// Use of this source code is governed by a Apache license
// that can be found in the LICENSE file.
package models
import (
"time"
"openpitrix.io/openpitrix/pkg/constants"
"openpitrix.io/openpitrix/pkg/db"
"openpitrix.io/openpitrix/pkg/logger"
"openpitrix.io/openpitrix/pkg/pb"
"openpitrix.io/openpitrix/pkg/sender"
"openpitrix.io/openpitrix/pkg/util/idutil"
"openpitrix.io/openpitrix/pkg/util/jsonutil"
"openpitrix.io/openpitrix/pkg/util/pbutil"
)
func NewTaskId() string {
return idutil.GetUuid("t-")
}
type Task struct {
TaskId string
JobId string
TaskAction string
Directive string
Owner string
OwnerPath sender.OwnerPath
Status string
ErrorCode uint32
Executor string
Target string
NodeId string
FailureAllowed bool
CreateTime time.Time
StatusTime time.Time
}
var TaskColumns = db.GetColumnsFromStruct(&Task{})
func NewTask(taskId, jobId, nodeId, target, taskAction, directive string, ownerPath sender.OwnerPath, failureAllowed bool) *Task {
if taskId == "" {
taskId = NewTaskId()
} else if taskId == constants.PlaceHolder {
taskId = ""
}
return &Task{
TaskId: taskId,
JobId: jobId,
NodeId: nodeId,
Target: target,
TaskAction: taskAction,
Directive: directive,
Owner: ownerPath.Owner(),
OwnerPath: ownerPath,
Status: constants.StatusPending,
CreateTime: time.Now(),
StatusTime: time.Now(),
FailureAllowed: failureAllowed,
}
}
func TaskToPb(task *Task) *pb.Task {
pbTask := pb.Task{}
pbTask.TaskId = pbutil.ToProtoString(task.TaskId)
pbTask.JobId = pbutil.ToProtoString(task.JobId)
pbTask.TaskAction = pbutil.ToProtoString(task.TaskAction)
pbTask.Directive = pbutil.ToProtoString(task.Directive)
pbTask.OwnerPath = task.OwnerPath.ToProtoString()
pbTask.Owner = pbutil.ToProtoString(task.Owner)
pbTask.Status = pbutil.ToProtoString(task.Status)
pbTask.ErrorCode = pbutil.ToProtoUInt32(task.ErrorCode)
pbTask.Executor = pbutil.ToProtoString(task.Executor)
pbTask.Target = pbutil.ToProtoString(task.Target)
pbTask.NodeId = pbutil.ToProtoString(task.NodeId)
pbTask.CreateTime = pbutil.ToProtoTimestamp(task.CreateTime)
pbTask.StatusTime = pbutil.ToProtoTimestamp(task.StatusTime)
pbTask.FailureAllowed = pbutil.ToProtoBool(task.FailureAllowed)
return &pbTask
}
func TasksToPbs(tasks []*Task) (pbTasks []*pb.Task) {
for _, task := range tasks {
pbTasks = append(pbTasks, TaskToPb(task))
}
return
}
func PbToTask(pbTask *pb.Task) *Task {
ownerPath := sender.OwnerPath(pbTask.GetOwnerPath().GetValue())
return &Task{
TaskId: pbTask.GetTaskId().GetValue(),
JobId: pbTask.GetJobId().GetValue(),
TaskAction: pbTask.GetTaskAction().GetValue(),
Directive: pbTask.GetDirective().GetValue(),
OwnerPath: ownerPath,
Owner: ownerPath.Owner(),
Status: pbTask.GetStatus().GetValue(),
ErrorCode: pbTask.GetErrorCode().GetValue(),
Executor: pbTask.GetExecutor().GetValue(),
Target: pbTask.GetTarget().GetValue(),
NodeId: pbTask.GetNodeId().GetValue(),
FailureAllowed: pbTask.GetFailureAllowed().GetValue(),
CreateTime: pbutil.GetTime(pbTask.GetCreateTime()),
StatusTime: pbutil.GetTime(pbTask.GetStatusTime()),
}
}
func PbsToTasks(pbTasks []*pb.Task) (tasks []*Task) {
for _, pbTask := range pbTasks {
tasks = append(tasks, PbToTask(pbTask))
}
return<|fim▁hole|>func (t *Task) GetTimeout(defaultTimeout time.Duration) time.Duration {
if t.Directive == "" {
return defaultTimeout
}
directive := make(map[string]interface{})
err := jsonutil.Decode([]byte(t.Directive), &directive)
if err != nil {
logger.Error(nil, "Decode task [%s] directive [%s] failed: %+v.", t.TaskId, t.Directive, err)
return defaultTimeout
}
timeout, exist := directive[constants.TimeoutName]
if !exist {
return defaultTimeout
}
tm := timeout.(float64)
if tm <= 0 {
return defaultTimeout
}
return time.Duration(tm) * time.Second
}<|fim▁end|> | }
|
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from django.http import HttpResponse
from django.utils import unittest
from django.test.client import Client
from postleware import PostResponseCachebusterMiddleware
class PostResponseMiddleware(unittest.TestCase):
def setUp(self):
self.client = Client()
def test_header_added_when_necessary(self):
# 'Cache-Control: no-cache' is added to POSTs
response = self.client.post('/test1', {'foo':'bar'})
self.assertEqual(response['Cache-Control'], 'no-cache')
# 'Cache-Control' is NOT added to GETs
response = self.client.get('/test1')
self.assertFalse(response.has_header('Cache-Control'))
def test_header_not_added_when_present(self):
middleware = PostResponseCachebusterMiddleware()
test_header_setting = 'test-setting'
raw_response = HttpResponse()
# 'Cache-Control' header isn't modified when present on POSTs
request = MockRequest('POST')
raw_response['Cache-Control'] = test_header_setting
response = middleware.process_response(request, raw_response)
self.assertEqual(response['Cache-Control'], test_header_setting)
# 'Cache-Control' header isn't modified when present on GETs
request = MockRequest('GET')
raw_response['Cache-Control'] = test_header_setting
response = middleware.process_response(request, raw_response)
self.assertEqual(response['Cache-Control'], test_header_setting)
class MockRequest(object):
def __init__(self, method=None):<|fim▁hole|><|fim▁end|> | self.method = method |
<|file_name|>iam.go<|end_file_name|><|fim▁begin|>package iam
import (<|fim▁hole|> // "github.com/quintilesims/layer0/api/config"
"strings"
"github.com/quintilesims/layer0/common/aws/provider"
)
type Provider interface {
UploadServerCertificate(string, string, string, string, *string) (*ServerCertificateMetadata, error)
ListCertificates() ([]*ServerCertificateMetadata, error)
GetUser(username *string) (*User, error)
DeleteServerCertificate(certName string) error
CreateRole(roleName, servicePrincipal string) (*Role, error)
GetRole(roleName string) (*Role, error)
PutRolePolicy(roleName, policy string) error
GetAccountId() (string, error)
DeleteRole(roleName string) error
DeleteRolePolicy(roleName, policyName string) error
ListRolePolicies(roleName string) ([]*string, error)
ListRoles() ([]*string, error)
}
type iamInternal interface {
UploadServerCertificate(input *iam.UploadServerCertificateInput) (output *iam.UploadServerCertificateOutput, err error)
ListServerCertificates(*iam.ListServerCertificatesInput) (*iam.ListServerCertificatesOutput, error)
DeleteServerCertificate(input *iam.DeleteServerCertificateInput) (*iam.DeleteServerCertificateOutput, error)
GetUser(input *iam.GetUserInput) (*iam.GetUserOutput, error)
PutRolePolicy(*iam.PutRolePolicyInput) (*iam.PutRolePolicyOutput, error)
CreateRole(*iam.CreateRoleInput) (*iam.CreateRoleOutput, error)
GetRole(*iam.GetRoleInput) (*iam.GetRoleOutput, error)
DeleteRole(*iam.DeleteRoleInput) (*iam.DeleteRoleOutput, error)
DeleteRolePolicy(*iam.DeleteRolePolicyInput) (*iam.DeleteRolePolicyOutput, error)
ListRolePolicies(*iam.ListRolePoliciesInput) (*iam.ListRolePoliciesOutput, error)
ListRoles(*iam.ListRolesInput) (*iam.ListRolesOutput, error)
}
type IAM struct {
credProvider provider.CredProvider
region string
Connect func() (iamInternal, error)
}
type ServerCertificateMetadata struct {
*iam.ServerCertificateMetadata
}
func NewServerCertificateMetadata(name, arn string) *ServerCertificateMetadata {
return &ServerCertificateMetadata{
&iam.ServerCertificateMetadata{
ServerCertificateName: aws.String(name),
Arn: aws.String(arn),
},
}
}
type User struct {
*iam.User
}
func NewUser() *User {
return &User{&iam.User{}}
}
type Role struct {
*iam.Role
}
func Connect(credProvider provider.CredProvider, region string) (iamInternal, error) {
connection, err := provider.GetIAMConnection(credProvider, region)
if err != nil {
return nil, err
}
return connection, nil
}
func NewIAM(credProvider provider.CredProvider, region string) (Provider, error) {
iam := IAM{
credProvider: credProvider,
region: region,
Connect: func() (iamInternal, error) { return Connect(credProvider, region) },
}
_, err := iam.Connect()
if err != nil {
return nil, err
}
return &iam, nil
}
func (this *IAM) UploadServerCertificate(name, path, body, pk string, optionalChain *string) (*ServerCertificateMetadata, error) {
input := &iam.UploadServerCertificateInput{
ServerCertificateName: aws.String(name),
CertificateBody: aws.String(body),
CertificateChain: optionalChain,
PrivateKey: aws.String(pk),
Path: aws.String(path),
}
connection, err := this.Connect()
if err != nil {
return nil, err
}
output, err := connection.UploadServerCertificate(input)
if err != nil {
return nil, err
}
metadata := output.ServerCertificateMetadata
return &ServerCertificateMetadata{metadata}, nil
}
func (this *IAM) ListCertificates() ([]*ServerCertificateMetadata, error) {
connection, err := this.Connect()
if err != nil {
return nil, err
}
output, err := connection.ListServerCertificates(&iam.ListServerCertificatesInput{})
if err != nil {
return nil, err
}
certs := []*ServerCertificateMetadata{}
for _, metadata := range output.ServerCertificateMetadataList {
certs = append(certs, &ServerCertificateMetadata{metadata})
}
return certs, nil
}
func (this *IAM) GetUser(username *string) (*User, error) {
input := &iam.GetUserInput{
UserName: username,
}
connection, err := this.Connect()
if err != nil {
return nil, err
}
output, err := connection.GetUser(input)
if err != nil {
return nil, err
}
var user *User
if output.User != nil {
user = &User{output.User}
}
return user, nil
}
func (this *IAM) DeleteServerCertificate(certName string) error {
input := &iam.DeleteServerCertificateInput{
ServerCertificateName: aws.String(certName),
}
connection, err := this.Connect()
if err != nil {
return err
}
_, err = connection.DeleteServerCertificate(input)
return err
}
func (this *IAM) CreateRole(roleName, servicePrincipal string) (*Role, error) {
input := &iam.CreateRoleInput{
AssumeRolePolicyDocument: aws.String(fmt.Sprintf(`{"Version":"2008-10-17","Statement":[{"Sid":"","Effect":"Allow","Principal":{"Service":["%s"]},"Action":["sts:AssumeRole"]}]}`, servicePrincipal)),
RoleName: &roleName,
}
connection, err := this.Connect()
if err != nil {
return nil, err
}
out, err := connection.CreateRole(input)
if err != nil {
return nil, err
}
return &Role{out.Role}, nil
}
func (this *IAM) GetRole(roleName string) (*Role, error) {
input := &iam.GetRoleInput{
RoleName: &roleName,
}
connection, err := this.Connect()
if err != nil {
return nil, err
}
output, err := connection.GetRole(input)
if err != nil {
return nil, err
}
return &Role{output.Role}, nil
}
func (this *IAM) DeleteRole(roleName string) error {
input := &iam.DeleteRoleInput{
RoleName: &roleName,
}
connection, err := this.Connect()
if err != nil {
return err
}
_, err = connection.DeleteRole(input)
return err
}
func (this *IAM) DeleteRolePolicy(roleName, policyName string) error {
input := &iam.DeleteRolePolicyInput{
RoleName: &roleName,
PolicyName: &policyName,
}
connection, err := this.Connect()
if err != nil {
return err
}
_, err = connection.DeleteRolePolicy(input)
return err
}
func (this *IAM) ListRolePolicies(roleName string) ([]*string, error) {
input := &iam.ListRolePoliciesInput{
RoleName: &roleName,
}
connection, err := this.Connect()
if err != nil {
return nil, err
}
output, err := connection.ListRolePolicies(input)
if err != nil {
return nil, err
}
return output.PolicyNames, nil
}
func (this *IAM) ListRoles() ([]*string, error) {
input := &iam.ListRolesInput{}
connection, err := this.Connect()
if err != nil {
return nil, err
}
output, err := connection.ListRoles(input)
if err != nil {
return nil, err
}
roles := []*string{}
for _, role := range output.Roles {
roles = append(roles, role.RoleName)
}
return roles, nil
}
func (this *IAM) PutRolePolicy(roleName, policy string) error {
input := &iam.PutRolePolicyInput{
PolicyName: &roleName,
PolicyDocument: &policy,
RoleName: &roleName,
}
connection, err := this.Connect()
if err != nil {
return err
}
_, err = connection.PutRolePolicy(input)
return err
}
func (this *IAM) GetAccountId() (string, error) {
connection, err := this.Connect()
if err != nil {
return "", err
}
out, err := connection.GetUser(nil)
if err != nil {
return "", fmt.Errorf("[ERROR] Failed to get current IAM user: %s", err.Error())
}
// Sample ARN: "arn:aws:iam::123456789012:user/layer0/l0/bootstrap-user-user-ABCDEFGHIJKL"
return strings.Split(*out.User.Arn, ":")[4], nil
}<|fim▁end|> | "fmt"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/iam" |
<|file_name|>rest_providers.py<|end_file_name|><|fim▁begin|># Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes providing REST data sources for common CourseBuilder items."""
__author__ = 'Mike Gainer ([email protected])'
from common import schema_fields
from common import utils
from models import courses
from models import data_sources
from models import jobs
from models import models
from models import transforms
from tools import verify
class AssessmentsDataSource(data_sources.AbstractSmallRestDataSource):
@classmethod
def get_name(cls):
return 'assessments'
@classmethod
def get_title(cls):
return 'Assessments'
@classmethod
def get_schema(cls, unused_app_context, unused_catch_and_log):
reg = schema_fields.FieldRegistry(
'Analytics',
description='Sets of questions determining student skill')
reg.add_property(schema_fields.SchemaField(
'unit_id', 'Unit ID', 'integer',
description='Key uniquely identifying this particular assessment'))
reg.add_property(schema_fields.SchemaField(
'title', 'Title', 'string',
description='Human-readable title describing the assessment'))
reg.add_property(schema_fields.SchemaField(
'weight', 'Weight', 'number',
'Scalar indicating how the results of this assessment are '
'to be weighted versus the results of peer assessments.'))
reg.add_property(schema_fields.SchemaField(
'html_check_answers', 'Check Answers', 'boolean',
'Whether students may check their answers before submitting '
'the assessment.'))
reg.add_property(schema_fields.SchemaField(
'properties', 'Properties', 'object',
'Set of key/value additional properties, not further defined.'))
return reg.get_json_schema_dict()['properties']
@classmethod
def fetch_values(cls, app_context, *args, **kwargs):
course = courses.Course(handler=None, app_context=app_context)
assessments = course.get_units_of_type(verify.UNIT_TYPE_ASSESSMENT)
ret = []
for assessment in assessments:
ret.append({
'unit_id': assessment.unit_id,
'title': assessment.title,
'weight': assessment.weight,
'html_check_answers': assessment.html_check_answers,
'properties': assessment.properties})
return ret, 0
class UnitsDataSource(data_sources.AbstractSmallRestDataSource):
@classmethod
def get_name(cls):
return 'units'
@classmethod
def get_title(cls):
return 'Units'
@classmethod
def get_schema(cls, unused_app_context, unused_catch_and_log):
reg = schema_fields.FieldRegistry(
'Units',
description='Sets of lessons providing course content')
reg.add_property(schema_fields.SchemaField(
'unit_id', 'Unit ID', 'integer',
description='Key uniquely identifying this particular unit'))
reg.add_property(schema_fields.SchemaField(
'title', 'Title', 'string',
description='Human-readable title describing the unit'))
reg.add_property(schema_fields.SchemaField(
'properties', 'Properties', 'object',
'Set of key/value additional properties, not further defined.'))
return reg.get_json_schema_dict()['properties']
@classmethod
def fetch_values(cls, app_context, *args, **kwargs):
course = courses.Course(handler=None, app_context=app_context)
units = course.get_units_of_type(verify.UNIT_TYPE_UNIT)
ret = []
for unit in units:
ret.append({
'unit_id': unit.unit_id,
'title': unit.title,
'properties': unit.properties,
})
return ret, 0
class LessonsDataSource(data_sources.AbstractSmallRestDataSource):
@classmethod
def get_name(cls):
return 'lessons'
@classmethod
def get_title(cls):
return 'Lessons'
@classmethod
def get_schema(cls, unused_app_context, unused_catch_and_log):
reg = schema_fields.FieldRegistry(
'Lessons',
description='Sets of lessons providing course content')
reg.add_property(schema_fields.SchemaField(
'lesson_id', 'Unit ID', 'integer',
description='Key uniquely identifying which lesson this is'))
reg.add_property(schema_fields.SchemaField(
'unit_id', 'Unit ID', 'integer',
description='Key uniquely identifying unit lesson is in'))
reg.add_property(schema_fields.SchemaField(
'title', 'Title', 'string',
description='Human-readable title describing the unit'))
reg.add_property(schema_fields.SchemaField(
'scored', 'Scored', 'boolean',
'Boolean: Whether questions in this lesson count for scoring.'))
reg.add_property(schema_fields.SchemaField(
'has_activity', 'Has Activity', 'boolean',
'Boolean: does this lesson contain an activity?'))
reg.add_property(schema_fields.SchemaField(
'activity_title', 'Activity Title', 'string',
'Title of the activity (if lesson has an activity)'))
return reg.get_json_schema_dict()['properties']
@classmethod
def fetch_values(cls, app_context, *args, **kwargs):
course = courses.Course(handler=None, app_context=app_context)
lessons = course.get_lessons_for_all_units()
ret = []
for lesson in lessons:
ret.append({
'lesson_id': lesson.unit_id,
'unit_id': lesson.unit_id,
'title': lesson.title,
'scored': lesson.scored,
'has_activity': lesson.has_activity,
'activity_title': lesson.activity_title,
})
return ret, 0
class StudentAssessmentScoresDataSource(
data_sources.AbstractDbTableRestDataSource):
"""Unpack student assessment scores from student record.
NOTE: Filtering/ordering, if present, will be done based on Student
attributes, not scores. (The scores are in an encoded string in a
field which is not indexed anyhow.) The only meaningful field to
index or filter on is enrolled_on.
"""
@classmethod
def get_name(cls):
return 'assessment_scores'
@classmethod
def get_title(cls):
return 'Assessment Scores'
@classmethod
def get_context_class(cls):
return data_sources.DbTableContext
@classmethod
def get_schema(cls, unused_app_context, unused_catch_and_log):
reg = schema_fields.FieldRegistry('Unit',
description='Course sub-components')
reg.add_property(schema_fields.SchemaField(
'user_id', 'User ID', 'string',
description='Student ID encrypted with a session-specific key'))
reg.add_property(schema_fields.SchemaField(
'id', 'Unit ID', 'string',
description='ID of assessment for this score.'))
reg.add_property(schema_fields.SchemaField(
'title', 'Title', 'string',
description='Title of the assessment for this score.'))
reg.add_property(schema_fields.SchemaField(
'score', 'Score', 'integer',
description='Value from 0 to 100 indicating % correct.'))
reg.add_property(schema_fields.SchemaField(
'weight', 'Weight', 'integer',
description='Value from 0 to 100 indicating % correct.'))
reg.add_property(schema_fields.SchemaField(
'completed', 'Completed', 'boolean',
description='Whether the assessment was completed.'))
reg.add_property(schema_fields.SchemaField(
'human_graded', 'Human Graded', 'boolean',
description='Score is from a human (vs. automatic) grading.'))
return reg.get_json_schema_dict()['properties']
@classmethod
def get_entity_class(cls):
return models.Student
@classmethod
def _postprocess_rows(cls, app_context, source_context,
unused_schema, unused_log, unused_page_number,
students):
transform_fn = cls._build_transform_fn(source_context)
with utils.Namespace(app_context.get_namespace_name()):
course = courses.Course(handler=None, app_context=app_context)
students_with_scores = [s for s in students if s.scores]
student_scores = []
for student in students_with_scores:
scores = course.get_all_scores(student)
for score in scores:
if not score['attempted']:
continue
# user_id is PII and must be encoded to obscure its value.
score['user_id'] = transform_fn(student.user_id)
student_scores.append(score)
# Provide a ranking by student, 0 ... #students, low to high.
scored_students = {}
for score in student_scores:
current_score = scored_students.get(score['user_id'], 0)
scored_students[score['user_id']] = current_score + (<|fim▁hole|> enumerate(
sorted(scored_students.items(),
lambda i1, i2: cmp(i1[1], i2[1])))}
# Provide a ranking by assessment, 0 ... #assessments, low to high
scored_assessments = {}
for score in student_scores:
title = score['title']
if title not in scored_assessments:
scored_assessments[title] = []
scored_assessments[title].append(
score['weight'] * score['score'])
for title in scored_assessments:
avg = (sum(scored_assessments[title]) * 1.0 /
len(scored_assessments[title]))
scored_assessments[title] = avg
ranked_assessments = {kv[0]: rank for rank, kv in
enumerate(
sorted(scored_assessments.items(),
lambda i1, i2: cmp(i1[1], i2[1])))}
for score in student_scores:
score['user_rank'] = ranked_students[score['user_id']]
score['assessment_rank'] = ranked_assessments[score['title']]
return student_scores
class StudentsDataSource(data_sources.AbstractDbTableRestDataSource):
@classmethod
def get_entity_class(cls):
return models.Student
@classmethod
def get_name(cls):
return 'students'
@classmethod
def get_title(cls):
return 'Students'
@classmethod
def _postprocess_rows(cls, app_context, source_context, schema,
log, page_number, rows):
ret = super(StudentsDataSource, cls)._postprocess_rows(
app_context, source_context, schema, log, page_number, rows)
# These don't add any value, and do add substantially to data volume.
# (The user_id field is what's valuable for matching to other items
# such as StudentAnswersEntity records.)
for item in ret:
del item['key']
del item['key_by_user_id']
if 'additional_fields' not in item or not item['additional_fields']:
item['additional_fields'] = {}
else:
item['additional_fields'] = (
transforms.nested_lists_as_string_to_dict(
item['additional_fields']))
return ret
class LabelsOnStudentsGenerator(jobs.AbstractCountingMapReduceJob):
@staticmethod
def get_description():
return 'labels on students'
@staticmethod
def entity_class():
return models.Student
@staticmethod
def map(student):
for label_id_str in utils.text_to_list(student.labels):
yield (label_id_str, 1)
class LabelsOnStudentsDataSource(data_sources.AbstractRestDataSource):
@staticmethod
def required_generators():
return [LabelsOnStudentsGenerator]
@classmethod
def get_name(cls):
return 'labels_on_students'
@classmethod
def get_title(cls):
return 'Labels on Students'
@classmethod
def get_default_chunk_size(cls):
return 0 # Meaning we don't need pagination
@classmethod
def get_context_class(cls):
return data_sources.NullContextManager
@classmethod
def get_schema(cls, app_context, log):
reg = schema_fields.FieldRegistry(
'Students By Label',
description='Count of students marked with each label')
reg.add_property(schema_fields.SchemaField(
'title', 'Title', 'string',
description='Name for this label'))
reg.add_property(schema_fields.SchemaField(
'description', 'Description', 'string',
description='Human-readable text describing the label'))
reg.add_property(schema_fields.SchemaField(
'type', 'Type', 'string',
description='Title of label group to which this label belongs.'))
reg.add_property(schema_fields.SchemaField(
'count', 'Count', 'integer',
description='Number of students with this label applied'))
return reg.get_json_schema_dict()['properties']
@classmethod
def fetch_values(cls, app_context, source_context, schema, log, page_number,
labels_on_students_job):
label_counts = jobs.MapReduceJob.get_results(labels_on_students_job)
counts = {int(x[0]): int(x[1]) for x in label_counts}
type_titles = {lt.type: lt.title for lt in models.LabelDTO.LABEL_TYPES}
ret = []
for label in models.LabelDAO.get_all():
ret.append({
'title': label.title,
'description': label.description,
'type': type_titles[label.type],
'count': counts.get(label.id, 0),
})
return ret, 0<|fim▁end|> | score['weight'] * score['score'])
ranked_students = {kv[0]: rank for rank, kv in |
<|file_name|>OrderedMap-test.js<|end_file_name|><|fim▁begin|>/**
* Copyright 2013-2015, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
* @emails react-core
*/
'use strict';
var OrderedMap;
/**
* Shared, reusable objects.
*/
var hasEmptyStringKey = {
'thisKeyIsFine': {data: []},
'': {thisShouldCauseAFailure: []},
'thisKeyIsAlsoFine': {data: []}
};
/**
* Used as map/forEach callback.
*/
var duplicate = function(itm, key, count) {
return {
uniqueID: itm.uniqueID,
val: itm.val + key + count + this.justToTestScope
};
};
// Should not be allowed - because then null/'null' become impossible to
// distinguish. Every key MUST be a string period!
var hasNullAndUndefStringKey = [
{uniqueID: 'undefined', val: 'thisIsUndefined'},
{uniqueID: 'null', val: 'thisIsNull'}
];
var hasNullKey = [
{uniqueID: 'thisKeyIsFine', data: []},
{uniqueID: 'thisKeyIsAlsoFine', data: []},
{uniqueID: null, data: []}
];
var hasObjectKey = [
{uniqueID: 'thisKeyIsFine', data: []},
{uniqueID: 'thisKeyIsAlsoFine', data: []},
{uniqueID: {}, data: []}
];
var hasArrayKey = [
{uniqueID: 'thisKeyIsFine', data: []},
{uniqueID: 'thisKeyIsAlsoFine', data: []},
{uniqueID: [], data: []}
];
// This should be allowed
var hasNullStringKey = [
{uniqueID: 'thisKeyIsFine', data: []},
{uniqueID: 'thisKeyIsAlsoFine', data: []},
{uniqueID: 'null', data: []}
];
var hasUndefinedKey = [
{uniqueID: 'thisKeyIsFine', data: []},
{uniqueID: 'thisKeyIsAlsoFine', data: []},
{uniqueID: undefined, data: []}
];
var hasUndefinedStringKey = [
{uniqueID: 'thisKeyIsFine', data: []},
{uniqueID: 'thisKeyIsAlsoFine', data: []},
{uniqueID: 'undefined', data: []}
];
var hasPositiveNumericKey = [
{uniqueID: 'notANumber', data: []},
{uniqueID: '5', data: []},
{uniqueID: 'notAnotherNumber', data: []}
];
var hasZeroStringKey = [
{uniqueID: 'greg', data: 'grego'},
{uniqueID: '0', data: '0o'},
{uniqueID: 'tom', data: 'tomo'}
];
var hasZeroNumberKey = [
{uniqueID: 'greg', data: 'grego'},
{uniqueID: 0, data: '0o'},
{uniqueID: 'tom', data: 'tomo'}
];
var hasAllNumericStringKeys = [
{uniqueID: '0', name: 'Gregory'},
{uniqueID: '2', name: 'James'},
{uniqueID: '1', name: 'Tom'}
];
var hasAllNumericKeys = [
{uniqueID: 0, name: 'Gregory'},
{uniqueID: 2, name: 'James'},
{uniqueID: 1, name: 'Tom'}
];
var hasAllValidKeys = [
{uniqueID: 'keyOne', value: 'valueOne'},
{uniqueID: 'keyTwo', value: 'valueTwo'}
];
var hasDuplicateKeys = [
{uniqueID: 'keyOne', value: 'valueOne'},
{uniqueID: 'keyTwo', value: 'valueTwo'},
{uniqueID: 'keyOne', value: 'valueThree'}
];
var idEntities = [
{uniqueID: 'greg', name: 'Gregory'},
{uniqueID: 'james', name: 'James'},
{uniqueID: 'tom', name: 'Tom'}
];
var hasEmptyKey = [
{uniqueID: 'greg', name: 'Gregory'},
{uniqueID: '', name: 'James'},
{uniqueID: 'tom', name: 'Tom'}
];
var extractUniqueID = function(entity) {
return entity.uniqueID;
};
describe('OrderedMap', function() {
beforeEach(function() {
require('mock-modules').dumpCache();
OrderedMap = require('OrderedMap');
});
it('should create according to simple object with keys', function() {
OrderedMap.fromArray(hasAllValidKeys, extractUniqueID);
// Iterate over and ensure key order.
});
it('should create from array when providing an identity CB', function() {
expect(function() {
OrderedMap.fromArray(idEntities, extractUniqueID);
}).not.toThrow();
});
it('should throw if constructing from Array without identity CB', function() {
OrderedMap.fromArray(idEntities, extractUniqueID);
// Iterate and ensure key order
});
it('should not throw when fromArray extracts a numeric key', function() {
expect(function() {
OrderedMap.fromArray(hasPositiveNumericKey, extractUniqueID);
}).not.toThrow();
});
it('should throw when any key is the empty string', function() {
expect(function() {
OrderedMap.fromArray(hasEmptyKey, extractUniqueID);
}).toThrow();
});
it('should not throw when a key is the string "undefined" or "null"',
function() {
var om = OrderedMap.fromArray(hasNullAndUndefStringKey, extractUniqueID);
expect(om.length).toBe(2);
expect(om.indexOfKey('undefined')).toBe(0);
expect(om.indexOfKey('null')).toBe(1);
expect(om.keyAfter('undefined')).toBe('null');
expect(om.keyAfter('null')).toBe(undefined);
expect(om.keyBefore('undefined')).toBe(undefined);
expect(om.has('undefined')).toBe(true);
expect(om.has('null')).toBe(true);
expect(om.get('undefined').val).toBe('thisIsUndefined');
expect(om.get('null').val).toBe('thisIsNull');
});
/**
* Numeric keys are cast to strings.
*/
it('should not throw when a key is the number zero', function() {
var om = OrderedMap.fromArray(hasZeroNumberKey, extractUniqueID);
expect(om.length).toBe(3);
expect(om.indexOfKey('0')).toBe(1);
expect(om.indexOfKey(0)).toBe(1);
});
it('should throw when any key is falsey', function() {
expect(function() {
OrderedMap.fromArray(hasEmptyStringKey, extractUniqueID);
}).toThrow();
expect(function() {
OrderedMap.fromArray(hasNullKey, extractUniqueID);
}).toThrow();
expect(function() {
OrderedMap.fromArray(hasUndefinedKey, extractUniqueID);
}).toThrow();
});
it('should not throw on string keys "undefined/null"', function() {
expect(function() {
OrderedMap.fromArray(hasNullStringKey, extractUniqueID);
}).not.toThrow();
expect(function() {
OrderedMap.fromArray(hasUndefinedStringKey, extractUniqueID);
}).not.toThrow();
});
it('should throw on extracting keys that are not strings/nums', function() {
expect(function() {
OrderedMap.fromArray(hasObjectKey, extractUniqueID);
}).toThrow();
expect(function() {
OrderedMap.fromArray(hasArrayKey, extractUniqueID);
}).toThrow();
});
it('should throw if instantiating with duplicate key', function() {
expect(function() {
OrderedMap.fromArray(hasDuplicateKeys, extractUniqueID);
}).toThrow();
});
it('should not throw when a key is the string "0"', function() {
var verifyOM = function(om) {
expect(om.length).toBe(3);
expect(om.indexOfKey('greg')).toBe(0);
expect(om.indexOfKey('0')).toBe(1);
expect(om.indexOfKey(0)).toBe(1); // Casts on writes and reads.
expect(om.indexOfKey('tom')).toBe(2);
expect(om.keyAfter('greg')).toBe('0');
expect(om.keyAfter('0')).toBe('tom');
expect(om.keyAfter(0)).toBe('tom');
expect(om.keyAfter('tom')).toBe(undefined);
expect(om.keyBefore('greg')).toBe(undefined);
expect(om.keyBefore(0)).toBe('greg');
expect(om.keyBefore('0')).toBe('greg');
expect(om.keyBefore('tom')).toBe('0');
expect(om.has('undefined')).toBe(false);
expect(om.has(0)).toBe(true);
expect(om.has('0')).toBe(true);
};
verifyOM(OrderedMap.fromArray(hasZeroStringKey, extractUniqueID));
verifyOM(OrderedMap.fromArray(hasZeroNumberKey, extractUniqueID));
});
it('should throw when getting invalid public key', function() {
var om = OrderedMap.fromArray(hasAllValidKeys, extractUniqueID);
expect(function() {
om.has(undefined);
}).toThrow();
expect(function() {
om.get(undefined);
}).toThrow();
expect(function() {
om.has(null);
}).toThrow();
expect(function() {
om.get(null);
}).toThrow();
expect(function() {
om.has('');
}).toThrow();
expect(function() {
om.get('');
}).toThrow();
});
it('should throw when any key is falsey', function() {
expect(function() {
OrderedMap.fromArray(hasEmptyStringKey, extractUniqueID);
}).toThrow();
expect(function() {
OrderedMap.fromArray(hasNullKey, extractUniqueID);
}).toThrow();
expect(function() {
OrderedMap.fromArray(hasUndefinedKey, extractUniqueID);
}).toThrow();
});
it('should throw when fromArray is passed crazy args', function() {
// Test passing another OrderedMap (when it expects a plain object.)
// This is probably not what you meant to do! We should error.
var validOM = OrderedMap.fromArray(hasAllValidKeys, extractUniqueID);
expect(function() {
OrderedMap.fromArray({uniqueID: 'asdf'}, extractUniqueID);
}).toThrow();
expect(function() {
OrderedMap.fromArray(validOM, extractUniqueID);
}).toThrow();
});
it('should throw when fromArray is passed crazy things', function() {
expect(function() {
OrderedMap.fromArray(null, extractUniqueID);
}).toThrow();
expect(function() {
OrderedMap.fromArray('stringgg', extractUniqueID);
}).toThrow();
expect(function() {
OrderedMap.fromArray(undefined, extractUniqueID);
}).toThrow();
expect(function() {
OrderedMap.fromArray(new Date(), extractUniqueID);
}).toThrow();
expect(function() {
OrderedMap.fromArray({}, extractUniqueID);
}).toThrow();
// Test failure without extractor
expect(function() {
OrderedMap.fromArray(idEntities);
}).toThrow();
expect(function() {
OrderedMap.fromArray(idEntities, extractUniqueID);
}).not.toThrow();
});
// Testing methods that accept other `OrderedMap`s.
it('should throw when from/merge is passed an non-OrderedMap.', function() {
// Test passing an array to construction.
expect(function() {
OrderedMap.from(idEntities, extractUniqueID);
}).toThrow();
// Test passing an array to merge.
expect(function() {
OrderedMap.fromArray(idEntities, extractUniqueID)
.merge(idEntities, extractUniqueID);
}).toThrow();
// Test passing a plain object to merge.
expect(function() {
OrderedMap.fromArray(
idEntities,
extractUniqueID
).merge({blah: 'willFail'});
}).toThrow();
});
it('should throw when accessing key before/after of non-key', function() {
var om = OrderedMap.fromArray([
{uniqueID: 'first'},
{uniqueID: 'two'}], extractUniqueID
);
expect(function() {
om.keyBefore('dog');
}).toThrow();
expect(function() {
om.keyAfter('cat');
}).toThrow();
expect(function() {
om.keyAfter(null);
}).toThrow();
expect(function() {
om.keyAfter(undefined);
}).toThrow();
});
it('should throw passing invalid/not-present-keys to before/after',
function() {
var om = OrderedMap.fromArray([
{uniqueID: 'one', val: 'first'},
{uniqueID: 'two', val: 'second'},
{uniqueID: 'three', val: 'third'},
{uniqueID: 'four', val: 'fourth'}
], extractUniqueID);
expect(function() {
om.keyBefore('');
}).toThrow();
expect(function() {
om.keyBefore(null);
}).toThrow();
expect(function() {
om.keyBefore(undefined);
}).toThrow();
expect(function() {
om.keyBefore('notInTheOrderedMap!');
}).toThrow();
expect(function() {
om.keyAfter('');
}).toThrow();
expect(function() {
om.keyAfter(null);
}).toThrow();
expect(function() {
om.keyAfter(undefined);
}).toThrow();
expect(function() {
om.keyAfter('notInTheOrderedMap!');
}).toThrow();<|fim▁hole|>
expect(function() {
om.nthKeyAfter('', 1);
}).toThrow();
expect(function() {
om.nthKeyAfter(null, 1);
}).toThrow();
expect(function() {
om.nthKeyAfter(undefined, 1);
}).toThrow();
expect(function() {
om.nthKeyAfter('notInTheOrderedMap!', 1);
}).toThrow();
expect(function() {
om.nthKeyBefore('', 1);
}).toThrow();
expect(function() {
om.nthKeyBefore(null, 1);
}).toThrow();
expect(function() {
om.nthKeyBefore(undefined, 1);
}).toThrow();
expect(function() {
om.nthKeyBefore('notInTheOrderedMap!', 1);
}).toThrow();
});
it('should correctly determine the nth key after before', function() {
var om = OrderedMap.fromArray([
{uniqueID: 'one', val: 'first'},
{uniqueID: 'two', val: 'second'},
{uniqueID: 'three', val: 'third'},
{uniqueID: 'four', val: 'fourth'}
], extractUniqueID);
expect(om.keyBefore('one')).toBe(undefined); // first key
expect(om.keyBefore('two')).toBe('one');
expect(om.keyBefore('three')).toBe('two');
expect(om.keyBefore('four')).toBe('three');
expect(om.keyAfter('one')).toBe('two'); // first key
expect(om.keyAfter('two')).toBe('three');
expect(om.keyAfter('three')).toBe('four');
expect(om.keyAfter('four')).toBe(undefined);
expect(om.nthKeyBefore('one', 0)).toBe('one'); // first key
expect(om.nthKeyBefore('one', 1)).toBe(undefined);
expect(om.nthKeyBefore('one', 2)).toBe(undefined);
expect(om.nthKeyBefore('two', 0)).toBe('two');
expect(om.nthKeyBefore('two', 1)).toBe('one');
expect(om.nthKeyBefore('four', 0)).toBe('four');
expect(om.nthKeyBefore('four', 1)).toBe('three');
expect(om.nthKeyAfter('one', 0)).toBe('one');
expect(om.nthKeyAfter('one', 1)).toBe('two');
expect(om.nthKeyAfter('one', 2)).toBe('three');
expect(om.nthKeyAfter('two', 0)).toBe('two');
expect(om.nthKeyAfter('two', 1)).toBe('three');
expect(om.nthKeyAfter('four', 0)).toBe('four');
expect(om.nthKeyAfter('four', 1)).toBe(undefined);
});
it('should compute key indices correctly', function() {
var om = OrderedMap.fromArray([
{uniqueID: 'one', val: 'first'},
{uniqueID: 'two', val: 'second'}
], extractUniqueID);
expect(om.keyAtIndex(0)).toBe('one');
expect(om.keyAtIndex(1)).toBe('two');
expect(om.keyAtIndex(2)).toBe(undefined);
expect(om.indexOfKey('one')).toBe(0);
expect(om.indexOfKey('two')).toBe(1);
expect(om.indexOfKey('nope')).toBe(undefined);
expect(function() {
om.indexOfKey(null);
}).toThrow();
expect(function() {
om.indexOfKey(undefined);
}).toThrow();
expect(function() {
om.indexOfKey(''); // Empty key is not allowed
}).toThrow();
});
it('should compute indices on array that extracted numeric ids', function() {
var som = OrderedMap.fromArray(hasZeroStringKey, extractUniqueID);
expect(som.keyAtIndex(0)).toBe('greg');
expect(som.keyAtIndex(1)).toBe('0');
expect(som.keyAtIndex(2)).toBe('tom');
expect(som.indexOfKey('greg')).toBe(0);
expect(som.indexOfKey('0')).toBe(1);
expect(som.indexOfKey('tom')).toBe(2);
var verifyNumericKeys = function(nom) {
expect(nom.keyAtIndex(0)).toBe('0');
expect(nom.keyAtIndex(1)).toBe('2');
expect(nom.keyAtIndex(2)).toBe('1');
expect(nom.indexOfKey('0')).toBe(0);
expect(nom.indexOfKey('2')).toBe(1); // Prove these are not ordered by
expect(nom.indexOfKey('1')).toBe(2); // their keys
};
var omStringNumberKeys =
OrderedMap.fromArray(hasAllNumericStringKeys, extractUniqueID);
verifyNumericKeys(omStringNumberKeys);
var omNumericKeys =
OrderedMap.fromArray(hasAllNumericKeys, extractUniqueID);
verifyNumericKeys(omNumericKeys);
});
it('should compute indices on mutually exclusive merge', function() {
var om = OrderedMap.fromArray([
{uniqueID: 'one', val: 'first'},
{uniqueID: 'two', val: 'second'}
], extractUniqueID);
var om2 = OrderedMap.fromArray([
{uniqueID: 'three', val: 'third'}
], extractUniqueID);
var res = om.merge(om2);
expect(res.length).toBe(3);
expect(res.keyAtIndex(0)).toBe('one');
expect(res.keyAtIndex(1)).toBe('two');
expect(res.keyAtIndex(2)).toBe('three');
expect(res.keyAtIndex(3)).toBe(undefined);
expect(res.indexOfKey('one')).toBe(0);
expect(res.indexOfKey('two')).toBe(1);
expect(res.indexOfKey('three')).toBe(2);
expect(res.indexOfKey('dog')).toBe(undefined);
expect(res.has('one')).toBe(true);
expect(res.has('two')).toBe(true);
expect(res.has('three')).toBe(true);
expect(res.has('dog')).toBe(false);
expect(res.get('one').val).toBe('first');
expect(res.get('two').val).toBe('second');
expect(res.get('three').val).toBe('third');
expect(res.get('dog')).toBe(undefined);
});
it('should compute indices on intersected merge', function() {
var oneTwo = OrderedMap.fromArray([
{uniqueID: 'one', val: 'first'},
{uniqueID: 'two', val: 'secondOM1'}
], extractUniqueID);
var testOneTwoMergedWithTwoThree = function(res) {
expect(res.length).toBe(3);
expect(res.keyAtIndex(0)).toBe('one');
expect(res.keyAtIndex(1)).toBe('two');
expect(res.keyAtIndex(2)).toBe('three');
expect(res.keyAtIndex(3)).toBe(undefined);
expect(res.indexOfKey('one')).toBe(0);
expect(res.indexOfKey('two')).toBe(1);
expect(res.indexOfKey('three')).toBe(2);
expect(res.indexOfKey('dog')).toBe(undefined);
expect(res.has('one')).toBe(true);
expect(res.has('two')).toBe(true);
expect(res.has('three')).toBe(true);
expect(res.has('dog')).toBe(false);
expect(res.get('one').val).toBe('first');
expect(res.get('two').val).toBe('secondOM2');
expect(res.get('three').val).toBe('third');
expect(res.get('dog')).toBe(undefined);
};
var result =
oneTwo.merge(OrderedMap.fromArray([
{uniqueID: 'two', val: 'secondOM2'},
{uniqueID: 'three', val: 'third'}
], extractUniqueID));
testOneTwoMergedWithTwoThree(result);
// Everything should be exactly as before, since the ordering of `two` was
// already determined by `om`.
result = oneTwo.merge(
OrderedMap.fromArray([
{uniqueID: 'three', val: 'third'},
{uniqueID: 'two', val:'secondOM2'}
], extractUniqueID)
);
testOneTwoMergedWithTwoThree(result);
var testTwoThreeMergedWithOneTwo = function(res) {
expect(res.length).toBe(3);
expect(res.keyAtIndex(0)).toBe('two');
expect(res.keyAtIndex(1)).toBe('three');
expect(res.keyAtIndex(2)).toBe('one');
expect(res.keyAtIndex(3)).toBe(undefined);
expect(res.indexOfKey('two')).toBe(0);
expect(res.indexOfKey('three')).toBe(1);
expect(res.indexOfKey('one')).toBe(2);
expect(res.indexOfKey('cat')).toBe(undefined);
expect(res.has('two')).toBe(true);
expect(res.has('three')).toBe(true);
expect(res.has('one')).toBe(true);
expect(res.has('dog')).toBe(false);
expect(res.get('one').val).toBe('first');
expect(res.get('two').val).toBe('secondOM1');
expect(res.get('three').val).toBe('third');
expect(res.get('dog')).toBe(undefined);
};
result = OrderedMap.fromArray([
{uniqueID: 'two', val: 'secondOM2'},
{uniqueID: 'three', val: 'third'}
], extractUniqueID).merge(oneTwo);
testTwoThreeMergedWithOneTwo(result);
});
it('should merge mutually exclusive keys to the end.', function() {
var om = OrderedMap.fromArray([
{uniqueID: 'one', val: 'first'},
{uniqueID: 'two', val: 'second'}
], extractUniqueID);
var om2 = OrderedMap.fromArray([
{uniqueID: 'three', val: 'first'},
{uniqueID: 'four', val: 'second'}
], extractUniqueID);
var res = om.merge(om2);
expect(res.length).toBe(4);
});
it('should map correctly', function() {
var om = OrderedMap.fromArray([
{uniqueID: 'x', val: 'xx'},
{uniqueID: 'y', val: 'yy'},
{uniqueID: 'z', val: 'zz'}
], extractUniqueID);
var scope = {justToTestScope: 'justTestingScope'};
var verifyResult = function(omResult) {
expect(omResult.length).toBe(3);
expect(omResult.keyAtIndex(0)).toBe('x');
expect(omResult.keyAtIndex(1)).toBe('y');
expect(omResult.keyAtIndex(2)).toBe('z');
expect(omResult.get('x').val).toBe('xxx0justTestingScope');
expect(omResult.get('y').val).toBe('yyy1justTestingScope');
expect(omResult.get('z').val).toBe('zzz2justTestingScope');
};
var resultOM = om.map(function(itm, key, count) {
return {
uniqueID: itm.uniqueID,
val: itm.val + key + count + this.justToTestScope
};
}, scope);
verifyResult(resultOM);
var resArray = [];
om.forEach(function(itm, key, count) {
resArray.push({
uniqueID: itm.uniqueID,
val: itm.val + key + count + this.justToTestScope
});
}, scope);
resultOM = OrderedMap.fromArray(resArray, extractUniqueID);
verifyResult(resultOM);
});
it('should filter correctly', function() {
var om = OrderedMap.fromArray([
{uniqueID: 'x', val: 'xx'},
{uniqueID: 'y', val: 'yy'},
{uniqueID: 'z', val: 'zz'}
], extractUniqueID);
var scope = {justToTestScope: 'justTestingScope'};
var filteringCallback = function(item, key, indexInOriginal) {
expect(this).toBe(scope);
expect(key === 'x' || key === 'y' || key === 'z').toBe(true);
if (key === 'x') {
expect(item.val).toBe('xx');
expect(indexInOriginal).toBe(0);
return false;
} else if (key === 'y') {
expect(item.val).toBe('yy');
expect(indexInOriginal).toBe(1);
return true;
} else {
expect(item.val).toBe('zz');
expect(indexInOriginal).toBe(2);
return true;
}
};
var verifyResult = function(omResult) {
expect(omResult.length).toBe(2);
expect(omResult.keyAtIndex(0)).toBe('y');
expect(omResult.keyAtIndex(1)).toBe('z');
expect(omResult.has('x')).toBe(false);
expect(omResult.has('z')).toBe(true);
expect(omResult.get('z').val).toBe('zz');
expect(omResult.has('y')).toBe(true);
expect(omResult.get('y').val).toBe('yy');
};
var resultOM = om.filter(filteringCallback, scope);
verifyResult(resultOM);
});
it('should throw when providing invalid ranges to ranging', function() {
var om = OrderedMap.fromArray([
{uniqueID: 'x', val: 'xx'},
{uniqueID: 'y', val: 'yy'},
{uniqueID: 'z', val: 'zz'}
], extractUniqueID);
var scope = {justToTestScope: 'justTestingScope'};
expect(function() {
om.mapRange(duplicate, 0, 3, scope);
}).not.toThrow();
expect(function() {
om.filterRange(duplicate, 0, 3, scope);
}).not.toThrow();
expect(function() {
om.forEachRange(duplicate, 0, 3, scope);
}).not.toThrow();
expect(function() {
om.mapKeyRange(duplicate, 'x', 3, scope);
}).toThrow(
'Invariant Violation: mapKeyRange must be given keys ' +
'that are present.'
);
expect(function() {
om.forEachKeyRange(duplicate, 'x', 3, scope);
}).toThrow(
'Invariant Violation: forEachKeyRange must be given keys ' +
'that are present.'
);
expect(function() {
om.mapRange(duplicate, 0, 4, scope);
}).toThrow();
expect(function() {
om.filterRange(duplicate, 0, 4, scope);
}).toThrow();
expect(function() {
om.forEachRange(duplicate, 0, 4, scope);
}).toThrow();
expect(function() {
om.mapKeyRange(duplicate, 'x', null, scope);
}).toThrow();
expect(function() {
om.forEachKeyRange(duplicate, 'x', null, scope);
}).toThrow();
expect(function() {
om.mapRange(duplicate, -1, 1, scope);
}).toThrow();
expect(function() {
om.filterRange(duplicate, -1, 1, scope);
}).toThrow();
expect(function() {
om.forEachRange(duplicate, -1, 1, scope);
}).toThrow();
expect(function() {
om.mapKeyRange(duplicate, null, 'y', scope);
}).toThrow();
expect(function() {
om.forEachKeyRange(duplicate, null, 'y', scope);
}).toThrow();
expect(function() {
om.mapRange(duplicate, 0, 0, scope);
}).not.toThrow();
expect(function() {
om.filterRange(duplicate, 0, 0, scope);
}).not.toThrow();
expect(function() {
om.forEachRange(duplicate, 0, 0, scope);
}).not.toThrow();
expect(function() {
om.mapKeyRange(duplicate, 'x', 'x', scope);
}).not.toThrow();
expect(function() {
om.forEachKeyRange(duplicate, 'x', 'x', scope);
}).not.toThrow();
expect(function() {
om.mapRange(duplicate, 0, -1, scope);
}).toThrow();
expect(function() {
om.filterRange(duplicate, 0, -1, scope);
}).toThrow();
expect(function() {
om.forEachRange(duplicate, 0, -1, scope);
}).toThrow();
expect(function() {
om.mapKeyRange(duplicate, 'x', null, scope);
}).toThrow();
expect(function() {
om.forEachKeyRange(duplicate, 'x', null, scope);
}).toThrow();
expect(function() {
om.mapRange(duplicate, 2, 1, scope);
}).not.toThrow();
expect(function() {
om.filterRange(duplicate, 2, 1, scope);
}).not.toThrow();
expect(function() {
om.forEachRange(duplicate, 2, 1, scope);
}).not.toThrow();
expect(function() {
om.mapKeyRange(duplicate, 'z', 'z', scope);
}).not.toThrow();
expect(function() {
om.forEachKeyRange(duplicate, 'z', 'z', scope);
}).not.toThrow();
expect(function() {
om.mapRange(duplicate, 2, 2, scope);
}).toThrow();
expect(function() {
om.filterRange(duplicate, 2, 2, scope);
}).toThrow();
expect(function() {
om.forEachRange(duplicate, 2, 2, scope);
}).toThrow();
expect(function() {
om.mapKeyRange(duplicate, 'z', null, scope);
}).toThrow();
expect(function() {
om.forEachKeyRange(duplicate, 'z', null, scope);
}).toThrow();
// Provide keys in reverse order - should throw.
expect(function() {
om.mapKeyRange(duplicate, 'y', 'x', scope);
}).toThrow();
expect(function() {
om.forEachKeyRange(duplicate, 'y', 'x', scope);
}).toThrow();
});
// TEST length zero map, or keyrange start===end
it('should map range correctly', function() {
var om = OrderedMap.fromArray([
{uniqueID: 'x', val: 'xx'},
{uniqueID: 'y', val: 'yy'},
{uniqueID: 'z', val: 'zz'}
], extractUniqueID);
var scope = {justToTestScope: 'justTestingScope'};
var verifyThreeItems = function(omResult) {
expect(omResult.length).toBe(3);
expect(omResult.keyAtIndex(0)).toBe('x');
expect(omResult.keyAtIndex(1)).toBe('y');
expect(omResult.keyAtIndex(2)).toBe('z');
expect(omResult.get('x').val).toBe('xxx0justTestingScope');
expect(omResult.get('y').val).toBe('yyy1justTestingScope');
expect(omResult.get('z').val).toBe('zzz2justTestingScope');
};
var verifyFirstTwoItems = function(omResult) {
expect(omResult.length).toBe(2);
expect(omResult.keyAtIndex(0)).toBe('x');
expect(omResult.keyAtIndex(1)).toBe('y');
expect(omResult.get('x').val).toBe('xxx0justTestingScope');
expect(omResult.get('y').val).toBe('yyy1justTestingScope');
};
var verifyLastTwoItems = function(omResult) {
expect(omResult.length).toBe(2);
expect(omResult.keyAtIndex(0)).toBe('y');
expect(omResult.keyAtIndex(1)).toBe('z');
expect(omResult.get('y').val).toBe('yyy1justTestingScope');
expect(omResult.get('z').val).toBe('zzz2justTestingScope');
};
var verifyMiddleItem = function(omResult) {
expect(omResult.length).toBe(1);
expect(omResult.keyAtIndex(0)).toBe('y');
expect(omResult.get('y').val).toBe('yyy1justTestingScope');
};
var verifyEmpty = function(omResult) {
expect(omResult.length).toBe(0);
};
var omResultThree = om.mapRange(duplicate, 0, 3, scope);
verifyThreeItems(omResultThree);
var resArray = [];
var pushToResArray = function(itm, key, count) {
resArray.push({
uniqueID: itm.uniqueID,
val: itm.val + key + count + this.justToTestScope
});
};
om.forEachRange(pushToResArray, 0, 3, scope);
omResultThree = OrderedMap.fromArray(resArray, extractUniqueID);
verifyThreeItems(omResultThree);
var omResultFirstTwo = om.mapRange(duplicate, 0, 2, scope);
verifyFirstTwoItems(omResultFirstTwo);
resArray = [];
om.forEachRange(pushToResArray, 0, 2, scope);
omResultFirstTwo = OrderedMap.fromArray(resArray, extractUniqueID);
verifyFirstTwoItems(omResultFirstTwo);
var omResultLastTwo = om.mapRange(duplicate, 1, 2, scope);
verifyLastTwoItems(omResultLastTwo);
resArray = [];
om.forEachRange(pushToResArray, 1, 2, scope);
omResultLastTwo = OrderedMap.fromArray(resArray, extractUniqueID);
verifyLastTwoItems(omResultLastTwo);
var omResultMiddle = om.mapRange(duplicate, 1, 1, scope);
verifyMiddleItem(omResultMiddle);
resArray = [];
om.forEachRange(pushToResArray, 1, 1, scope);
omResultMiddle = OrderedMap.fromArray(resArray, extractUniqueID);
verifyMiddleItem(omResultMiddle);
var omResultNone = om.mapRange(duplicate, 1, 0, scope);
verifyEmpty(omResultNone);
});
it('should extract the original array correctly', function() {
var sourceArray = [
{uniqueID: 'x', val: 'xx'},
{uniqueID: 'y', val: 'yy'},
{uniqueID: 'z', val: 'zz'}
];
var om = OrderedMap.fromArray(sourceArray, extractUniqueID);
expect(om.toArray()).toEqual(sourceArray);
});
});<|fim▁end|> | |
<|file_name|>luiza.py<|end_file_name|><|fim▁begin|>import asyncio
import sys
import config
import sender
import receiver
print(sys.argv)<|fim▁hole|>
async def receiveMessageFromSerial():
return "Message"
def help():
print('Luiza 1.0 - ([email protected])')
print('Usage: python3 app.py [Options][Message][source][dest]')
print('')
print('SENDING MESSAGE')
print(' You will send a message from source to dest. 3 containing the text "Sending Message from Luiza"')
print(' python3 app.py --send "Sending Message from Luiza" 1 3')
print('RECEIVING MESSAGE')
print(' You will receive a message using the address 3')
print(' python3 app.py --read 3')
quit()
if len(sys.argv) == 1:
help()
if(sys.argv[1] == '--send'):
if len(sys.argv) < 3:
print('ERR: An error occurred. The command was Invalid.')
help()
else:
if(len(sys.argv[2]) < 10):
print('ERR: Message size must be less than 10.')
quit()
sender.message(sys.argv[3], sys.argv[4], sys.argv[2])
if(sys.argv[1] == '--read'):
if len(sys.argv) < 3:
print('ERR: An error occurred. The command was Invalid. Dest to read not informed !!')
help()
loop = asyncio.get_event_loop()
loop.run_until_complete(receiver.start())
loop.close()<|fim▁end|> | |
<|file_name|>script_msg.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use canvas_traits::CanvasMsg;
use euclid::point::Point2D;
use euclid::size::Size2D;
use ipc_channel::ipc::IpcSender;
use msg::constellation_msg::{AnimationState, DocumentState, IframeLoadInfo, NavigationDirection};<|fim▁hole|>use style_traits::viewport::ViewportConstraints;
use url::Url;
use util::cursor::Cursor;
/// Messages from the layout to the constellation.
#[derive(Deserialize, Serialize)]
pub enum LayoutMsg {
/// Indicates whether this pipeline is currently running animations.
ChangeRunningAnimationsState(PipelineId, AnimationState),
/// Layout thread failure.
Failure(Failure),
/// Requests that the constellation inform the compositor of the a cursor change.
SetCursor(Cursor),
/// Notifies the constellation that the viewport has been constrained in some manner
ViewportConstrained(PipelineId, ViewportConstraints),
}
/// Messages from the script to the constellation.
#[derive(Deserialize, Serialize)]
pub enum ScriptMsg {
/// Indicates whether this pipeline is currently running animations.
ChangeRunningAnimationsState(PipelineId, AnimationState),
/// Requests that a new 2D canvas thread be created. (This is done in the constellation because
/// 2D canvases may use the GPU and we don't want to give untrusted content access to the GPU.)
CreateCanvasPaintThread(Size2D<i32>, IpcSender<(IpcSender<CanvasMsg>, usize)>),
/// Requests that a new WebGL thread be created. (This is done in the constellation because
/// WebGL uses the GPU and we don't want to give untrusted content access to the GPU.)
CreateWebGLPaintThread(Size2D<i32>,
GLContextAttributes,
IpcSender<Result<(IpcSender<CanvasMsg>, usize), String>>),
/// Dispatched after the DOM load event has fired on a document
/// Causes a `load` event to be dispatched to any enclosing frame context element
/// for the given pipeline.
DOMLoad(PipelineId),
/// Script thread failure.
Failure(Failure),
/// Notifies the constellation that this frame has received focus.
Focus(PipelineId),
/// Re-send a mouse button event that was sent to the parent window.
ForwardMouseButtonEvent(PipelineId, MouseEventType, MouseButton, Point2D<f32>),
/// Re-send a mouse move event that was sent to the parent window.
ForwardMouseMoveEvent(PipelineId, Point2D<f32>),
/// Requests that the constellation retrieve the current contents of the clipboard
GetClipboardContents(IpcSender<String>),
/// <head> tag finished parsing
HeadParsed,
/// All pending loads are complete.
LoadComplete(PipelineId),
/// A new load has been requested.
LoadUrl(PipelineId, LoadData),
/// Dispatch a mozbrowser event to a given iframe. Only available in experimental mode.
MozBrowserEvent(PipelineId, SubpageId, MozBrowserEvent),
/// HTMLIFrameElement Forward or Back navigation.
Navigate(Option<(PipelineId, SubpageId)>, NavigationDirection),
/// Favicon detected
NewFavicon(Url),
/// Status message to be displayed in the chrome, eg. a link URL on mouseover.
NodeStatus(Option<String>),
/// Notification that this iframe should be removed.
RemoveIFrame(PipelineId),
/// A load has been requested in an IFrame.
ScriptLoadedURLInIFrame(IframeLoadInfo),
/// Requests that the constellation set the contents of the clipboard
SetClipboardContents(String),
/// Mark a new document as active
ActivateDocument(PipelineId),
/// Set the document state for a pipeline (used by screenshot / reftests)
SetDocumentState(PipelineId, DocumentState),
/// Update the pipeline Url, which can change after redirections.
SetFinalUrl(PipelineId, Url),
}<|fim▁end|> | use msg::constellation_msg::{Failure, MozBrowserEvent, PipelineId};
use msg::constellation_msg::{LoadData, SubpageId};
use msg::constellation_msg::{MouseButton, MouseEventType};
use offscreen_gl_context::GLContextAttributes; |
<|file_name|>editor.js<|end_file_name|><|fim▁begin|>(function(customer_id) {
tinymce.create('tinymce.plugins.ItStream_AttachToPost', {
customer_id: customer_id,
init : function(editor, plugin_url) {
editor.addButton('player_scheduling', {
title : 'Embed ItStream Player',
cmd : 'itm_scheduling',
image : plugin_url + '/scheduling.gif'
});
// Register a new TinyMCE command
editor.addCommand('itm_scheduling', this.render_attach_to_post_interface, {
editor: editor,
plugin: editor.plugins.ItStream_AttachToPost
});
},
createControl : function(n, cm) {
return null;
},
getInfo : function() {
return {
longname : 'ItStream Scheduling Button',
author : 'It-Marketing',
authorurl : 'http://www.itmarketingsrl.it/',
infourl : 'http://wiki.moxiecode.com/index.php/TinyMCE:Plugins/example',
version : "0.1"
};
},
wm_close_event: function() {
// Restore scrolling for the main content window when the attach to post interface is closed
jQuery('html,body').css('overflow', 'auto');
tinyMCE.activeEditor.selection.select(tinyMCE.activeEditor.dom.select('p')[0]);
tinyMCE.activeEditor.selection.collapse(0);
},
render_attach_to_post_interface: function() {
var attach_to_post_url = itstream_ajax.attach_to_post;
if (typeof(customer_id) != 'undefined') {
attach_to_post_url += "?id=" + itstream_ajax.customer_id;
}
var win = window;
while (win.parent != null && win.parent != win) {
win = win.parent;
}
win = jQuery(win);
var winWidth = win.width();
<|fim▁hole|> var popupWidth = 680;
var popupHeight = 560;
var minWidth = 320;
var minHeight = 200;
var maxWidth = winWidth - (winWidth * 0.05);
var maxHeight = winHeight - (winHeight * 0.05);
if (maxWidth < minWidth) { maxWidth = winWidth - 10; }
if (maxHeight < minHeight) { maxHeight = winHeight - 10; }
if (popupWidth > maxWidth) { popupWidth = maxWidth; }
if (popupHeight > maxHeight) { popupHeight = maxHeight; }
// Open a window
this.editor.windowManager.open({
url: attach_to_post_url,
id: 'its_attach_to_post_dialog',
width: popupWidth,
height: popupHeight,
title: 'ItStream - Embed Player',
inline: 1
/*buttons: [{
text: 'Close',
onclick: 'close'
}]*/
});
// Ensure that the window cannot be scrolled - XXX actually allow scrolling in the main window and disable it for the inner-windows/frames/elements as to create a single scrollbar
jQuery('html,body').css('overflow', 'hidden');
jQuery('#its_attach_to_post_dialog_ifr').css('overflow-y', 'auto');
jQuery('#its_attach_to_post_dialog_ifr').css('overflow-x', 'hidden');
}
});
// Register plugin
tinymce.PluginManager.add( 'itstream', tinymce.plugins.ItStream_AttachToPost );
})(itstream_ajax.customer_id);<|fim▁end|> | var winHeight = win.height();
|
<|file_name|>color.js<|end_file_name|><|fim▁begin|>/**
Copyright (c) 2007 Bill Orcutt (http://lilyapp.org, http://publicbeta.cx)
Permission is hereby granted, free of charge, to any person obtaining<|fim▁hole|>distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/**
* Construct a new color object
* @class
* @constructor
* @extends LilyObjectBase
*/
function $color(arg)
{
var thisPtr=this;
var websafe=arg||false;
this.outlet1 = new this.outletClass("outlet1",this,"random color in hexadecimal");
this.inlet1=new this.inletClass("inlet1",this,"\"bang\" outputs random color");
// getRandomColor()
// Returns a random hex color. Passing true for safe returns a web safe color
//code hijacked from http://www.scottandrew.com/js/js_util.js
function getRandomColor(safe)
{
var vals,r,n;
if (safe)
{
v = "0369CF";
n = 3;
} else
{
v = "0123456789ABCDEF";
n = 6;
}
var c = "#";
for (var i=0;i<n;i++)
{
var ch = v.charAt(Math.round(Math.random() * (v.length-1)));
c += (safe)?ch+ch:ch;
}
return c;
}
function RGBtoHex(R,G,B) {
return toHex(R)+toHex(G)+toHex(B);
}
function toHex(N) {
if (N==null) return "00";
N=parseInt(N); if (N==0 || isNaN(N)) return "00";
N=Math.max(0,N); N=Math.min(N,255); N=Math.round(N);
return "0123456789ABCDEF".charAt((N-N%16)/16) + "0123456789ABCDEF".charAt(N%16);
}
function HSLtoRGB (h,s,l) {
if (s == 0) return [l,l,l] // achromatic
h=h*360/255;s/=255;l/=255;
if (l <= 0.5) rm2 = l + l * s;
else rm2 = l + s - l * s;
rm1 = 2.0 * l - rm2;
return [toRGB1(rm1, rm2, h + 120.0),toRGB1(rm1, rm2, h),toRGB1(rm1, rm2, h - 120.0)];
}
function toRGB1(rm1,rm2,rh) {
if (rh > 360.0) rh -= 360.0;
else if (rh < 0.0) rh += 360.0;
if (rh < 60.0) rm1 = rm1 + (rm2 - rm1) * rh / 60.0;
else if (rh < 180.0) rm1 = rm2;
else if (rh < 240.0) rm1 = rm1 + (rm2 - rm1) * (240.0 - rh) / 60.0;
return Math.round(rm1 * 255);
}
//output random color
this.inlet1["random"]=function() {
thisPtr.outlet1.doOutlet(getRandomColor(websafe));
}
//convert RGB to hex
this.inlet1["RGBtoHEX"]=function(rgb) {
var tmp = rgb.split(" ");
thisPtr.outlet1.doOutlet("#"+RGBtoHex(tmp[0],tmp[1],tmp[2]));
}
//convert HSL to hex
this.inlet1["HSLtoHEX"]=function(hsl) {
var tmp = hsl.split(" ");
var rgb = HSLtoRGB(tmp[0],tmp[1],tmp[2]);
thisPtr.outlet1.doOutlet("#"+RGBtoHex(rgb[0],rgb[1],rgb[2]));
}
return this;
}
var $colorMetaData = {
textName:"color",
htmlName:"color",
objectCategory:"Math",
objectSummary:"Various color related utilities",
objectArguments:"websafe colors only [false]"
}<|fim▁end|> | a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish, |
<|file_name|>MathTools.H<|end_file_name|><|fim▁begin|>#ifndef ATOOLS_Math_MathTools_H
#define ATOOLS_Math_MathTools_H
/* Declarations for discrete functions */
#ifdef __GNUC__
// GNU C++ Compiler
#include <cmath>
#include <cstdlib>
/*
if __GNUC__ == 3 && __GNUC_MINOR__ == 0.
if defined __GNUC__ && defined __cplusplus && __GNUC_MINOR__ >= 8
if !defined __GNUC__ || __GNUC__ < 2 || __GNUC_MINOR__ < 7
#define GCC_VERSION (__GNUC__ * 10000 \
+ __GNUC_MINOR__ * 100 \
+ __GNUC_PATCHLEVEL__)
...
Test for GCC > 3.2.0
#if GCC_VERSION > 30200
*/
#endif
#if defined(__sgi) && !defined(__GNUC__)
// SGI IRIX C++ Compiler, complex but not double methods need "std::", e.g. Abs() exp()
#include <iostream>
#include <math.h>
#endif
#include "ATOOLS/Math/MyComplex.H"
namespace ATOOLS {
// Generic minimum/maximum helpers (const and non-const overloads).
// They compare with operator</operator> and return a reference to one
// of the inputs, so the arguments must outlive the returned reference.
template <class Type> const Type &Min(const Type &a,const Type &b)
{ return a<b?a:b; }
template <class Type> const Type &Max(const Type &a,const Type &b)
{ return a>b?a:b; }
template <class Type> Type &Min(Type &a,Type &b)
{ return a<b?a:b; }
template <class Type> Type &Max(Type &a,Type &b)
{ return a>b?a:b; }
// Numerical tolerance used by the IsZero/IsEqual helpers below.
inline double Accu() {return 1.e-12;}
// Looser tolerance (square root of Accu()).
inline double SqrtAccu() {return 1.e-6;}
// Sign of the argument: -1 for negative values, +1 otherwise (0 maps to +1).
inline int Sign(const int& a) { return a<0?-1:1; }
inline double Sign(const double& a) { return a<0.0?-1.0:1.0; }
// Heaviside step function: 0 for negative arguments, 1 otherwise.
inline double Theta(const double &a) { return a<0.0?0.0:1.0; }
// Absolute value for int / double.
inline int iabs(const int& a) { return a<0?-a:a; }
inline double dabs(const double& a) { return a<0.0?-a:a; }
// Square of a real scalar.
template <typename Scalar>
inline Scalar sqr(const Scalar &x) { return x*x; }
// Square of a complex scalar.
template <typename Scalar> inline std::complex<Scalar>
csqr(const std::complex<Scalar> &x) { return x*x; }
// 1 if the magnitude of a is below the explicit threshold crit, else 0.
inline int IsZero(const double &a,const double &crit)
{ return dabs(a)<crit?1:0; }
inline int IsZero(const Complex &a,const double &crit)
{ return std::abs(a)<crit?1:0; }
// Relative-equality tests: two numbers count as equal when their
// relative difference |a-b| / (|a|+|b|) falls below the tolerance
// (Accu() by default, or an explicit crit).  The pair (0,0) is handled
// up front to avoid dividing by zero.
inline int IsEqual(const double &a,const double &b)
{
  if (a==0. && b==0.) return 1;
  const double reldiff = dabs(a-b)/(dabs(a)+dabs(b));
  return reldiff<Accu() ? 1 : 0;
}
inline int IsEqual(const double &a,const double &b,const double &crit)
{
  if (a==0. && b==0.) return 1;
  const double reldiff = dabs(a-b)/(dabs(a)+dabs(b));
  return reldiff<crit ? 1 : 0;
}
inline int IsEqual(const Complex &a,const Complex &b)
{
  if (a==Complex(0.,0.) && b==Complex(0.,0.)) return 1;
  const double reldiff = std::abs(a-b)/(std::abs(a)+std::abs(b));
  return reldiff<Accu() ? 1 : 0;
}
inline Complex csqrt(const double &d)
{
if (d<0) return Complex(0.,sqrt(-d));
return sqrt(d);<|fim▁hole|>
// Euler-Mascheroni constant.
#define GAMMA_E 0.5772156649015328606
// Logarithm of the Gamma function (declared here, defined elsewhere).
double Gammln(double xx);
// Real part of the incomplete Gamma function at x, to precision prec.
double ReIncompleteGamma0(double x,double prec=1.e-6);
// Dilogarithm Li_2(x) for real and complex arguments.
double DiLog(double x);
Complex DiLog(const Complex& x);
int Factorial(const int n);
double ExpIntegral(int n, double x);
// Numeric classification / magnitude helpers; the primary templates are
// declared here and explicitly specialized below for double,
// long double and their complex counterparts.
template<typename Scalar> inline bool IsNan(const Scalar& x);
template<typename Scalar> inline bool IsBad(const Scalar& x);
template<typename Scalar> inline bool IsZero(const Scalar& x);
template<typename Scalar> inline Scalar Abs(const Scalar& x);
template<typename Scalar> inline Scalar Abs(const std::complex<Scalar>& x);
// --- real-type specializations (double, long double) ------------------
// IsNan checks both x and -x; IsBad additionally flags infinities;
// IsZero compares |x| against the global tolerance Accu().
template<> inline bool IsNan<double>(const double& x) {
  return std::isnan(x)||std::isnan(-x);
}
template<> inline bool IsBad<double>(const double& x) {
  return IsNan(x)||std::isinf(x)||std::isinf(-x);
}
template<> inline bool IsZero<double>(const double& x) {
  return dabs(x)<Accu()?1:0;
}
template<> inline double Abs<double>(const double& x) {
  return x>0.0?x:-x;
}
template<> inline bool IsNan<long double>(const long double& x) {
  return std::isnan(x)||std::isnan(-x);
}
template<> inline bool IsBad<long double>(const long double& x) {
  return IsNan(x)||std::isinf(x)||std::isinf(-x);
}
template<> inline bool IsZero<long double>(const long double& x) {
  return dabs(x)<Accu()?1:0;
}
template<> inline long double Abs<long double>(const long double& x) {
  return x>0.0?x:-x;
}
template<> inline bool IsNan<Complex>(const Complex& x) {
return (std::isnan(real(x)) || std::isnan(imag(x)) ||
std::isnan(-real(x)) || std::isnan(-imag(x)));
}
template<> inline bool IsBad<Complex>(const Complex& x) {
return IsNan(x)||std::isinf(real(x))||std::isinf(imag(x))
||std::isinf(-real(x))||std::isinf(-imag(x));
}
template<> inline bool IsZero<Complex>(const Complex& x) {
return std::abs(x)<Accu()?1:0;
}
template<> inline double Abs<double>(const Complex& x) {
return std::abs(x);
}
template<> inline bool IsNan<std::complex<long double> >
(const std::complex<long double>& x) {
return (std::isnan(real(x)) || std::isnan(imag(x)) ||
std::isnan(-real(x)) || std::isnan(-imag(x)));
}
template<> inline bool IsBad<std::complex<long double> >
(const std::complex<long double>& x) {
return IsNan(x)||std::isinf(real(x))||std::isinf(imag(x))
||std::isinf(-real(x))||std::isinf(-imag(x));
}
template<> inline bool IsZero<std::complex<long double> >
(const std::complex<long double>& x) {
return std::abs(x)<Accu()?1:0;
}
template<> inline long double Abs<long double>
(const std::complex<long double>& x) {
return std::abs(x);
}
template<class T1, class T2>
struct promote_trait {
};
#define DECLARE_PROMOTE(A,B,C) \
template<> struct promote_trait<A,B> { \
typedef C T_promote; \
}
DECLARE_PROMOTE(double,Complex,Complex);
DECLARE_PROMOTE(Complex,double,Complex);
DECLARE_PROMOTE(int,double,double);
DECLARE_PROMOTE(double,int,double);
DECLARE_PROMOTE(int,Complex,Complex);
DECLARE_PROMOTE(Complex,int,Complex);
DECLARE_PROMOTE(double,double,double);
DECLARE_PROMOTE(Complex,Complex,Complex);
DECLARE_PROMOTE(long double,std::complex<long double>,
std::complex<long double>);
DECLARE_PROMOTE(std::complex<long double>,long double,
std::complex<long double>);
DECLARE_PROMOTE(int,long double,long double);
DECLARE_PROMOTE(long double,int,long double);
DECLARE_PROMOTE(int,std::complex<long double>,std::complex<long double>);
DECLARE_PROMOTE(std::complex<long double>,int,std::complex<long double>);
DECLARE_PROMOTE(long double,long double,long double);
DECLARE_PROMOTE(std::complex<long double>,std::complex<long double>,
std::complex<long double>);
#define PROMOTE(TYPE1,TYPE2) typename promote_trait<TYPE1,TYPE2>::T_promote
/*!
\file
\brief contains a collection of simple mathematical functions
*/
/*!
\fn inline Type Min(Type a, Type b)
\brief returns the minimum of two numbers
*/
/*!
\fn inline Type Max(Type a, Type b)
\brief returns the maximum of two numbers
*/
/*!
\fn inline int Sign(const int& a) {return (a<0) ? -1 : 1;}
\brief returns the sign of the argument
*/
/*!
\fn inline int iabs(const int& a) {return a>0 ? a : -a;}
\brief returns the absolute value of the argument
*/
/*!
\fn inline double dabs(const double& a) {return a>0 ? a : -a;}
\brief returns the absolute value of the argument
*/
/*!
\fn inline double sqr(double x) {return x*x;}
\brief returns the argument squared
*/
/*!
\fn inline double Accu() {return 1.e-12;};
\brief returns a (platform dependent) precision, default is \f$10^{-12}\f$
*/
/*!
\fn inline int IsZero(const double a)
\brief returns \em true if argument is smaller than Accu()
*/
/*!
\fn inline int IsZero(const Complex& a)
\brief returns \em true if argument is smaller than Accu()
*/
/*!
\fn inline int IsEqual(const double a,const double b)
\brief returns \em true if arguments are equal (compared to Accu())
*/
/*!
\fn inline int IsEqual(const Complex& a,const Complex& b)
\brief returns \em true if arguments are equal (compared to Accu())
*/
/*!
\fn inline Complex csqrt(const double d)
\brief returns the complex root of a (possibly negative) float or double variable
*/
/*!
\fn inline Complex csqr(Complex x)
\brief returns the argument squared
*/
/*!
\fn double Gammln(double xx)
\brief calculates the logarithm of the Gammafunction
*/
/*!
\fn double ReIncompleteGamma0(double x, double prec)
\brief calculates the real part of the incomplete Gammafunction.
*/
/*!
\fn double DiLog(double x)
\brief calculates the real part of Li_2(x).
*/
}
#endif<|fim▁end|> | } |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | //! Mathematical helper functions and types.
pub mod nq;
pub mod utils; |
<|file_name|>rt_gen.go<|end_file_name|><|fim▁begin|>package btree
// NOTE: THIS FILE WAS PRODUCED BY THE
// MSGP CODE GENERATION TOOL (github.com/tinylib/msgp)
// DO NOT EDIT
import (
"github.com/tinylib/msgp/msgp"
)
// MarshalMsg implements msgp.Marshaler.
//
// NOTE(review): this file is generated by msgp ("DO NOT EDIT" header
// above); comments added here will be lost on regeneration.
// The tree header is encoded as a fixed 4-entry msgpack map with the
// short keys "u" (UUID), "c" (Count), "r" (Root) and "nw" (NodeWidth).
func (z *Tr) MarshalMsg(b []byte) (o []byte, err error) {
	o = msgp.Require(b, z.Msgsize())
	// map header, size 4
	// string "u"
	o = append(o, 0x84, 0xa1, 0x75)
	o, err = z.UUID.MarshalMsg(o)
	if err != nil {
		return
	}
	// string "c"
	o = append(o, 0xa1, 0x63)
	o = msgp.AppendInt(o, z.Count)
	// string "r"
	o = append(o, 0xa1, 0x72)
	o, err = z.Root.MarshalMsg(o)
	if err != nil {
		return
	}
	// string "nw"
	o = append(o, 0xa2, 0x6e, 0x77)
	o = msgp.AppendInt(o, z.NodeWidth)
	return
}
// UnmarshalMsg implements msgp.Unmarshaler
func (z *Tr) UnmarshalMsg(bts []byte) (o []byte, err error) {
var field []byte
_ = field
var isz uint32
isz, bts, err = msgp.ReadMapHeaderBytes(bts)
if err != nil {
return
}<|fim▁hole|> isz--
field, bts, err = msgp.ReadMapKeyZC(bts)
if err != nil {
return
}
switch msgp.UnsafeString(field) {
case "u":
bts, err = z.UUID.UnmarshalMsg(bts)
if err != nil {
return
}
case "c":
z.Count, bts, err = msgp.ReadIntBytes(bts)
if err != nil {
return
}
case "r":
bts, err = z.Root.UnmarshalMsg(bts)
if err != nil {
return
}
case "nw":
z.NodeWidth, bts, err = msgp.ReadIntBytes(bts)
if err != nil {
return
}
default:
bts, err = msgp.Skip(bts)
if err != nil {
return
}
}
}
o = bts
return
}
// Msgsize returns an upper bound on the encoded size of z:
// map header (1) plus, per field, the key bytes and the field's size.
// NOTE(review): msgp-generated; edits are lost on regeneration.
func (z *Tr) Msgsize() (s int) {
	s = 1 + 2 + z.UUID.Msgsize() + 2 + msgp.IntSize + 2 + z.Root.Msgsize() + 3 + msgp.IntSize
	return
}
<|file_name|>sidebar-items.js<|end_file_name|><|fim▁begin|><|fim▁hole|>initSidebarItems({"mod":[["big5","Big5 and HKSCS."]]});<|fim▁end|> | |
<|file_name|>UserTest.py<|end_file_name|><|fim▁begin|># Copyright (C) 2014 Adam Schubert <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__author__="Adam Schubert <[email protected]>"
__date__ ="$12.10.2014 2:20:45$"
import tests.DwaTestCase as DwaTestCase
import unittest
import time
class UserTest(DwaTestCase.DwaTestCase):
def setUp(self):
    # Shared fixture: build the API user client and a per-run unique
    # username (timestamp suffix avoids collisions across repeated runs).
    DwaTestCase.DwaTestCase.setUp(self)
    self.user = self.d.user()
    self.username = self.credential['username'] + 'UserTest' + str(time.time())
def testCreate(self):
params = {}
params['password'] = self.credential['password']
params['username'] = self.username
params['nickname'] = DwaTestCase.generateNickname()
params['email'] = self.username + '@divine-warfare.com'
params['active'] = True
#create
message = self.user.create(params)['message']
<|fim▁hole|>
delParams = {}
delParams['user_id'] = userData['id']
delParams['user_token'] = userData['token']
self.user.delete(delParams)
self.assertEqual(message, 'User created')
def testDelete(self):
    """Create a throw-away user, then verify the delete endpoint's reply."""
    params = {}
    params['password'] = self.credential['password']
    params['username'] = self.username
    params['nickname'] = DwaTestCase.generateNickname()
    params['email'] = self.username + '@divine-warfare.com'
    params['active'] = True
    # create the user to be deleted
    self.user.create(params)
    # fetch id/token for the freshly created user
    userData = self.user.token({'password': params['password'], 'username': params['username']})
    delParams = {}
    delParams['user_id'] = userData['id']
    delParams['user_token'] = userData['token']
    # delete and check the service message
    message = self.user.delete(delParams)['message']
    self.assertEqual(message, 'User deleted')
def testList(self):
    # First page of users (limit 20); response must carry data and paging info.
    data = self.user.list({'limit': 20, 'page': 0})
    self.assertEqual(data['message'], 'OK')
    self.assertIsNotNone(data['data'])
    self.assertIsNotNone(data['pages'])
def testToken(self):
    # Token creation: expect a 32-char token, a user id, and an
    # expiration timestamp in "YYYY-MM-DD hh:mm:ss" form.
    data = self.user.token(self.credential)
    self.assertEqual(data['message'], 'Token created')
    self.assertEqual(len(data['token']), 32)
    self.assertIsNotNone(data['id'])
    self.assertRegexpMatches(data['token_expiration'], '(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2}):(\d{2})')
def testPassword(self):
    # Change password to the same value (keeps the fixture credential usable).
    data_token = self.user.token(self.credential)
    data = self.user.password({'old_password': self.credential['password'], 'new_password': self.credential['password'], 'user_token': data_token['token'], 'user_id': data_token['id']})
    self.assertEqual(data['message'], 'Password changed')
def testActive(self):
    # Activate the fixture user and check the confirmation message.
    data_token = self.user.token(self.credential)
    data = self.user.active({'user_id': data_token['id'], 'active': True, 'user_token': data_token['token']})
    self.assertEqual(data['message'], 'User activated')
def testDeactive(self):
    # Deactivate the fixture user and check the confirmation message.
    # NOTE(review): leaves the account deactivated; presumably test
    # ordering or setUp tolerates this - verify against the suite.
    data_token = self.user.token(self.credential)
    data = self.user.active({'user_id': data_token['id'], 'active': False, 'user_token': data_token['token']})
    self.assertEqual(data['message'], 'User deactivated')
#Will fail cos our mailserver checks if maildir exists...
#@unittest.expectedFailure
def testRequestPasswordReset(self):
    # example.com address is not registered, so the service replies
    # 'Email not found' (the success-path assertion is kept commented out
    # because the mail server rejects nonexistent maildirs - see note above).
    email = self.credential['username'] + '@example.com';
    content_fill = 'abc' * 5333  # ~16 KB filler to exercise large email bodies
    data = self.user.request_password_reset({'email': email, 'email_content': 'URL: example.com/password/reset/{reset_token}' + content_fill, 'email_subject': 'Password reset unittest', 'email_from': '[email protected]'})
    #self.assertEqual(data['message'], 'Email with reset token has been send')
    self.assertEqual(data['message'], 'Email not found')
@unittest.expectedFailure
def testDoPasswordReset(self):
    # A USER token is deliberately passed as the reset token (the real one
    # only exists in the reset email), so the call must fail - hence
    # @expectedFailure.
    data_token = self.user.token(self.credential)
    data = self.user.request_password_reset({'reset_token': data_token['token'], 'new_password': 'newPassword'})
    self.assertEqual(data['message'], 'Password changed')
userData = self.user.token({'password': params['password'], 'username': params['username']}) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.