| prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
| --- | --- |
<|file_name|>mulpd.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
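// Each test asserts one encoding; e.g. &[102, 15, 89, 215] is 66 0F 59 D7: the
// 0x66 prefix and 0F 59 (MULPD) opcode, with ModRM 0xD7 selecting xmm2, xmm7.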
fn mulpd_1() {
run_test(&Instruction { mnemonic: Mnemonic::MULPD, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM7)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 89, 215], OperandSize::Dword)
}<|fim▁hole|>
fn mulpd_3() {
run_test(&Instruction { mnemonic: Mnemonic::MULPD, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM5)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 89, 245], OperandSize::Qword)
}
fn mulpd_4() {
run_test(&Instruction { mnemonic: Mnemonic::MULPD, operand1: Some(Direct(XMM5)), operand2: Some(IndirectScaledDisplaced(RSI, Two, 1406090988, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 89, 44, 117, 236, 62, 207, 83], OperandSize::Qword)
}<|fim▁end|> |
fn mulpd_2() {
run_test(&Instruction { mnemonic: Mnemonic::MULPD, operand1: Some(Direct(XMM0)), operand2: Some(IndirectScaledDisplaced(EDI, Four, 1728580241, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 15, 89, 4, 189, 145, 10, 8, 103], OperandSize::Dword)
} |
<|file_name|>Babel.py<|end_file_name|><|fim▁begin|>import sublime
import sublime_plugin
import json
from os.path import dirname, realpath, join
from .node_bridge import node_bridge
# monkeypatch `Region` to be iterable
sublime.Region.totuple = lambda self: (self.a, self.b)
sublime.Region.__iter__ = lambda self: self.totuple().__iter__()
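# (this is what lets `start, end = sel` unpack a Region in has_selection below)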
BIN_PATH = join(sublime.packages_path(), dirname(realpath(__file__)), 'babel-transform.js')
class BabelCommand(sublime_plugin.TextCommand):
def run(self, edit):
view = self.view
selected_text = self.get_text()
code = self.babelify(selected_text)
if code:
w = sublime.Window.new_file(view.window())
w.settings().set('default_extension', 'js')
w.set_syntax_file(view.settings().get('syntax'))
w.set_scratch(True)
w.insert(edit, 0, code)
def babelify(self, data):
try:
return node_bridge(data, BIN_PATH, [json.dumps({
'filename': self.view.file_name(),
'debug': self.get_setting('debug'),
'ensure_newline_at_eof': self.get_setting('ensure_newline_at_eof'),<|fim▁hole|> except Exception as e:
return str(e)
def get_text(self):
if not self.has_selection():
region = sublime.Region(0, self.view.size())
return self.view.substr(region)
selected_text = ''
for region in self.view.sel():
selected_text = selected_text + self.view.substr(region) + '\n'
return selected_text
def has_selection(self):
for sel in self.view.sel():
start, end = sel
if start != end:
return True
return False
def get_setting(self, key):
settings = self.view.settings().get('Babel')
if settings is None:
settings = sublime.load_settings('Babel.sublime-settings')
return settings.get(key)<|fim▁end|> | 'use_local_babel': self.get_setting('use_local_babel'),
'node_modules': self.get_setting('node_modules'),
'options': self.get_setting('options')
})]) |
<|file_name|>EndConditionScience.java<|end_file_name|><|fim▁begin|>package com.avrgaming.civcraft.endgame;
import java.util.ArrayList;
import com.avrgaming.civcraft.main.CivGlobal;
import com.avrgaming.civcraft.main.CivMessage;
import com.avrgaming.civcraft.object.Civilization;
import com.avrgaming.civcraft.object.Town;
import com.avrgaming.civcraft.sessiondb.SessionEntry;
import com.avrgaming.civcraft.structure.wonders.Wonder;
public class EndConditionScience extends EndGameCondition {
String techname;
@Override
public void onLoad() {
techname = this.getString("tech");
}
@Override
public boolean check(Civilization civ) {
if (!civ.hasTechnology(techname)) {
return false;
}
if (civ.isAdminCiv()) {
return false;
}
boolean hasGreatLibrary = false;
for (Town town : civ.getTowns()) {
if (town.getMotherCiv() != null) {
continue;
}
for (Wonder wonder : town.getWonders()) {
if (wonder.isActive()) {
if (wonder.getConfigId().equals("w_greatlibrary")) {
hasGreatLibrary = true;
break;
}
}
}
if (hasGreatLibrary) {
break;
}
}
if (!hasGreatLibrary) {
return false;
}
return true;
}
@Override
public boolean finalWinCheck(Civilization civ) {
Civilization rival = getMostAccumulatedBeakers();
if (rival != civ) {
CivMessage.global(civ.getName()+" doesn't have enough beakers for a scientific victory. The rival civilization of "+rival.getName()+" has more!");
return false;
}
<|fim▁hole|> public Civilization getMostAccumulatedBeakers() {
double most = 0;
Civilization mostCiv = null;
for (Civilization civ : CivGlobal.getCivs()) {
double beakers = getExtraBeakersInCiv(civ);
if (beakers > most) {
most = beakers;
mostCiv = civ;
}
}
return mostCiv;
}
@Override
public String getSessionKey() {
return "endgame:science";
}
@Override
protected void onWarDefeat(Civilization civ) {
/* remove any extra beakers we might have. */
CivGlobal.getSessionDB().delete_all(getBeakerSessionKey(civ));
civ.removeTech(techname);
CivMessage.sendCiv(civ, "We were defeated while trying to achieve a science victory! We've lost all of our accumulated beakers and our victory tech!");
civ.save();
this.onFailure(civ);
}
public static String getBeakerSessionKey(Civilization civ) {
return "endgame:sciencebeakers:"+civ.getId();
}
public double getExtraBeakersInCiv(Civilization civ) {
ArrayList<SessionEntry> entries = CivGlobal.getSessionDB().lookup(getBeakerSessionKey(civ));
if (entries.size() == 0) {
return 0;
}
return Double.valueOf(entries.get(0).value);
}
public void addExtraBeakersToCiv(Civilization civ, double beakers) {
ArrayList<SessionEntry> entries = CivGlobal.getSessionDB().lookup(getBeakerSessionKey(civ));
double current = 0;
if (entries.size() == 0) {
CivGlobal.getSessionDB().add(getBeakerSessionKey(civ), ""+beakers, civ.getId(), 0, 0);
current += beakers;
} else {
current = Double.valueOf(entries.get(0).value);
current += beakers;
CivGlobal.getSessionDB().update(entries.get(0).request_id, entries.get(0).key, ""+current);
}
//DecimalFormat df = new DecimalFormat("#.#");
//CivMessage.sendCiv(civ, "Added "+df.format(beakers)+" beakers to our scientific victory! We now have "+df.format(current)+" beakers saved up.");
}
public static Double getBeakersFor(Civilization civ) {
ArrayList<SessionEntry> entries = CivGlobal.getSessionDB().lookup(getBeakerSessionKey(civ));
if (entries.size() == 0) {
return 0.0;
} else {
return Double.valueOf(entries.get(0).value);
}
}
}<|fim▁end|> | return true;
}
|
<|file_name|>controllers.js<|end_file_name|><|fim▁begin|>angular.module('starter.controllers', [])
// A simple controller that fetches a list of data from a service
.controller('JobIndexCtrl', function($rootScope, PetService) {
// "Pets" is a service returning mock data (services.js)
$rootScope.pets = PetService.all();
})
// A simple controller that shows a tapped item's data
.controller('PetDetailCtrl', function($scope, $rootScope, $state, $stateParams, PetService) {<|fim▁hole|> $scope.goBack = function() { $state.go('job.pet-index'); };
})
.controller('NewJobDetailCtrl', function($scope, $rootScope, $state, $stateParams, PetService) {
$rootScope.pet = PetService.get($stateParams.jobId);
$scope.goBack = function() { $state.go('new-job'); };
})
.controller('LoginCtrl', function($scope, $state) {
$scope.login = function () {
$state.go("tab.adopt");
};
})
.controller('JobCreationCtrl', function($scope, $rootScope, $state, $stateParams, $localstorage, PetService) {
$scope.createJob = function () {
var title = document.getElementById("title");
var desc = document.getElementById("desc");
var location = document.getElementById("location");
if (title.value.trim() !== "" && desc.value.trim() !== "" && location.value.trim() !== "") {
var newJobId = $localstorage.length() - 1;
var newJob = {
id: String(newJobId),
title: String(title.value.trim()),
description: String(desc.value.trim()),
location: String(location.value.trim()),
quote: {
damageDesc: "",
estimatedCost: "",
estimatedTime: "",
},
report: {
fixDesc: "",
actualCost: "",
startTime: "",
finishTime: ""
}
};
$rootScope.pet = PetService.get(newJobId);
$rootScope.pets = PetService.all();
$localstorage.setObject(newJobId, newJob);
$state.go('new-quote', {'jobId' : newJobId});
}
}
});<|fim▁end|> | $rootScope.pet = PetService.get($stateParams.jobId); |
<|file_name|>struct_field.rs<|end_file_name|><|fim▁begin|>struct S {
foobar: i32,
frobnicator: i32,
}
fn main() {
let _ = S {
foo<caret>
frobnicator: 92
};<|fim▁hole|><|fim▁end|> | } |
<|file_name|>StackedBarChart.stories.tsx<|end_file_name|><|fim▁begin|>import React from "react"
import { StackedBarChart } from "./StackedBarChart.js"
import {
SampleColumnSlugs,
SynthesizeFruitTable,
SynthesizeGDPTable,
} from "../../coreTable/OwidTableSynthesizers.js"
export default {
title: "StackedBarChart",
component: StackedBarChart,
}
export const ColumnsAsSeries = (): JSX.Element => {
const table = SynthesizeFruitTable()
return (
<svg width={600} height={600}>
<StackedBarChart
manager={{ table, selection: table.sampleEntityName(1) }}
/>
</svg>
)
}
export const EntitiesAsSeries = (): JSX.Element => {
const table = SynthesizeGDPTable({ entityCount: 5 })
const manager = {
table,
selection: table.availableEntityNames,
yColumnSlugs: [SampleColumnSlugs.Population],
}
return (
<svg width={600} height={600}>
<StackedBarChart manager={manager} />
</svg>
)
}
export const EntitiesAsSeriesWithMissingRows = (): JSX.Element => {
const table = SynthesizeGDPTable({ entityCount: 5 }).dropRandomRows(30)
const manager = {
table,<|fim▁hole|> yColumnSlugs: [SampleColumnSlugs.Population],
}
return (
<svg width={600} height={600}>
<StackedBarChart manager={manager} />
</svg>
)
}<|fim▁end|> | selection: table.availableEntityNames, |
<|file_name|>font_list.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use core_text;
use text::util::unicode_plane;
use ucd::{Codepoint, UnicodeBlock};
pub fn for_each_available_family<F>(mut callback: F)
where
F: FnMut(String),
{
let family_names = core_text::font_collection::get_family_names();
for family_name in family_names.iter() {
callback(family_name.to_string());
}
}
pub fn for_each_variation<F>(family_name: &str, mut callback: F)
where
F: FnMut(String),
{
debug!("Looking for faces of family: {}", family_name);
let family_collection = core_text::font_collection::create_for_family(family_name);
if let Some(family_collection) = family_collection {
if let Some(family_descriptors) = family_collection.get_descriptors() {
for family_descriptor in family_descriptors.iter() {
callback(family_descriptor.font_name());
}
}
}
}
pub fn system_default_family(_generic_name: &str) -> Option<String> {
None
}
// Based on gfxPlatformMac::GetCommonFallbackFonts() in Gecko
pub fn fallback_font_families(codepoint: Option<char>) -> Vec<&'static str> {
let mut families = vec!["Lucida Grande"];
if let Some(codepoint) = codepoint {
match unicode_plane(codepoint) {
// https://en.wikipedia.org/wiki/Plane_(Unicode)#Basic_Multilingual_Plane
0 => {
if let Some(block) = codepoint.block() {
match block {
UnicodeBlock::Arabic |
UnicodeBlock::Syriac |
UnicodeBlock::ArabicSupplement |
UnicodeBlock::Thaana |
UnicodeBlock::NKo => {
families.push("Geeza Pro");
},
UnicodeBlock::Devanagari => {
families.push("Devanagari Sangam MN");
},
UnicodeBlock::Gurmukhi => {
families.push("Gurmukhi MN");
},
UnicodeBlock::Gujarati => {
families.push("Gujarati Sangam MN");
},
UnicodeBlock::Tamil => {
families.push("Tamil MN");
},
UnicodeBlock::Lao => {
families.push("Lao MN");
},
UnicodeBlock::Tibetan => {
families.push("Songti SC");
},
UnicodeBlock::Myanmar => {
families.push("Myanmar MN");
},
UnicodeBlock::Ethiopic |
UnicodeBlock::EthiopicSupplement |
UnicodeBlock::EthiopicExtended |
UnicodeBlock::EthiopicExtendedA => {
families.push("Kefa");
},
UnicodeBlock::Cherokee => {
families.push("Plantagenet Cherokee");
},
UnicodeBlock::UnifiedCanadianAboriginalSyllabics |
UnicodeBlock::UnifiedCanadianAboriginalSyllabicsExtended => {
families.push("Euphemia UCAS");
},
UnicodeBlock::Mongolian |
UnicodeBlock::YiSyllables |
UnicodeBlock::YiRadicals => {
families.push("STHeiti");
},
UnicodeBlock::Khmer | UnicodeBlock::KhmerSymbols => {
families.push("Khmer MN");
},
UnicodeBlock::TaiLe => {
families.push("Microsoft Tai Le");
},
UnicodeBlock::GeneralPunctuation |
UnicodeBlock::SuperscriptsandSubscripts |
UnicodeBlock::CurrencySymbols |
UnicodeBlock::CombiningDiacriticalMarksforSymbols |
UnicodeBlock::LetterlikeSymbols |
UnicodeBlock::NumberForms |
UnicodeBlock::Arrows |
UnicodeBlock::MathematicalOperators |
UnicodeBlock::MiscellaneousTechnical |
UnicodeBlock::ControlPictures |
UnicodeBlock::OpticalCharacterRecognition |
UnicodeBlock::EnclosedAlphanumerics |
UnicodeBlock::BoxDrawing |
UnicodeBlock::BlockElements |
UnicodeBlock::GeometricShapes |
UnicodeBlock::MiscellaneousSymbols |
UnicodeBlock::Dingbats |
UnicodeBlock::MiscellaneousMathematicalSymbolsA |
UnicodeBlock::SupplementalArrowsA |
UnicodeBlock::SupplementalArrowsB |
UnicodeBlock::MiscellaneousMathematicalSymbolsB |
UnicodeBlock::SupplementalMathematicalOperators |<|fim▁hole|> UnicodeBlock::MiscellaneousSymbolsandArrows |
UnicodeBlock::SupplementalPunctuation => {
families.push("Hiragino Kaku Gothic ProN");
families.push("Apple Symbols");
families.push("Menlo");
families.push("STIXGeneral");
},
UnicodeBlock::BraillePatterns => {
families.push("Apple Braille");
},
UnicodeBlock::Bopomofo |
UnicodeBlock::HangulCompatibilityJamo |
UnicodeBlock::Kanbun |
UnicodeBlock::BopomofoExtended |
UnicodeBlock::CJKStrokes |
UnicodeBlock::KatakanaPhoneticExtensions => {
families.push("Hiragino Sans GB");
},
UnicodeBlock::YijingHexagramSymbols |
UnicodeBlock::CyrillicExtendedB |
UnicodeBlock::Bamum |
UnicodeBlock::ModifierToneLetters |
UnicodeBlock::LatinExtendedD |
UnicodeBlock::ArabicPresentationFormsA |
UnicodeBlock::HalfwidthandFullwidthForms |
UnicodeBlock::Specials => {
families.push("Apple Symbols");
},
_ => {},
}
}
},
// https://en.wikipedia.org/wiki/Plane_(Unicode)#Supplementary_Multilingual_Plane
1 => {
families.push("Apple Symbols");
families.push("STIXGeneral");
},
// https://en.wikipedia.org/wiki/Plane_(Unicode)#Supplementary_Ideographic_Plane
2 => {
// Systems with MS Office may have these fonts
families.push("MingLiU-ExtB");
families.push("SimSun-ExtB");
},
_ => {},
}
}
families.push("Geneva");
families.push("Arial Unicode MS");
families
}
pub static SANS_SERIF_FONT_FAMILY: &'static str = "Helvetica";<|fim▁end|> | |
<|file_name|>train.py<|end_file_name|><|fim▁begin|># Copyright 2020 The SQLFlow Authors. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import six
import xgboost as xgb
from runtime.model import collect_metadata
from runtime.model import oss as pai_model_store
from runtime.model import save_metadata
from runtime.pai.pai_distributed import make_distributed_info_without_evaluator
from runtime.step.xgboost.save import save_model_to_local_file
from runtime.xgboost.dataset import xgb_dataset
from runtime.xgboost.pai_rabit import PaiXGBoostTracker, PaiXGBoostWorker
def dist_train(flags,
datasource,
select,
model_params,
train_params,
feature_metas,
feature_column_names,
label_meta,
validation_select,
disk_cache=False,
batch_size=None,
epoch=1,
load_pretrained_model=False,
is_pai=False,
pai_train_table="",
pai_validate_table="",
oss_model_dir="",
transform_fn=None,
feature_column_code="",
model_repo_image="",
original_sql=""):
if not is_pai:
raise Exception(
"XGBoost distributed training is only supported on PAI")
num_workers = len(flags.worker_hosts.split(","))
cluster, node, task_id = make_distributed_info_without_evaluator(flags)
master_addr = cluster["ps"][0].split(":")
master_host = master_addr[0]
master_port = int(master_addr[1]) + 1
tracker = None
print("node={}, task_id={}, cluster={}".format(node, task_id, cluster))
try:
if node == 'ps':
if task_id == 0:
tracker = PaiXGBoostTracker(host=master_host,
nworkers=num_workers,
port=master_port)
else:
if node != 'chief':
task_id += 1
envs = PaiXGBoostWorker.gen_envs(host=master_host,
port=master_port,
ttl=200,
nworkers=num_workers,
task_id=task_id)
xgb.rabit.init(envs)
rank = xgb.rabit.get_rank()
train(datasource,
select,
model_params,
train_params,
feature_metas,
feature_column_names,
label_meta,
validation_select,
disk_cache,
batch_size,
epoch,
load_pretrained_model,
is_pai,
pai_train_table,
pai_validate_table,
rank,
nworkers=num_workers,
oss_model_dir=oss_model_dir,
transform_fn=transform_fn,
feature_column_code=feature_column_code,
model_repo_image=model_repo_image,
original_sql=original_sql)
except Exception as e:
print("node={}, id={}, exception={}".format(node, task_id, e))
six.reraise(*sys.exc_info()) # For better backtrace
finally:
if tracker is not None:
tracker.join()
if node != 'ps':
xgb.rabit.finalize()
def train(datasource,
select,
model_params,
train_params,
feature_metas,
feature_column_names,
label_meta,
validation_select,
disk_cache=False,
batch_size=None,
epoch=1,
load_pretrained_model=False,
is_pai=False,
pai_train_table="",
pai_validate_table="",
rank=0,
nworkers=1,
oss_model_dir="",
transform_fn=None,
feature_column_code="",
model_repo_image="",
original_sql=""):
if batch_size == -1:
batch_size = None
print("Start training XGBoost model...")
dtrain = xgb_dataset(datasource,
'train.txt',
select,
feature_metas,
feature_column_names,
label_meta,
is_pai,
pai_train_table,
cache=disk_cache,
batch_size=batch_size,
epoch=epoch,
rank=rank,
nworkers=nworkers,
transform_fn=transform_fn,
feature_column_code=feature_column_code)
if len(validation_select.strip()) > 0:
dvalidate = list(
xgb_dataset(datasource,
'validate.txt',
validation_select,
feature_metas,
feature_column_names,
label_meta,
is_pai,
pai_validate_table,
rank=rank,
nworkers=nworkers,
transform_fn=transform_fn,
feature_column_code=feature_column_code))[0]
filename = "my_model"
if load_pretrained_model:
bst = xgb.Booster()
bst.load_model(filename)
else:
bst = None
re = None
for per_batch_dmatrix in dtrain:
watchlist = [(per_batch_dmatrix, "train")]
if len(validation_select.strip()) > 0:
watchlist.append((dvalidate, "validate"))
re = dict()
bst = xgb.train(model_params,
per_batch_dmatrix,
evals=watchlist,
evals_result=re,
xgb_model=bst,
**train_params)
print("Evaluation result: %s" % re)
if rank == 0:
# TODO(sneaxiy): collect features and label
metadata = collect_metadata(original_sql=original_sql,
select=select,
validation_select=validation_select,
model_repo_image=model_repo_image,
class_name=model_params.get("booster"),
attributes=model_params,
features=None,
label=None,
evaluation=re)
save_model_to_local_file(bst, model_params, filename)
save_metadata("model_meta.json", metadata)
if is_pai and len(oss_model_dir) > 0:
save_model(oss_model_dir, filename, model_params, train_params,
feature_metas, feature_column_names, label_meta,
feature_column_code)
def save_model(model_dir, filename, model_params, train_params, feature_metas,
feature_column_names, label_meta, feature_column_code):
pai_model_store.save_file(model_dir, filename)
pai_model_store.save_file(model_dir, "{}.pmml".format(filename))
pai_model_store.save_file(model_dir, "model_meta.json")<|fim▁hole|> pai_model_store.save_metas(
model_dir,
1,
"xgboost_model_desc",
"", # estimator = ""
model_params,
train_params,
feature_metas,
feature_column_names,
label_meta,
feature_column_code)<|fim▁end|> | # (TODO:lhw) remove this function call, use the new metadata in load_metas |
<|file_name|>rest_client.py<|end_file_name|><|fim▁begin|>import copy, getpass, logging, pprint, re, urllib, urlparse
import httplib2
from django.utils import datastructures, simplejson
from autotest_lib.frontend.afe import rpc_client_lib
from autotest_lib.client.common_lib import utils
_request_headers = {}
def _get_request_headers(uri):
server = urlparse.urlparse(uri)[0:2]
if server in _request_headers:
return _request_headers[server]
headers = rpc_client_lib.authorization_headers(getpass.getuser(), uri)
headers['Content-Type'] = 'application/json'
_request_headers[server] = headers
return headers
def _clear_request_headers(uri):
server = urlparse.urlparse(uri)[0:2]
if server in _request_headers:
del _request_headers[server]
def _site_verify_response_default(headers, response_body):
return headers['status'] != '401'
class RestClientError(Exception):
pass
class ClientError(Exception):
pass
class ServerError(Exception):
pass
class Response(object):
def __init__(self, httplib_response, httplib_content):
self.status = int(httplib_response['status'])
self.headers = httplib_response
self.entity_body = httplib_content
def decoded_body(self):
return simplejson.loads(self.entity_body)
def __str__(self):
return '\n'.join([str(self.status), self.entity_body])
class Resource(object):
def __init__(self, representation_dict, http):
self._http = http
assert 'href' in representation_dict
for key, value in representation_dict.iteritems():
setattr(self, str(key), value)
def __repr__(self):
return 'Resource(%r)' % self._representation()
def pprint(self):
# pretty-print support for debugging/interactive use
pprint.pprint(self._representation())
@classmethod
def load(cls, uri, http=None):
if not http:
http = httplib2.Http()
directory = cls({'href': uri}, http)
return directory.get()
<|fim▁hole|>
def _read_representation(self, value):
# recursively convert representation dicts to Resource objects
if isinstance(value, list):
return [self._read_representation(element) for element in value]
if isinstance(value, dict):
converted_dict = dict((key, self._read_representation(sub_value))
for key, sub_value in value.iteritems())
if 'href' in converted_dict:
return type(self)(converted_dict, http=self._http)
return converted_dict
return value
def _write_representation(self, value):
# recursively convert Resource objects to representation dicts
if isinstance(value, list):
return [self._write_representation(element) for element in value]
if isinstance(value, dict):
return dict((key, self._write_representation(sub_value))
for key, sub_value in value.iteritems())
if isinstance(value, Resource):
return value._representation()
return value
def _representation(self):
return dict((key, self._write_representation(value))
for key, value in self.__dict__.iteritems()
if not key.startswith('_')
and not callable(value))
def _do_request(self, method, uri, query_parameters, encoded_body):
uri_parts = [uri]
if query_parameters:
if '?' in uri:
uri_parts += '&'
else:
uri_parts += '?'
uri_parts += urllib.urlencode(query_parameters, doseq=True)
full_uri = ''.join(uri_parts)
if encoded_body:
entity_body = simplejson.dumps(encoded_body)
else:
entity_body = None
logging.debug('%s %s', method, full_uri)
if entity_body:
logging.debug(entity_body)
site_verify = utils.import_site_function(
__file__, 'autotest_lib.frontend.shared.site_rest_client',
'site_verify_response', _site_verify_response_default)
headers, response_body = self._http.request(
full_uri, method, body=entity_body,
headers=_get_request_headers(uri))
if not site_verify(headers, response_body):
logging.debug('Response verification failed, clearing headers and '
'trying again:\n%s', response_body)
_clear_request_headers(uri)
headers, response_body = self._http.request(
full_uri, method, body=entity_body,
headers=_get_request_headers(uri))
logging.debug('Response: %s', headers['status'])
return Response(headers, response_body)
def _request(self, method, query_parameters=None, encoded_body=None):
if query_parameters is None:
query_parameters = {}
response = self._do_request(method, self.href, query_parameters,
encoded_body)
if 300 <= response.status < 400: # redirection
return self._do_request(method, response.headers['location'],
query_parameters, encoded_body)
if 400 <= response.status < 500:
raise ClientError(str(response))
if 500 <= response.status < 600:
raise ServerError(str(response))
return response
def _stringify_query_parameter(self, value):
if isinstance(value, (list, tuple)):
return ','.join(self._stringify_query_parameter(item)
for item in value)
return str(value)
def _iterlists(self, mapping):
"""This effectively lets us treat dicts as MultiValueDicts."""
if hasattr(mapping, 'iterlists'): # mapping is already a MultiValueDict
return mapping.iterlists()
return ((key, (value,)) for key, value in mapping.iteritems())
def get(self, query_parameters=None, **kwarg_query_parameters):
"""
@param query_parameters: a dict or MultiValueDict
"""
query_parameters = copy.copy(query_parameters) # avoid mutating original
if query_parameters is None:
query_parameters = {}
query_parameters.update(kwarg_query_parameters)
string_parameters = datastructures.MultiValueDict()
for key, values in self._iterlists(query_parameters):
string_parameters.setlist(
key, [self._stringify_query_parameter(value)
for value in values])
response = self._request('GET',
query_parameters=string_parameters.lists())
assert response.status == 200
return self._read_representation(response.decoded_body())
def get_full(self, results_limit, query_parameters=None,
**kwarg_query_parameters):
"""
Like get() for collections, when the full collection is expected.
@param results_limit: maximum number of results to allow
@raises ClientError if there are more than results_limit results.
"""
result = self.get(query_parameters=query_parameters,
items_per_page=results_limit,
**kwarg_query_parameters)
if result.total_results > results_limit:
raise ClientError(
'Too many results (%s > %s) for request %s (%s %s)'
% (result.total_results, results_limit, self.href,
query_parameters, kwarg_query_parameters))
return result
def put(self):
response = self._request('PUT', encoded_body=self._representation())
assert response.status == 200
return self._read_representation(response.decoded_body())
def delete(self):
response = self._request('DELETE')
assert response.status == 204 # no content
def post(self, request_dict):
# request_dict may still have resources in it
request_dict = self._write_representation(request_dict)
response = self._request('POST', encoded_body=request_dict)
assert response.status == 201 # created
return self._read_representation({'href': response.headers['location']})<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! # Teleborg
//!
//! Teleborg is a fast, reliable and easy to use wrapper for the [Telegram bot
//! API](https://core.telegram.org/bots/api).
//! This crate aims to provide everything the user needs to create a high
//! performant Telegram bot.
//!
//! ## Getting started
//!
//! ``` no_run
//! extern crate teleborg;
//!
//! use teleborg::{Dispatcher, Bot, Updater};
//! use teleborg::objects::Update;
//!
//! fn main() {
//! // Make sure you have your token
//! let bot_token = "bot_token".to_string();
//! // Creating a dispatcher which registers all the command and message handlers
//! let mut dispatcher = Dispatcher::new();
//! // Registering our command which we create below in the form as a function
//! dispatcher.add_command_handler("test", test, false);
//! // Start the updater, the Updater will start the threads, one of which will poll for updates
//! // and send those to the Dispatcher's thread which will act upon it with the registered handlers
//! Updater::start(Some(bot_token), None, None, None, dispatcher);
//! }
//!<|fim▁hole|>//! }
//! ```
#[macro_use] extern crate log;
extern crate reqwest;
extern crate serde;
#[macro_use] extern crate serde_derive;
extern crate serde_json;
pub use reqwest::StatusCode;
pub use self::bot::{Bot, ParseMode, ChatAction};
pub use self::command::Command;
pub use self::dispatcher::Dispatcher;
pub use self::updater::Updater;
pub mod error;
pub mod objects;
mod bot;
mod command;
mod dispatcher;
mod marker;
mod updater;
/// Pass this to a method which requires markup where you do not want markup.
pub const NO_MARKUP: Option<objects::NullMarkup> = None;
impl<T: Sync + Send + 'static + FnMut(&Bot, objects::Update, Option<Vec<&str>>)> Command for T {
fn execute(&mut self, bot: &Bot, update: objects::Update, args: Option<Vec<&str>>) {
self(bot, update, args);
}
}
/// Construct an API URL with the base bot URL and an
/// array of strings which will construct the path.
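/// e.g. `construct_api_url("https://api.telegram.org/botTOKEN", &["sendMessage"])`
/// yields `"https://api.telegram.org/botTOKEN/sendMessage"` (token illustrative).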
fn construct_api_url(bot_url: &str, path: &[&str]) -> String {
format!("{}/{}", bot_url, path.join("/"))
}<|fim▁end|> | //! // Our first command handler
//! fn test(bot: &Bot, update: Update, _: Option<Vec<&str>>) {
//! bot.reply_to_message(&update, "It works!").unwrap(); |
<|file_name|>install_cmd.py<|end_file_name|><|fim▁begin|>import os
import gflags
from textwrap import dedent
import hashlib
import zipfile
from ..exceptions import UsageError
from ..Bunch import Bunch
from ..TreeScanner import TreeScanner
from ..PakManifest import PakManifest, PakFile, PakFolder
def help():
return dedent("""\
Command Usage: pak install pkg_name-1.0.pak /target/path
""")
<|fim▁hole|>def run(args):
if len(args) != 2:
raise UsageError("Got %d arguments, but I only expected 2 (%s)" % (len(args), ' '.join(args)))
else:
pak_path, target_path = args
# Validate target_path path
target_path = os.path.abspath(target_path)
if not os.path.exists(target_path):
raise UsageError("Target path doesn't exist: " + target_path)
if not os.path.isdir(target_path):
raise UsageError("Target path isn't a directory: " + target_path)
# Examine pak path
pak_path = os.path.abspath(pak_path)
if not os.path.exists(pak_path):
raise UsageError("Package doesn't exist: " + pak_path)
if not os.path.isfile(pak_path):
raise UsageError("Package path is not a file: " + pak_path)
# Open package
pak = zipfile.ZipFile(pak_path, 'r')
# Get manifest
manifest = PakManifest()
with pak.open('manifest.json', 'r') as fh:
manifest.load_string(fh.read())
# List files in target
existing = Bunch(
files = dict(),
dirs = dict(),
paths = set(),
scanner = TreeScanner(target_path),
)
for target_obj in existing.scanner.scan():
existing.paths.add(target_obj.path)
if target_obj.obj_type == 'folder':
existing.dirs[target_obj.path] = target_obj
elif target_obj.obj_type == 'file':
existing.files[target_obj.path] = target_obj<|fim▁end|> | |
<|file_name|>views.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Views for managing keypairs.
"""
from django.core.urlresolvers import reverse
from django.core.urlresolvers import reverse_lazy
from django import http
from django.template.defaultfilters import slugify # noqa
from django.utils.translation import ugettext_lazy as _
from django.views.generic import TemplateView # noqa
from django.views.generic import View # noqa
from horizon import exceptions
from horizon import forms
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.access_and_security.keypairs \
import forms as project_forms
class CreateView(forms.ModalFormView):
form_class = project_forms.CreateKeypair
template_name = 'project/access_and_security/keypairs/create.html'
success_url = 'horizon:project:access_and_security:keypairs:download'
def get_success_url(self):
return reverse(self.success_url,
kwargs={"keypair_name": self.request.POST['name']})
class ImportView(forms.ModalFormView):
form_class = project_forms.ImportKeypair
template_name = 'project/access_and_security/keypairs/import.html'
success_url = reverse_lazy('horizon:project:access_and_security:index')
def get_object_id(self, keypair):
return keypair.name
class DownloadView(TemplateView):
def get_context_data(self, keypair_name=None):
return {'keypair_name': keypair_name}
template_name = 'project/access_and_security/keypairs/download.html'
class GenerateView(View):
def get(self, request, keypair_name=None):
try:
keypair = api.nova.keypair_create(request, keypair_name)
except Exception:
redirect = reverse('horizon:project:access_and_security:index')
exceptions.handle(self.request,
_('Unable to create key pair: %(exc)s'),
redirect=redirect)<|fim▁hole|> 'attachment; filename=%s.pem' % slugify(keypair.name)
response.write(keypair.private_key)
response['Content-Length'] = str(len(response.content))
return response<|fim▁end|> |
response = http.HttpResponse(content_type='application/binary')
response['Content-Disposition'] = \ |
<|file_name|>deployments.go<|end_file_name|><|fim▁begin|>// Copyright 2019 Istio Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package mock
import (
"fmt"
"sync"
v1 "k8s.io/api/apps/v1"
autoscalingv1 "k8s.io/api/autoscaling/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/types"
"k8s.io/apimachinery/pkg/watch"
appsv1 "k8s.io/client-go/kubernetes/typed/apps/v1"<|fim▁hole|>
type deploymentImpl struct {
mux sync.Mutex
deployments map[string]*v1.Deployment
watches Watches
}
func newAppsInterface() appsv1.DeploymentInterface {
return &deploymentImpl{
deployments: make(map[string]*v1.Deployment),
}
}
func (d *deploymentImpl) Create(obj *v1.Deployment) (*v1.Deployment, error) {
d.mux.Lock()
defer d.mux.Unlock()
d.deployments[obj.Name] = obj
d.watches.Send(watch.Event{
Type: watch.Added,
Object: obj,
})
return obj, nil
}
func (d *deploymentImpl) Update(obj *v1.Deployment) (*v1.Deployment, error) {
d.mux.Lock()
defer d.mux.Unlock()
d.deployments[obj.Name] = obj
d.watches.Send(watch.Event{
Type: watch.Modified,
Object: obj,
})
return obj, nil
}
func (d *deploymentImpl) UpdateStatus(*v1.Deployment) (*v1.Deployment, error) {
panic("not implemented")
}
func (d *deploymentImpl) Delete(name string, options *metav1.DeleteOptions) error {
d.mux.Lock()
defer d.mux.Unlock()
obj := d.deployments[name]
if obj == nil {
return fmt.Errorf("unable to delete deployment %s", name)
}
delete(d.deployments, name)
d.watches.Send(watch.Event{
Type: watch.Deleted,
Object: obj,
})
return nil
}
func (d *deploymentImpl) DeleteCollection(options *metav1.DeleteOptions, listOptions metav1.ListOptions) error {
panic("not implemented")
}
func (d *deploymentImpl) Get(name string, options metav1.GetOptions) (*v1.Deployment, error) {
panic("not implemented")
}
func (d *deploymentImpl) List(opts metav1.ListOptions) (*v1.DeploymentList, error) {
d.mux.Lock()
defer d.mux.Unlock()
out := &v1.DeploymentList{}
for _, v := range d.deployments {
out.Items = append(out.Items, *v)
}
return out, nil
}
func (d *deploymentImpl) Watch(opts metav1.ListOptions) (watch.Interface, error) {
d.mux.Lock()
defer d.mux.Unlock()
w := NewWatch()
d.watches = append(d.watches, w)
// Send add events for all current resources.
for _, d := range d.deployments {
w.Send(watch.Event{
Type: watch.Added,
Object: d,
})
}
return w, nil
}
func (d *deploymentImpl) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *v1.Deployment, err error) {
panic("not implemented")
}
func (d *deploymentImpl) GetScale(deploymentName string, options metav1.GetOptions) (*autoscalingv1.Scale, error) {
panic("not implemented")
}
func (d *deploymentImpl) UpdateScale(deploymentName string, scale *autoscalingv1.Scale) (*autoscalingv1.Scale, error) {
panic("not implemented")
}<|fim▁end|> | )
var _ appsv1.DeploymentInterface = &deploymentImpl{} |
<|file_name|>gen-crons.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
from optparse import OptionParser
from jinja2 import Template
HEADER = '!!AUTO-GENERATED!! Edit bin/crontab/crontab.tpl instead.'
TEMPLATE = open(os.path.join(os.path.dirname(__file__), 'crontab.tpl')).read()
<|fim▁hole|> parser.add_option('-w', '--webapp',
help='Location of web app (required)')
parser.add_option('-u', '--user',
help=('Prefix cron with this user. '
'Only define for cron.d style crontabs.'))
parser.add_option('-p', '--python', default='/usr/bin/python2.7',
help='Python interpreter to use.')
(opts, args) = parser.parse_args()
if not opts.webapp:
parser.error('-w must be defined')
ctx = {'django': 'cd %s; %s manage.py' % (opts.webapp, opts.python)}
ctx['cron'] = '%s cron' % ctx['django']
if opts.user:
for k, v in ctx.iteritems():
ctx[k] = '%s %s' % (opts.user, v)
# Needs to stay below the opts.user injection.
ctx['python'] = opts.python
ctx['header'] = HEADER
print Template(TEMPLATE).render(**ctx)
if __name__ == '__main__':
main()<|fim▁end|> | def main():
parser = OptionParser() |
<|file_name|>main.go<|end_file_name|><|fim▁begin|>/* Copyright 2015 LinkedIn Corp. Licensed under the Apache License, Version
* 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package main
import (
"flag"
"fmt"
log "github.com/cihub/seelog"
"github.com/samuel/go-zookeeper/zk"
"os"
"os/signal"
"syscall"
"time"
)
type KafkaCluster struct {
Client *KafkaClient
Zookeeper *ZookeeperClient
}
type ApplicationContext struct {
Config *BurrowConfig
Storage *OffsetStorage
Clusters map[string]*KafkaCluster
Server *HttpServer
Emailer *Emailer
HttpNotifier *HttpNotifier
NotifierLock *zk.Lock
}
func loadNotifiers(app *ApplicationContext) error {
// Set up the Emailer, if configured
if len(app.Config.Email) > 0 {
log.Info("Configuring Email notifier")
emailer, err := NewEmailer(app)
if err != nil {
log.Criticalf("Cannot configure email notifier: %v", err)
return err
}
app.Emailer = emailer
}
// Set up the HTTP Notifier, if configured
if app.Config.Httpnotifier.Url != "" {
log.Info("Configuring HTTP notifier")
httpnotifier, err := NewHttpNotifier(app)
if err != nil {
log.Criticalf("Cannot configure HTTP notifier: %v", err)
return err
}
app.HttpNotifier = httpnotifier
}
return nil
}
func startNotifiers(app *ApplicationContext) {
// Do not proceed until we get the Zookeeper lock
err := app.NotifierLock.Lock()
if err != nil {
log.Criticalf("Cannot get ZK notifier lock: %v", err)
os.Exit(1)
}
log.Info("Acquired Zookeeper notifier lock")
if app.Emailer != nil {
log.Info("Starting Email notifier")
app.Emailer.Start()
}
if app.HttpNotifier != nil {
log.Info("Starting HTTP notifier")
app.HttpNotifier.Start()
}
}
func stopNotifiers(app *ApplicationContext) {
// Ignore errors on unlock - we're quitting anyways, and it might not be locked
app.NotifierLock.Unlock()
if app.Emailer != nil {
log.Info("Stopping Email notifier")
app.Emailer.Stop()
}
if app.HttpNotifier != nil {
log.Info("Stopping HTTP notifier")
app.HttpNotifier.Stop()<|fim▁hole|>
// Why two mains? Go's main() cannot return an exit code, and calling os.Exit skips
// deferred calls, so we do everything in a separate burrowMain() whose defers run
// before main() exits with its return value.
func burrowMain() int {
// The only command line arg is the config file
var cfgfile = flag.String("config", "burrow.cfg", "Full path to the configuration file")
flag.Parse()
// Load and validate the configuration
fmt.Fprintln(os.Stderr, "Reading configuration from", *cfgfile)
appContext := &ApplicationContext{Config: ReadConfig(*cfgfile)}
if err := ValidateConfig(appContext); err != nil {
log.Criticalf("Cannot validate configuration: %v", err)
return 1
}
// Create the PID file to lock out other processes. Defer removal so it's the last thing to go
createPidFile(appContext.Config.General.LogDir + "/" + appContext.Config.General.PIDFile)
defer removePidFile(appContext.Config.General.LogDir + "/" + appContext.Config.General.PIDFile)
// Set up stderr/stdout to go to a separate log file
openOutLog(appContext.Config.General.LogDir + "/burrow.out")
fmt.Println("Started Burrow at", time.Now().Format("January 2, 2006 at 3:04pm (MST)"))
// If a logging config is specified, replace the existing loggers
if appContext.Config.General.LogConfig != "" {
NewLogger(appContext.Config.General.LogConfig)
}
// Start a local Zookeeper client (used for application locks)
log.Info("Starting Zookeeper client")
zkconn, _, err := zk.Connect(appContext.Config.Zookeeper.Hosts, time.Duration(appContext.Config.Zookeeper.Timeout)*time.Second)
if err != nil {
log.Criticalf("Cannot start Zookeeper client: %v", err)
return 1
}
defer zkconn.Close()
// Start an offsets storage module
log.Info("Starting Offsets Storage module")
appContext.Storage, err = NewOffsetStorage(appContext)
if err != nil {
log.Criticalf("Cannot configure offsets storage module: %v", err)
return 1
}
defer appContext.Storage.Stop()
// Start an HTTP server
log.Info("Starting HTTP server")
appContext.Server, err = NewHttpServer(appContext)
if err != nil {
log.Criticalf("Cannot start HTTP server: %v", err)
return 1
}
defer appContext.Server.Stop()
// Start Kafka clients and Zookeepers for each cluster
appContext.Clusters = make(map[string]*KafkaCluster, len(appContext.Config.Kafka))
for cluster := range appContext.Config.Kafka {
log.Infof("Starting Zookeeper client for cluster %s", cluster)
zkconn, err := NewZookeeperClient(appContext, cluster)
if err != nil {
log.Criticalf("Cannot start Zookeeper client for cluster %s: %v", cluster, err)
return 1
}
defer zkconn.Stop()
log.Infof("Starting Kafka client for cluster %s", cluster)
client, err := NewKafkaClient(appContext, cluster)
if err != nil {
log.Criticalf("Cannot start Kafka client for cluster %s: %v", cluster, err)
return 1
}
defer client.Stop()
appContext.Clusters[cluster] = &KafkaCluster{Client: client, Zookeeper: zkconn}
}
// Set up the Zookeeper lock for notification
appContext.NotifierLock = zk.NewLock(zkconn, appContext.Config.Zookeeper.LockPath, zk.WorldACL(zk.PermAll))
// Load the notifiers, but do not start them
err = loadNotifiers(appContext)
if err != nil {
// Error was already logged
return 1
}
// Notifiers are started in a goroutine if we get the ZK lock
go startNotifiers(appContext)
defer stopNotifiers(appContext)
// Register signal handlers for exiting
exitChannel := make(chan os.Signal, 1)
signal.Notify(exitChannel, syscall.SIGINT, syscall.SIGQUIT, syscall.SIGSTOP, syscall.SIGTERM)
// Wait until we're told to exit
<-exitChannel
log.Info("Shutdown triggered")
return 0
}
func main() {
rv := burrowMain()
if rv != 0 {
fmt.Println("Burrow failed at", time.Now().Format("January 2, 2006 at 3:04pm (MST)"))
} else {
fmt.Println("Stopped Burrow at", time.Now().Format("January 2, 2006 at 3:04pm (MST)"))
}
os.Exit(rv)
}<|fim▁end|> | }
}
|
<|file_name|>MetricUnionTransform.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2016, Salesforce.com, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of Salesforce.com nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.salesforce.dva.argus.service.metric.transform;
import com.salesforce.dva.argus.entity.Metric;
import com.salesforce.dva.argus.system.SystemAssert;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
/**
* COUNT, GROUP, UNION.
*<|fim▁hole|> //~ Static fields/initializers *******************************************************************************************************************
/** The metric name for this transform is result. */
public static final String RESULT_METRIC_NAME = "result";
//~ Instance fields ******************************************************************************************************************************
private final ValueReducer valueUnionReducer;
private final String defaultScope;
private final String defaultMetricName;
//~ Constructors *********************************************************************************************************************************
/**
* Creates a new ReduceTransform object.
*
* @param valueUnionReducer The value reducer used to merge shared datapoints.
*/
protected MetricUnionTransform(ValueReducer valueUnionReducer) {
this.defaultScope = TransformFactory.Function.UNION.name();
this.defaultMetricName = TransformFactory.DEFAULT_METRIC_NAME;
this.valueUnionReducer = valueUnionReducer;
}
//~ Methods **************************************************************************************************************************************
@Override
public String getResultScopeName() {
return defaultScope;
}
/**
* Applies the columnar union reduction to the metrics list.
*
* @param metrics The metrics to transform.
*
* @return The transformed metrics.
*/
@Override
public List<Metric> transform(List<Metric> metrics) {
return union(metrics);
}
/**
* Performs a columnar union of metrics.
*
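* Illustrative sketch (made-up data): for A = {1->x, 2->y} and B = {2->z, 3->w},
* the timestamp present in every metric (2) is reduced with the configured
* ValueReducer, while the unshared points 1->x and 3->w are copied over unchanged.
*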
* @param metrics The metrics to merge.
*
* @return The merged metrics.
*/
public List<Metric> union(List<Metric> metrics) {
SystemAssert.requireArgument(metrics != null, "Cannot transform empty metric/metrics");
if (metrics.isEmpty()) {
return metrics;
}
Metric newMetric = reduce(metrics);
Map<Long, String> reducedDatapoints = newMetric.getDatapoints();
Set<Long> sharedTimestamps = reducedDatapoints.keySet();
Map<Long, String> unionDatapoints = new TreeMap<Long, String>();
for (Metric metric : metrics) {
for (Map.Entry<Long, String> entry : metric.getDatapoints().entrySet()) {
if (!sharedTimestamps.contains(entry.getKey())) {
unionDatapoints.put(entry.getKey(), entry.getValue());
}
}
}
newMetric.addDatapoints(unionDatapoints);
return Arrays.asList(newMetric);
}
/**
* Reduce transform for the list of metrics.
*
* @param metrics The list of metrics to reduce.
*
* @return The reduced metric.
*/
protected Metric reduce(List<Metric> metrics) {
SystemAssert.requireArgument(metrics != null, "Cannot transform empty metric/metrics");
/*
* if (metrics.isEmpty()) { return new Metric(defaultScope, defaultMetricName); }
*/
MetricDistiller distiller = new MetricDistiller();
distiller.distill(metrics);
Map<Long, List<String>> collated = collate(metrics);
Map<Long, String> minDatapoints = reduce(collated, metrics);
String newMetricName = distiller.getMetric() == null ? defaultMetricName : distiller.getMetric();
Metric newMetric = new Metric(defaultScope, newMetricName);
newMetric.setDisplayName(distiller.getDisplayName());
newMetric.setUnits(distiller.getUnits());
newMetric.setTags(distiller.getTags());
newMetric.setDatapoints(minDatapoints);
return newMetric;
}
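// Groups datapoint values by timestamp across all metrics; e.g. two metrics that
// both have a point at t=5 yield an entry 5 -> [valueFromA, valueFromB].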
private Map<Long, List<String>> collate(List<Metric> metrics) {
Map<Long, List<String>> collated = new HashMap<Long, List<String>>();
for (Metric metric : metrics) {
for (Map.Entry<Long, String> point : metric.getDatapoints().entrySet()) {
if (!collated.containsKey(point.getKey())) {
collated.put(point.getKey(), new ArrayList<String>());
}
collated.get(point.getKey()).add(point.getValue());
}
}
return collated;
}
private Map<Long, String> reduce(Map<Long, List<String>> collated, List<Metric> metrics) {
Map<Long, String> reducedDatapoints = new HashMap<>();
for (Map.Entry<Long, List<String>> entry : collated.entrySet()) {
if (entry.getValue().size() < metrics.size()) {
continue;
}
reducedDatapoints.put(entry.getKey(), this.valueUnionReducer.reduce(entry.getValue()));
}
return reducedDatapoints;
}
@Override
public List<Metric> transform(List<Metric> metrics, List<String> constants) {
throw new UnsupportedOperationException("Union transform can't be used with constants!");
}
@Override
public List<Metric> transform(List<Metric>... listOfList) {
throw new UnsupportedOperationException("Union doesn't need list of list");
}
}
/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */<|fim▁end|> | * @author rzhang
*/
public class MetricUnionTransform implements Transform {
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from toyz.web import app<|fim▁hole|><|fim▁end|> | from toyz.web import tasks |
<|file_name|>types.d.ts<|end_file_name|><|fim▁begin|>export type Mixed = {};
export type Maybe<T> = T | null | undefined;
export type Constructor<T = Mixed> = new (...args: any[]) => T;
export type HTMLElementClass = typeof HTMLElement;
export type ComponentProps<T, E = HTMLElementClass> = { [P in keyof T]: PropOptions<T[P]> };
// NOTE:
// - all classes are just ambient definitions (opaque-type-like), so consumers cannot use them directly
// - inferring generics works only on instances, not on the implementation type, so this will not give you type safety; you still have to manually annotate those props in your code
/**
* Implement this interface for any @skatejs/renderer-*
*/
export interface Renderer<O> {
renderer(root: Node | Element | ShadowRoot, html: (props?: Mixed, state?: Mixed) => O): void;
}
export interface WithComponent<P = Mixed, S = Mixed, C = Mixed>
extends CustomElement,
WithChildren,
WithLifecycle,
WithRenderer,
WithUpdate<P, S>,
WithContext<C> {}
export declare class WithComponent<P = Mixed, S = Mixed, C = Mixed> extends HTMLElement {
static readonly observedAttributes: string[];
}
// Custom Elements v1
export class CustomElement extends HTMLElement {
static readonly observedAttributes: string[];
connectedCallback(): void;
disconnectedCallback(): void;
attributeChangedCallback(name: string, oldValue: null | string, newValue: null | string): void;
adoptedCallback?(): void;
}
export declare class WithChildren extends HTMLElement {
childrenUpdated(): void;
}
export declare class WithLifecycle extends HTMLElement {
connecting(): void;
connected(): void;
disconnecting(): void;
disconnected(): void;
}
export declare class WithContext<C = Mixed> extends HTMLElement {
context: C;
}
export declare class WithRenderer<O = Mixed | null> extends HTMLElement implements Renderer<O> {
// getter for turning off ShadowDOM
readonly renderRoot?: this | Mixed;
updated(props?: Mixed, state?: Mixed): void;
// called before render
rendering?(): void;
render(props?: Mixed, state?: Mixed): O;
// Default renderer, returns string returned from render and adds it to root via innerHTML
// -> override to get own renderer
renderer(root: Element | Node | ShadowRoot, html: (props?: Mixed | undefined) => O): void;
<|fim▁hole|>export declare class WithUpdate<P = Mixed, S = Mixed> extends HTMLElement {
// Special hack for own components type checking.
// It works in combination with ElementAttributesProperty. It placed in jsx.d.ts.
// more detail, see: https://www.typescriptlang.org/docs/handbook/jsx.html
// and https://github.com/skatejs/skatejs/pull/952#issuecomment-264500153
readonly props: Readonly<P>;
state: S;
// Called when props have been set regardless of if they've changed. much like React's componentWillReceiveProps().
updating(props?: P, state?: S): void;
// Called to check whether or not the component should call updated(), much like React's shouldComponentUpdate().
shouldUpdate(props?: P, state?: S): boolean;
// Called if shouldUpdate returned true, much like React's componentWillUpdate()
updated(props?: Mixed, state?: Mixed): void;
// manually force update
triggerUpdate(): void;
}
export interface PropOptions<T = any> {
attribute?: PropOptionsAttribute;
coerce?: (value: any) => Maybe<T>;
default?: T;
deserialize?: (value: string | null) => Maybe<T>;
serialize?: (value: Maybe<T>) => string | null;
}
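// Illustrative sketch (names assumed): a string prop reflected to an attribute.
// const props: ComponentProps<{ label: string }> = {
//   label: { attribute: true, default: '' },
// };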
export type PropOptionsAttribute = PropOptionsAttributeIdentifier | PropOptionsAttributeIdentifierMap;
export type PropOptionsAttributeIdentifier = boolean | string;
export type PropOptionsAttributeIdentifierMap = {
source?: PropOptionsAttributeIdentifier;
target?: PropOptionsAttributeIdentifier;
};
export interface EventOptions extends CustomEventInit {
composed?: boolean;
}
export interface ComposedCustomEvent extends CustomEvent {
composed?: boolean;
composedPath?: () => Array<Node>;
}<|fim▁end|> | // called after render
rendered?(): void;
}
|
<|file_name|>test_utils.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Author: François Rossigneux <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from climate import tests<|fim▁hole|>
class DBUtilsTestCase(tests.TestCase):
"""Test case for DB Utils."""
pass<|fim▁end|> | |
<|file_name|>checker_test.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package policy
import (
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"k8s.io/apiserver/pkg/apis/audit"
"k8s.io/apiserver/pkg/authentication/user"
"k8s.io/apiserver/pkg/authorization/authorizer"
)
var (
tim = &user.DefaultInfo{
Name: "[email protected]",
Groups: []string{"humans", "developers"},
}
attrs = map[string]authorizer.Attributes{
"namespaced": &authorizer.AttributesRecord{
User: tim,
Verb: "get",
Namespace: "default",
APIGroup: "", // Core
APIVersion: "v1",
Resource: "pods",
Name: "busybox",
ResourceRequest: true,
Path: "/api/v1/namespaces/default/pods/busybox",
},
"cluster": &authorizer.AttributesRecord{
User: tim,
Verb: "get",
			APIGroup:        "rbac.authorization.k8s.io",
APIVersion: "v1beta1",
Resource: "clusterroles",
Name: "edit",
ResourceRequest: true,
Path: "/apis/rbac.authorization.k8s.io/v1beta1/clusterroles/edit",
},
"nonResource": &authorizer.AttributesRecord{
User: tim,
Verb: "get",
ResourceRequest: false,
Path: "/logs/kubelet.log",
},
"subresource": &authorizer.AttributesRecord{
User: tim,
Verb: "get",
Namespace: "default",
APIGroup: "", // Core
APIVersion: "v1",
Resource: "pods",
Subresource: "log",
Name: "busybox",
ResourceRequest: true,
Path: "/api/v1/namespaces/default/pods/busybox",
},
"Unauthorized": &authorizer.AttributesRecord{
Verb: "get",
Namespace: "default",
APIGroup: "", // Core
APIVersion: "v1",
Resource: "pods",
Name: "busybox",
ResourceRequest: true,
Path: "/api/v1/namespaces/default/pods/busybox",
},
}
rules = map[string]audit.PolicyRule{
"default": {
Level: audit.LevelMetadata,
},
"create": {
Level: audit.LevelRequest,
Verbs: []string{"create"},
},
"tims": {
Level: audit.LevelMetadata,
			Users: []string{"tim@k8s.io"},
},
"humans": {
Level: audit.LevelMetadata,
UserGroups: []string{"humans"},
},
"serviceAccounts": {
Level: audit.LevelRequest,
UserGroups: []string{"system:serviceaccounts"},
},
"getPods": {
Level: audit.LevelRequestResponse,
Verbs: []string{"get"},
Resources: []audit.GroupResources{{Resources: []string{"pods"}}},
},
"getPodLogs": {
Level: audit.LevelRequest,
Verbs: []string{"get"},
Resources: []audit.GroupResources{{Resources: []string{"pods/log"}}},
},
"getPodWildcardMatching": {
Level: audit.LevelRequest,
Verbs: []string{"get"},
Resources: []audit.GroupResources{{Resources: []string{"*"}}},
},
"getPodResourceWildcardMatching": {
Level: audit.LevelRequest,
Verbs: []string{"get"},
Resources: []audit.GroupResources{{Resources: []string{"*/log"}}},
},
"getPodSubResourceWildcardMatching": {
Level: audit.LevelRequest,
Verbs: []string{"get"},
Resources: []audit.GroupResources{{Resources: []string{"pods/*"}}},
},
"getClusterRoles": {
Level: audit.LevelRequestResponse,
Verbs: []string{"get"},
Resources: []audit.GroupResources{{
Group: "rbac.authorization.k8s.io",
Resources: []string{"clusterroles"},
}},
Namespaces: []string{""},
},
"getLogs": {
Level: audit.LevelRequestResponse,
Verbs: []string{"get"},
NonResourceURLs: []string{
"/logs*",
},
},
"getMetrics": {
Level: audit.LevelRequest,
Verbs: []string{"get"},
NonResourceURLs: []string{
"/metrics",
},
},
"clusterRoleEdit": {
Level: audit.LevelRequest,
Resources: []audit.GroupResources{{
Group: "rbac.authorization.k8s.io",
Resources: []string{"clusterroles"},
ResourceNames: []string{"edit"},
}},
},
"omit RequestReceived": {
Level: audit.LevelRequest,
OmitStages: []audit.Stage{
audit.StageRequestReceived,
},
},
"only audit panic": {
Level: audit.LevelRequest,
OmitStages: []audit.Stage{
audit.StageRequestReceived,
audit.StageResponseStarted,
audit.StageResponseComplete,
},
},
}
)
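// test builds an audit policy from the named rules, evaluates the named
// request against it, and asserts the expected level and omitted stages.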
func test(t *testing.T, req string, expLevel audit.Level, policyStages, expOmitStages []audit.Stage, ruleNames ...string) {
policy := audit.Policy{OmitStages: policyStages}
for _, rule := range ruleNames {
require.Contains(t, rules, rule)
policy.Rules = append(policy.Rules, rules[rule])
}
require.Contains(t, attrs, req)
actualLevel, actualOmitStages := NewPolicyRuleEvaluator(&policy).LevelAndStages(attrs[req])
assert.Equal(t, expLevel, actualLevel, "request:%s rules:%s", req, strings.Join(ruleNames, ","))
assert.True(t, stageEqual(expOmitStages, actualOmitStages), "request:%s rules:%s, expected stages: %v, actual stages: %v",
req, strings.Join(ruleNames, ","), expOmitStages, actualOmitStages)
}
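// testAuditLevel runs the level-selection table for every request/rule
// combination, with the given policy-wide omitStages applied throughout.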
func testAuditLevel(t *testing.T, stages []audit.Stage) {
test(t, "namespaced", audit.LevelMetadata, stages, stages, "default")
test(t, "namespaced", audit.LevelNone, stages, stages, "create")
test(t, "namespaced", audit.LevelMetadata, stages, stages, "tims")
test(t, "namespaced", audit.LevelMetadata, stages, stages, "humans")
test(t, "namespaced", audit.LevelNone, stages, stages, "serviceAccounts")
test(t, "namespaced", audit.LevelRequestResponse, stages, stages, "getPods")
test(t, "namespaced", audit.LevelNone, stages, stages, "getClusterRoles")
test(t, "namespaced", audit.LevelNone, stages, stages, "getLogs")
test(t, "namespaced", audit.LevelNone, stages, stages, "getMetrics")
test(t, "namespaced", audit.LevelMetadata, stages, stages, "getMetrics", "serviceAccounts", "default")
test(t, "namespaced", audit.LevelRequestResponse, stages, stages, "getMetrics", "getPods", "default")
test(t, "namespaced", audit.LevelRequestResponse, stages, stages, "getPodLogs", "getPods")
test(t, "cluster", audit.LevelMetadata, stages, stages, "default")
test(t, "cluster", audit.LevelNone, stages, stages, "create")
test(t, "cluster", audit.LevelMetadata, stages, stages, "tims")
test(t, "cluster", audit.LevelMetadata, stages, stages, "humans")
test(t, "cluster", audit.LevelNone, stages, stages, "serviceAccounts")
test(t, "cluster", audit.LevelNone, stages, stages, "getPods")
test(t, "cluster", audit.LevelRequestResponse, stages, stages, "getClusterRoles")
test(t, "cluster", audit.LevelRequest, stages, stages, "clusterRoleEdit", "getClusterRoles")
test(t, "cluster", audit.LevelNone, stages, stages, "getLogs")
test(t, "cluster", audit.LevelNone, stages, stages, "getMetrics")
test(t, "cluster", audit.LevelMetadata, stages, stages, "getMetrics", "serviceAccounts", "default")
test(t, "cluster", audit.LevelRequestResponse, stages, stages, "getMetrics", "getClusterRoles", "default")
test(t, "cluster", audit.LevelNone, stages, stages, "getPodLogs", "getPods")
test(t, "nonResource", audit.LevelMetadata, stages, stages, "default")
test(t, "nonResource", audit.LevelNone, stages, stages, "create")
test(t, "nonResource", audit.LevelMetadata, stages, stages, "tims")
test(t, "nonResource", audit.LevelMetadata, stages, stages, "humans")
test(t, "nonResource", audit.LevelNone, stages, stages, "serviceAccounts")
test(t, "nonResource", audit.LevelNone, stages, stages, "getPods")
test(t, "nonResource", audit.LevelNone, stages, stages, "getClusterRoles")
test(t, "nonResource", audit.LevelRequestResponse, stages, stages, "getLogs")
test(t, "nonResource", audit.LevelNone, stages, stages, "getMetrics")
test(t, "nonResource", audit.LevelMetadata, stages, stages, "getMetrics", "serviceAccounts", "default")
test(t, "nonResource", audit.LevelRequestResponse, stages, stages, "getLogs", "getClusterRoles", "default")
test(t, "nonResource", audit.LevelNone, stages, stages, "getPodLogs", "getPods")
test(t, "subresource", audit.LevelRequest, stages, stages, "getPodLogs", "getPods")
test(t, "subresource", audit.LevelRequest, stages, stages, "getPodWildcardMatching")
test(t, "subresource", audit.LevelRequest, stages, stages, "getPodResourceWildcardMatching")
test(t, "subresource", audit.LevelRequest, stages, stages, "getPodSubResourceWildcardMatching")
test(t, "Unauthorized", audit.LevelNone, stages, stages, "tims")
test(t, "Unauthorized", audit.LevelMetadata, stages, stages, "tims", "default")
test(t, "Unauthorized", audit.LevelNone, stages, stages, "humans")
test(t, "Unauthorized", audit.LevelMetadata, stages, stages, "humans", "default")
}
func TestChecker(t *testing.T) {
testAuditLevel(t, nil)
// test omitStages pre rule
test(t, "namespaced", audit.LevelRequest, nil, []audit.Stage{audit.StageRequestReceived}, "omit RequestReceived", "getPods", "default")
test(t, "namespaced", audit.LevelRequest, nil, []audit.Stage{audit.StageRequestReceived, audit.StageResponseStarted, audit.StageResponseComplete}, "only audit panic", "getPods", "default")
test(t, "cluster", audit.LevelRequest, nil, []audit.Stage{audit.StageRequestReceived}, "omit RequestReceived", "getPods", "default")
test(t, "cluster", audit.LevelRequest, nil, []audit.Stage{audit.StageRequestReceived, audit.StageResponseStarted, audit.StageResponseComplete}, "only audit panic", "getPods", "default")
test(t, "nonResource", audit.LevelRequest, nil, []audit.Stage{audit.StageRequestReceived}, "omit RequestReceived", "getPods", "default")
test(t, "nonResource", audit.LevelRequest, nil, []audit.Stage{audit.StageRequestReceived, audit.StageResponseStarted, audit.StageResponseComplete}, "only audit panic", "getPods", "default")
}
func TestCheckerPolicyOmitStages(t *testing.T) {
policyStages := []audit.Stage{audit.StageRequestReceived, audit.StageResponseStarted}
testAuditLevel(t, policyStages)
// test omitStages policy wide
test(t, "namespaced", audit.LevelRequest, policyStages, []audit.Stage{audit.StageRequestReceived, audit.StageResponseStarted}, "omit RequestReceived", "getPods", "default")
test(t, "namespaced", audit.LevelRequest, policyStages, []audit.Stage{audit.StageRequestReceived, audit.StageResponseStarted, audit.StageResponseComplete}, "only audit panic", "getPods", "default")
test(t, "cluster", audit.LevelRequest, policyStages, []audit.Stage{audit.StageRequestReceived, audit.StageResponseStarted}, "omit RequestReceived", "getPods", "default")
test(t, "cluster", audit.LevelRequest, policyStages, []audit.Stage{audit.StageRequestReceived, audit.StageResponseStarted, audit.StageResponseComplete}, "only audit panic", "getPods", "default")
test(t, "nonResource", audit.LevelMetadata, policyStages, []audit.Stage{audit.StageRequestReceived, audit.StageResponseStarted}, "default", "omit RequestReceived", "getPods")
test(t, "nonResource", audit.LevelRequest, policyStages, []audit.Stage{audit.StageRequestReceived, audit.StageResponseStarted, audit.StageResponseComplete}, "only audit panic", "getPods", "default")
}
// stageEqual returns true if s1 and s2 contain exactly the same set of stages
func stageEqual(s1, s2 []audit.Stage) bool {
m1 := make(map[audit.Stage]bool)
m2 := make(map[audit.Stage]bool)
for _, s := range s1 {
m1[s] = true
}
for _, s := range s2 {
m2[s] = true
}
if len(m1) != len(m2) {
return false
}
for key, value := range m1 {
if m2[key] != value {
return false
}
}<|fim▁hole|>func TestUnionStages(t *testing.T) {
var testCases = []struct {
s1, s2, exp []audit.Stage
}{
{
[]audit.Stage{},
[]audit.Stage{},
[]audit.Stage{},
},
{
[]audit.Stage{audit.StageRequestReceived},
[]audit.Stage{},
[]audit.Stage{audit.StageRequestReceived},
},
{
[]audit.Stage{audit.StageRequestReceived},
[]audit.Stage{audit.StageRequestReceived},
[]audit.Stage{audit.StageRequestReceived},
},
{
[]audit.Stage{audit.StageRequestReceived},
[]audit.Stage{audit.StageResponseStarted},
[]audit.Stage{audit.StageRequestReceived, audit.StageResponseStarted},
},
{
[]audit.Stage{audit.StageRequestReceived, audit.StageRequestReceived},
[]audit.Stage{audit.StageRequestReceived, audit.StageRequestReceived},
[]audit.Stage{audit.StageRequestReceived},
},
{
[]audit.Stage{audit.StageRequestReceived, audit.StageResponseStarted},
[]audit.Stage{audit.StagePanic, audit.StageRequestReceived},
[]audit.Stage{audit.StageRequestReceived, audit.StageResponseStarted, audit.StagePanic},
},
{
nil,
[]audit.Stage{audit.StageRequestReceived},
[]audit.Stage{audit.StageRequestReceived},
},
}
for _, tc := range testCases {
result := unionStages(tc.s1, tc.s2)
assert.Len(t, result, len(tc.exp))
for _, expStage := range tc.exp {
ok := false
for _, resultStage := range result {
if resultStage == expStage {
ok = true
break
}
}
assert.True(t, ok)
}
}
}<|fim▁end|> | return true
}
|
<|file_name|>writer.go<|end_file_name|><|fim▁begin|>package proxy
import (
"bytes"
"net/http"
)
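// Response exposes the status code, headers and body captured from a reply.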
type Response interface {
Status() int
Header() http.Header
Body() []byte
}
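// WriterRecorder wraps an http.ResponseWriter, recording the status code and
// body bytes as they are written so they can be inspected after the handler runs.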
type WriterRecorder struct {<|fim▁hole|> http.ResponseWriter
status int
body bytes.Buffer
}
func (w *WriterRecorder) WriteHeader(status int) {
w.ResponseWriter.WriteHeader(status)
w.status = status
}
func (w *WriterRecorder) Write(body []byte) (n int, err error) {
if n, err := w.body.Write(body); err != nil {
return n, err
}
return w.ResponseWriter.Write(body)
}
func (w *WriterRecorder) Body() []byte {
return w.body.Bytes()
}
func (w *WriterRecorder) Status() int {
if w.status == 0 {
return 200
}
return w.status
}<|fim▁end|> | |
<|file_name|>roles-modal.js<|end_file_name|><|fim▁begin|>define(function(require, exports, module) {
var Notify = require('common/bootstrap-notify');
exports.run = function() {
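        // Wires up the user-roles modal: the base role stays locked, removing
        // the teacher role asks for confirmation, and the role list is saved over AJAX.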
var $form = $("#user-roles-form"),
isTeacher = $form.find('input[value=ROLE_TEACHER]').prop('checked'),
currentUser = $form.data('currentuser'),
editUser = $form.data('edituser');
if (currentUser == editUser) {
$form.find('input[value=ROLE_SUPER_ADMIN]').attr('disabled', 'disabled');
        }
$form.find('input[value=ROLE_USER]').on('change', function(){
if ($(this).prop('checked') === false) {
$(this).prop('checked', true);
                var user_name = $('#change-user-roles-btn').data('user');
                Notify.info('A user must keep the ' + user_name + ' role');
}
});
$form.on('submit', function() {
var roles = [];
var $modal = $('#modal');
$form.find('input[name="roles[]"]:checked').each(function(){
roles.push($(this).val());
});
if ($.inArray('ROLE_USER', roles) < 0) {
                var user_name = $('#change-user-roles-btn').data('user');
                Notify.danger('A user must keep the ' + user_name + ' role');
return false;
}
if (isTeacher && $.inArray('ROLE_TEACHER', roles) < 0) {
                if (!confirm('Removing this user\'s teacher role will also revoke their teacher permissions on every course they teach. Are you sure?')) {
return false;
}
}
$form.find('input[value=ROLE_SUPER_ADMIN]').removeAttr('disabled');
$('#change-user-roles-btn').button('submiting').addClass('disabled');
$.post($form.attr('action'), $form.serialize(), function(html) {
$modal.modal('hide');
                Notify.success('User roles saved successfully');
var $tr = $(html);
$('#' + $tr.attr('id')).replaceWith($tr);
}).error(function(){
                Notify.danger('Operation failed');
});<|fim▁hole|> };
});<|fim▁end|> |
return false;
});
|
<|file_name|>i18n.js<|end_file_name|><|fim▁begin|>// i18n.js
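// Minimal runtime translation helper: loads the string table for the selected
// locale and swaps translated text into elements tagged with data-i18n* attributes.
// Usage (sketch): App.i18n.setLanguage('fr-FR', function() { /* UI re-rendered */ });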
App.i18n = {
strings: {},
loaded: false,
setLanguage: function(languageCode, callback) {
this.languageCode = languageCode;
App.localStorage.set('selected_interface_locale', languageCode);
this.loadStrings(callback);
},
    getLanguage: function() {
return this.languageCode;
},
translate: function(string, stringId) {
if (typeof(stringId) == 'undefined')
stringId = string;
if (typeof(this.strings[stringId]) === 'undefined' || this.strings[stringId] === false || this.strings[stringId] === '')
return string;
else
return this.strings[stringId];
},
translateDOM: function() {
var that = this;
$("[data-i18n]").each(function() {
var string = $(this).data('i18n');
string = that.translate(string);
$(this).text(string);
});
$("[data-i18nvalue]").each(function() {
var string = $(this).data('i18nvalue');
string = that.translate(string);<|fim▁hole|> string = that.translate(string);
$(this).attr('placeholder', string);
});
},
loadStrings: function(callback) {
var that = this;
var process = function(data) {
that.strings = data;
that.loaded = true;
that.translateDOM();
if (typeof(callback) == 'function')
callback();
};
this.loaded = false;
if (this.languageCode == 'default')
process({});
else
$.ajax({
url: App.settings.apiEntryPoint + 'i18n/bycode/' + this.languageCode.split('-').join(''),
data: {},
success: process,
dataType: 'json',
mimeType: 'application/json',
cache: true
});
}
};<|fim▁end|> | $(this).val(string);
});
$("[data-i18nplaceholder]").each(function() {
var string = $(this).data('i18nplaceholder'); |
<|file_name|>User.java<|end_file_name|><|fim▁begin|>package com.zhanghedr.mvc;
/**
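 * A simple data holder for a user's name and email address.
 *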
* Created by zhanghedr on 1/2/17.
*/
public class User {
private String name;
private String email;
public User(String name, String email) {
this.name = name;
this.email = email;
}
public String getName() {
return name;
}<|fim▁hole|> this.name = name;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
}<|fim▁end|> |
public void setName(String name) { |
<|file_name|>test_sdist.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""sdist tests"""
import os
import shutil
import sys
import tempfile
import unittest
import urllib
import unicodedata
import posixpath
from StringIO import StringIO
from setuptools.command.sdist import sdist
from setuptools.command.egg_info import manifest_maker
from setuptools.dist import Distribution
SETUP_ATTRS = {
'name': 'sdist_test',
'version': '0.0',
'packages': ['sdist_test'],
'package_data': {'sdist_test': ['*.txt']}
}
SETUP_PY = """\
from setuptools import setup
setup(**%r)
""" % SETUP_ATTRS
if sys.version_info >= (3,):
LATIN1_FILENAME = 'smörbröd.py'.encode('latin-1')
else:
LATIN1_FILENAME = 'sm\xf6rbr\xf6d.py'
# Cannot use context manager because of Python 2.4
def quiet():
global old_stdout, old_stderr
old_stdout, old_stderr = sys.stdout, sys.stderr
sys.stdout, sys.stderr = StringIO(), StringIO()
def unquiet():
sys.stdout, sys.stderr = old_stdout, old_stderr
# Fake byte literals to shut up Python <= 2.5
def b(s, encoding='utf-8'):
if sys.version_info >= (3,):
return s.encode(encoding)
return s
# HFS Plus returns decomposed UTF-8
def decompose(path):
if isinstance(path, unicode):
return unicodedata.normalize('NFD', path)
try:
path = path.decode('utf-8')
path = unicodedata.normalize('NFD', path)
path = path.encode('utf-8')
except UnicodeError:
pass # Not UTF-8
return path
# HFS Plus quotes unknown bytes like so: %F6
def hfs_quote(path):
if isinstance(path, unicode):
raise TypeError('bytes are required')
try:
u = path.decode('utf-8')
except UnicodeDecodeError:
path = urllib.quote(path) # Not UTF-8
else:
if sys.version_info >= (3,):
path = u
return path
class TestSdistTest(unittest.TestCase):
def setUp(self):
self.temp_dir = tempfile.mkdtemp()
f = open(os.path.join(self.temp_dir, 'setup.py'), 'w')
f.write(SETUP_PY)
f.close()
# Set up the rest of the test package
test_pkg = os.path.join(self.temp_dir, 'sdist_test')
os.mkdir(test_pkg)
# *.rst was not included in package_data, so c.rst should not be
# automatically added to the manifest when not under version control
for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']:
# Just touch the files; their contents are irrelevant
open(os.path.join(test_pkg, fname), 'w').close()
self.old_cwd = os.getcwd()
os.chdir(self.temp_dir)
def tearDown(self):
os.chdir(self.old_cwd)
shutil.rmtree(self.temp_dir)
def test_package_data_in_sdist(self):
"""Regression test for pull request #4: ensures that files listed in
package_data are included in the manifest even if they're not added to
version control.
"""
dist = Distribution(SETUP_ATTRS)
dist.script_name = 'setup.py'
cmd = sdist(dist)
cmd.ensure_finalized()
# squelch output
quiet()
try:
cmd.run()
finally:
unquiet()
manifest = cmd.filelist.files
self.assertTrue(os.path.join('sdist_test', 'a.txt') in manifest)
self.assertTrue(os.path.join('sdist_test', 'b.txt') in manifest)<|fim▁hole|>
def test_manifest_is_written_with_utf8_encoding(self):
# Test for #303.
dist = Distribution(SETUP_ATTRS)
dist.script_name = 'setup.py'
mm = manifest_maker(dist)
mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
os.mkdir('sdist_test.egg-info')
# UTF-8 filename
filename = posixpath.join('sdist_test', 'smörbröd.py')
# Add UTF-8 filename and write manifest
quiet()
try:
mm.run()
mm.filelist.files.append(filename)
mm.write_manifest()
finally:
unquiet()
manifest = open(mm.manifest, 'rbU')
contents = manifest.read()
manifest.close()
# The manifest should be UTF-8 encoded
try:
u = contents.decode('UTF-8')
except UnicodeDecodeError, e:
self.fail(e)
# The manifest should contain the UTF-8 filename
if sys.version_info >= (3,):
self.assertTrue(filename in u)
else:
self.assertTrue(filename in contents)
def test_manifest_is_written_with_surrogateescape_error_handler(self):
# Test for #303.
dist = Distribution(SETUP_ATTRS)
dist.script_name = 'setup.py'
mm = manifest_maker(dist)
mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
os.mkdir('sdist_test.egg-info')
# Latin-1 filename
filename = posixpath.join(b('sdist_test'), LATIN1_FILENAME)
# Add filename with surrogates and write manifest
quiet()
try:
mm.run()
if sys.version_info >= (3,):
u = filename.decode('utf-8', 'surrogateescape')
mm.filelist.files.append(u)
else:
mm.filelist.files.append(filename)
mm.write_manifest()
finally:
unquiet()
manifest = open(mm.manifest, 'rbU')
contents = manifest.read()
manifest.close()
# The manifest should contain the Latin-1 filename
self.assertTrue(filename in contents)
def test_manifest_is_read_with_utf8_encoding(self):
# Test for #303.
dist = Distribution(SETUP_ATTRS)
dist.script_name = 'setup.py'
cmd = sdist(dist)
cmd.ensure_finalized()
# UTF-8 filename
filename = os.path.join('sdist_test', 'smörbröd.py')
open(filename, 'w').close()
quiet()
try:
cmd.run()
finally:
unquiet()
# The filelist should contain the UTF-8 filename
if sys.platform == 'darwin':
filename = decompose(filename)
self.assertTrue(filename in cmd.filelist.files)
def test_manifest_is_read_with_surrogateescape_error_handler(self):
# Test for #303.
# This is hard to test on HFS Plus because it quotes unknown
# bytes (see previous test). Furthermore, egg_info.FileList
# only appends filenames that os.path.exist.
# We therefore write the manifest file by hand and check whether
# read_manifest produces a UnicodeDecodeError.
dist = Distribution(SETUP_ATTRS)
dist.script_name = 'setup.py'
cmd = sdist(dist)
cmd.ensure_finalized()
filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
quiet()
try:
cmd.run()
# Add Latin-1 filename to manifest
cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt')
manifest = open(cmd.manifest, 'ab')
manifest.write(filename+b('\n'))
manifest.close()
# Re-read manifest
try:
cmd.read_manifest()
except UnicodeDecodeError, e:
self.fail(e)
finally:
unquiet()
def test_sdist_with_utf8_encoded_filename(self):
# Test for #303.
dist = Distribution(SETUP_ATTRS)
dist.script_name = 'setup.py'
cmd = sdist(dist)
cmd.ensure_finalized()
# UTF-8 filename
filename = os.path.join(b('sdist_test'), b('smörbröd.py'))
open(filename, 'w').close()
quiet()
try:
cmd.run()
finally:
unquiet()
# The filelist should contain the UTF-8 filename
# (in one representation or other)
if sys.version_info >= (3,):
filename = filename.decode(sys.getfilesystemencoding(), 'surrogateescape')
if sys.platform == 'darwin':
filename = decompose(filename)
self.assertTrue(filename in cmd.filelist.files)
def test_sdist_with_latin1_encoded_filename(self):
# Test for #303.
dist = Distribution(SETUP_ATTRS)
dist.script_name = 'setup.py'
cmd = sdist(dist)
cmd.ensure_finalized()
# Latin-1 filename
filename = os.path.join(b('sdist_test'), LATIN1_FILENAME)
open(filename, 'w').close()
quiet()
try:
cmd.run()
finally:
unquiet()
# The filelist should contain the Latin-1 filename
# (in one representation or other)
if sys.platform == 'darwin':
filename = hfs_quote(filename)
elif sys.version_info >= (3,):
filename = filename.decode(sys.getfilesystemencoding(), 'surrogateescape')
self.assertTrue(filename in cmd.filelist.files)
def test_decompose(self):
self.assertNotEqual('smörbröd.py', decompose('smörbröd.py'))
if sys.version_info >= (3,):
self.assertEqual(len('smörbröd.py'), 11)
self.assertEqual(len(decompose('smörbröd.py')), 13)
else:
self.assertEqual(len('smörbröd.py'), 13)
self.assertEqual(len(decompose('smörbröd.py')), 15)
def test_hfs_quote(self):
self.assertEqual(hfs_quote(LATIN1_FILENAME), 'sm%F6rbr%F6d.py')
# Bytes are required
if sys.version_info >= (3,):
self.assertRaises(TypeError, hfs_quote, 'smörbröd.py')
else:
self.assertRaises(TypeError, hfs_quote, 'smörbröd.py'.decode('utf-8'))
def test_suite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)<|fim▁end|> | self.assertTrue(os.path.join('sdist_test', 'c.rst') not in manifest) |
<|file_name|>test_api.py<|end_file_name|><|fim▁begin|>""" Tests for OAuth Dispatch python API module. """
import unittest
from django.conf import settings
from django.http import HttpRequest
from django.test import TestCase
from oauth2_provider.models import AccessToken
from common.djangoapps.student.tests.factories import UserFactory
OAUTH_PROVIDER_ENABLED = settings.FEATURES.get('ENABLE_OAUTH2_PROVIDER')
if OAUTH_PROVIDER_ENABLED:
from openedx.core.djangoapps.oauth_dispatch import api
from openedx.core.djangoapps.oauth_dispatch.adapters import DOTAdapter
from openedx.core.djangoapps.oauth_dispatch.tests.constants import DUMMY_REDIRECT_URL
EXPECTED_DEFAULT_EXPIRES_IN = 36000
@unittest.skipUnless(OAUTH_PROVIDER_ENABLED, 'OAuth2 not enabled')
class TestOAuthDispatchAPI(TestCase):
""" Tests for oauth_dispatch's api.py module. """
def setUp(self):
super().setUp()
self.adapter = DOTAdapter()
self.user = UserFactory()
self.client = self.adapter.create_public_client(
name='public app',
user=self.user,
redirect_uri=DUMMY_REDIRECT_URL,
client_id='public-client-id',
)
def _assert_stored_token(self, stored_token_value, expected_token_user, expected_client):
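        """Assert the persisted access token links the expected user and client."""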
stored_access_token = AccessToken.objects.get(token=stored_token_value)
assert stored_access_token.user.id == expected_token_user.id
assert stored_access_token.application.client_id == expected_client.client_id
assert stored_access_token.application.user.id == expected_client.user.id
def test_create_token_success(self):
token = api.create_dot_access_token(HttpRequest(), self.user, self.client)
assert token['access_token']
assert token['refresh_token']<|fim▁hole|> 'scope': '',
},
token,
)
self._assert_stored_token(token['access_token'], self.user, self.client)
def test_create_token_another_user(self):
another_user = UserFactory()
token = api.create_dot_access_token(HttpRequest(), another_user, self.client)
self._assert_stored_token(token['access_token'], another_user, self.client)
def test_create_token_overrides(self):
expires_in = 4800
token = api.create_dot_access_token(
HttpRequest(), self.user, self.client, expires_in=expires_in, scopes=['profile'],
)
self.assertDictContainsSubset({'scope': 'profile'}, token)
self.assertDictContainsSubset({'expires_in': expires_in}, token)<|fim▁end|> | self.assertDictContainsSubset(
{
'token_type': 'Bearer',
'expires_in': EXPECTED_DEFAULT_EXPIRES_IN, |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, unicode_literals
from django.conf.urls import url
from django.core.urlresolvers import RegexURLResolver
from django.http import Http404
from wagtail.wagtailcore.models import Page
from wagtail.wagtailcore.url_routing import RouteResult
_creation_counter = 0<|fim▁hole|>
def route(pattern, name=None):
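    """Mark a RoutablePage method as an extra route.

    ``pattern`` is matched against the page's sub-URL, and ``name`` (defaulting
    to the method name) is used when reversing, e.g.::

        class EventPage(RoutablePageMixin, Page):
            @route(r'^archive/$')
            def archive(self, request):
                ...
    """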
def decorator(view_func):
global _creation_counter
_creation_counter += 1
# Make sure page has _routablepage_routes attribute
if not hasattr(view_func, '_routablepage_routes'):
view_func._routablepage_routes = []
# Add new route to view
view_func._routablepage_routes.append((
url(pattern, view_func, name=(name or view_func.__name__)),
_creation_counter,
))
return view_func
return decorator
class RoutablePageMixin(object):
"""
This class can be mixed in to a Page model, allowing extra routes to be
added to it.
"""
@classmethod
def get_subpage_urls(cls):
routes = []
for attr in dir(cls):
val = getattr(cls, attr, None)
if hasattr(val, '_routablepage_routes'):
routes.extend(val._routablepage_routes)
return tuple([
route[0]
for route in sorted(routes, key=lambda route: route[1])
])
@classmethod
def get_resolver(cls):
if '_routablepage_urlresolver' not in cls.__dict__:
subpage_urls = cls.get_subpage_urls()
cls._routablepage_urlresolver = RegexURLResolver(r'^/', subpage_urls)
return cls._routablepage_urlresolver
def reverse_subpage(self, name, args=None, kwargs=None):
"""
This method takes a route name/arguments and returns a URL path.
"""
args = args or []
kwargs = kwargs or {}
return self.get_resolver().reverse(name, *args, **kwargs)
def resolve_subpage(self, path):
"""
This method takes a URL path and finds the view to call.
"""
view, args, kwargs = self.get_resolver().resolve(path)
# Bind the method
view = view.__get__(self, type(self))
return view, args, kwargs
def route(self, request, path_components):
"""
This hooks the subpage URLs into Wagtail's routing.
"""
if self.live:
try:
path = '/'
if path_components:
path += '/'.join(path_components) + '/'
view, args, kwargs = self.resolve_subpage(path)
return RouteResult(self, args=(view, args, kwargs))
except Http404:
pass
return super(RoutablePageMixin, self).route(request, path_components)
def serve(self, request, view=None, args=None, kwargs=None):
if args is None:
args = []
if kwargs is None:
kwargs = {}
if view is None:
return super(RoutablePageMixin, self).serve(request, *args, **kwargs)
return view(request, *args, **kwargs)
def serve_preview(self, request, mode_name):
view, args, kwargs = self.resolve_subpage('/')
request.is_preview = True
return view(request, *args, **kwargs)
class RoutablePage(RoutablePageMixin, Page):
"""
    This class extends Page by adding methods which allow extra routes to be
added to it.
"""
class Meta:
abstract = True<|fim▁end|> | |
<|file_name|>iterable.py<|end_file_name|><|fim▁begin|>import copy
from django import forms
from django.db import models
from django.core.exceptions import ValidationError, ImproperlyConfigured
from django.db.models.fields.subclassing import Creator
from djangae.forms.fields import ListFormField
from django.utils.text import capfirst
class _FakeModel(object):
"""
An object of this class can pass itself off as a model instance
    when used as an argument to the Field.pre_save method (item_fields
of iterable fields are not actually fields of any model).
"""
def __init__(self, field, value):
setattr(self, field.attname, value)
class IterableField(models.Field):
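    """Base class for model fields storing a homogeneous iterable of values,
    delegating per-item conversion and validation to a wrapped item field."""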
__metaclass__ = models.SubfieldBase
@property
def _iterable_type(self): raise NotImplementedError()
def db_type(self, connection):
return 'list'
def get_prep_lookup(self, lookup_type, value):
if hasattr(value, 'prepare'):
return value.prepare()
if hasattr(value, '_prepare'):
return value._prepare()
if value is None:
raise ValueError("You can't query an iterable field with None")
if lookup_type == 'isnull' and value in (True, False):
return value
if lookup_type != 'exact' and lookup_type != 'in':
raise ValueError("You can only query using exact and in lookups on iterable fields")
if isinstance(value, (list, set)):
return [ self.item_field_type.to_python(x) for x in value ]
return self.item_field_type.to_python(value)
def get_prep_value(self, value):
if value is None:
raise ValueError("You can't set a {} to None (did you mean {}?)".format(
self.__class__.__name__, str(self._iterable_type())
))
if isinstance(value, basestring):
# Catch accidentally assigning a string to a ListField
raise ValueError("Tried to assign a string to a {}".format(self.__class__.__name__))
return super(IterableField, self).get_prep_value(value)
def __init__(self, item_field_type, *args, **kwargs):
        # This looks contradictory: we reject null=True from callers, then set it ourselves. That's because
        # None is reserved internally to represent an empty iterable; if callers could store their own
        # None values, the two meanings would collide.
if kwargs.get("null", False):
raise RuntimeError("IterableFields cannot be set as nullable (as the datastore doesn't differentiate None vs []")
kwargs["null"] = True
default = kwargs.get("default", [])
self._original_item_field_type = copy.deepcopy(item_field_type) # For deconstruction purposes
if default is not None and not callable(default):
kwargs["default"] = lambda: self._iterable_type(default)
if hasattr(item_field_type, 'attname'):
item_field_type = item_field_type.__class__
if callable(item_field_type):
item_field_type = item_field_type()
if isinstance(item_field_type, models.ForeignKey):
raise ImproperlyConfigured("Lists of ForeignKeys aren't supported, use RelatedSetField instead")
self.item_field_type = item_field_type
# We'll be pretending that item_field is a field of a model
# with just one "value" field.
assert not hasattr(self.item_field_type, 'attname')
self.item_field_type.set_attributes_from_name('value')
super(IterableField, self).__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super(IterableField, self).deconstruct()
args = (self._original_item_field_type,)
del kwargs["null"]
return name, path, args, kwargs
def contribute_to_class(self, cls, name):
self.item_field_type.model = cls
self.item_field_type.name = name
super(IterableField, self).contribute_to_class(cls, name)
# If items' field uses SubfieldBase we also need to.
item_metaclass = getattr(self.item_field_type, '__metaclass__', None)
if item_metaclass and issubclass(item_metaclass, models.SubfieldBase):
setattr(cls, self.name, Creator(self))
def _map(self, function, iterable, *args, **kwargs):
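        """Apply ``function`` to every element and rebuild this field's iterable type."""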
return self._iterable_type(function(element, *args, **kwargs) for element in iterable)
def to_python(self, value):<|fim▁hole|> # Because a set cannot be defined in JSON, we must allow a list to be passed as the value
# of a SetField, as otherwise SetField data can't be loaded from fixtures
if not hasattr(value, "__iter__"): # Allows list/set, not string
raise ValueError("Tried to assign a {} to a {}".format(value.__class__.__name__, self.__class__.__name__))
return self._map(self.item_field_type.to_python, value)
def pre_save(self, model_instance, add):
"""
Gets our value from the model_instance and passes its items
through item_field's pre_save (using a fake model instance).
"""
value = getattr(model_instance, self.attname)
if value is None:
return None
return self._map(lambda item: self.item_field_type.pre_save(_FakeModel(self.item_field_type, item), add), value)
def get_db_prep_value(self, value, connection, prepared=False):
if not prepared:
value = self.get_prep_value(value)
if value is None:
return None
# If the value is an empty iterable, store None
if value == self._iterable_type([]):
return None
return self._map(self.item_field_type.get_db_prep_save, value,
connection=connection)
def get_db_prep_lookup(self, lookup_type, value, connection,
prepared=False):
"""
Passes the value through get_db_prep_lookup of item_field.
"""
return self.item_field_type.get_db_prep_lookup(
lookup_type, value, connection=connection, prepared=prepared)
def validate(self, value_list, model_instance):
""" We want to override the default validate method from django.db.fields.Field, because it
is only designed to deal with a single choice from the user.
"""
if not self.editable:
# Skip validation for non-editable fields
return
# Validate choices
if self.choices:
valid_values = []
for choice in self.choices:
if isinstance(choice[0], (list, tuple)):
# this is an optgroup, so look inside it for the options
for optgroup_choice in choice[0]:
valid_values.append(optgroup_choice[0])
else:
valid_values.append(choice[0])
for value in value_list:
if value not in valid_values:
# TODO: if there is more than 1 invalid value then this should show all of the invalid values
raise ValidationError(self.error_messages['invalid_choice'] % value)
# Validate null-ness
if value_list is None and not self.null:
raise ValidationError(self.error_messages['null'])
if not self.blank and not value_list:
raise ValidationError(self.error_messages['blank'])
# apply the default items validation rules
for value in value_list:
self.item_field_type.clean(value, model_instance)
def formfield(self, **kwargs):
""" If this field has choices, then we can use a multiple choice field.
NB: The choices must be set on *this* field, e.g. this_field = ListField(CharField(), choices=x)
as opposed to: this_field = ListField(CharField(choices=x))
"""
        # Largely lifted straight from Field.formfield() in django/db/models/fields/__init__.py
defaults = {'required': not self.blank, 'label': capfirst(self.verbose_name), 'help_text': self.help_text}
        if self.has_default():  # Seed the form's initial value from the field default
if callable(self.default):
defaults['initial'] = self.default
defaults['show_hidden_initial'] = True
else:
defaults['initial'] = self.get_default()
if self.choices:
form_field_class = forms.MultipleChoiceField
defaults['choices'] = self.get_choices(include_blank=False) #no empty value on a multi-select
else:
form_field_class = ListFormField
defaults.update(**kwargs)
return form_field_class(**defaults)
class ListField(IterableField):
def __init__(self, *args, **kwargs):
self.ordering = kwargs.pop('ordering', None)
if self.ordering is not None and not callable(self.ordering):
raise TypeError("'ordering' has to be a callable or None, "
"not of type %r." % type(self.ordering))
super(ListField, self).__init__(*args, **kwargs)
def pre_save(self, model_instance, add):
value = super(ListField, self).pre_save(model_instance, add)
if value and self.ordering:
value.sort(key=self.ordering)
return value
@property
def _iterable_type(self):
return list
def deconstruct(self):
name, path, args, kwargs = super(ListField, self).deconstruct()
kwargs['ordering'] = self.ordering
return name, path, args, kwargs
class SetField(IterableField):
@property
def _iterable_type(self):
return set
def db_type(self, connection):
return 'set'
def get_db_prep_save(self, *args, **kwargs):
ret = super(SetField, self).get_db_prep_save(*args, **kwargs)
if ret:
ret = list(ret)
return ret
def get_db_prep_lookup(self, *args, **kwargs):
ret = super(SetField, self).get_db_prep_lookup(*args, **kwargs)
if ret:
ret = list(ret)
return ret
def value_to_string(self, obj):
"""
Custom method for serialization, as JSON doesn't support
serializing sets.
"""
return str(list(self._get_val_from_obj(obj)))<|fim▁end|> | if value is None:
return self._iterable_type([])
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>"""
Example
-------
class SystemSetting(KVModel):<|fim▁hole|>setting = SystemSetting.create(key='foo', value=100)
loaded_setting = SystemSetting.get_by_key('foo')
"""
from django.db import models
from .fields import SerializableField
class KVModel(models.Model):
"""
An Abstract model that has key and value fields
key -- Unique CharField of max_length 255
value -- SerializableField by default could be used to store bool, int,
float, str, list, dict and date
"""
key = models.CharField(max_length=255, unique=True)
value = SerializableField(blank=True, null=True)
def __unicode__(self):
return 'KVModel instance: ' + self.key + ' = ' + unicode(self.value)
@classmethod
def get_by_key(cls, key):
"""
A static method that returns a KVModel instance.
key -- unique key that is used for the search.
this method will throw a DoesNotExist exception if an object with the
key provided is not found.
"""
return cls.objects.get(key=key)
class Meta:
abstract = True<|fim▁end|> | pass
|
<|file_name|>local.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
'''
Local settings
- Run in Debug mode
- Use console backend for emails
- Add Django Debug Toolbar
- Add django-extensions as app
'''
import os
from .common import * # noqa
# DEBUG
# ------------------------------------------------------------------------------
DEBUG = env.bool('DJANGO_DEBUG', default=True)
TEMPLATES[0]['OPTIONS']['debug'] = DEBUG
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key only used for development and testing.<|fim▁hole|>EMAIL_HOST = 'localhost'
EMAIL_PORT = 1025
# CACHING
# ------------------------------------------------------------------------------
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': ''
}
}
# django-debug-toolbar
# ------------------------------------------------------------------------------
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INSTALLED_APPS += ('debug_toolbar', )
INTERNAL_IPS = ('127.0.0.1', '10.0.2.2',)
DEBUG_TOOLBAR_CONFIG = {
'DISABLE_PANELS': [
'debug_toolbar.panels.redirects.RedirectsPanel',
],
'SHOW_TEMPLATE_CONTEXT': True,
}
# django-extensions
# ------------------------------------------------------------------------------
INSTALLED_APPS += ('django_extensions', )
# TESTING
# ------------------------------------------------------------------------------
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
# Your local stuff: Below this line define 3rd party library settings
STRIPE_PUBLIC_KEY = os.environ.get("STRIPE_PUBLIC_KEY", "pk_test_4XMRbU6H6Jf5B2TXmICnvXS7")
STRIPE_SECRET_KEY = os.environ.get("STRIPE_SECRET_KEY", "sk_test_4XMRnH3aMfrhHN1nZO2uzcDE")
DJSTRIPE_PLANS = {
"monthly": {
"stripe_plan_id": "pro-monthly",
"name": "Web App Pro ($24.99/month)",
"description": "The monthly subscription plan to WebApp",
"price": 2499, # $24.99
"currency": "usd",
"interval": "month"
},
"yearly": {
"stripe_plan_id": "pro-yearly",
"name": "Web App Pro ($199/year)",
"description": "The annual subscription plan to WebApp",
"price": 19900, # $199.00
"currency": "usd",
"interval": "year"
}
}<|fim▁end|> | SECRET_KEY = env("DJANGO_SECRET_KEY", default='CHANGEME!!!')
# Mail settings
# ------------------------------------------------------------------------------ |
<|file_name|>facebook_fetcher.py<|end_file_name|><|fim▁begin|>import re
import datetime
import dateutil.parser
from django.conf import settings
from django.utils import feedgenerator
from django.utils.html import linebreaks
from apps.social.models import MSocialServices
from apps.reader.models import UserSubscription
from utils import log as logging
from vendor.facebook import GraphAPIError
class FacebookFetcher:
def __init__(self, feed, options=None):
self.feed = feed
self.options = options or {}
def fetch(self):
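        """Build an Atom feed from a Facebook page's posts and videos.

        Returns the serialized feed as a UTF-8 string, or None when the page
        name or an authorized Facebook API client can't be resolved.
        """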
page_name = self.extract_page_name()
if not page_name:
return
facebook_user = self.facebook_user()
if not facebook_user:
return
# If 'video', use video API to get embed:
# f.get_object('tastyvegetarian', fields='posts')
# f.get_object('1992797300790726', fields='embed_html')
feed = self.fetch_page_feed(facebook_user, page_name, 'name,about,posts,videos,photos')
data = {}
data['title'] = feed.get('name', "%s on Facebook" % page_name)
data['link'] = feed.get('link', "https://facebook.com/%s" % page_name)
data['description'] = feed.get('about', "%s on Facebook" % page_name)
data['lastBuildDate'] = datetime.datetime.utcnow()
data['generator'] = 'NewsBlur Facebook API Decrapifier - %s' % settings.NEWSBLUR_URL
data['docs'] = None
data['feed_url'] = self.feed.feed_address
rss = feedgenerator.Atom1Feed(**data)
merged_data = []
<|fim▁hole|> story_data = self.page_posts_story(facebook_user, post)
if not story_data:
continue
merged_data.append(story_data)
videos = feed.get('videos', {}).get('data', None)
if videos:
for video in videos:
story_data = self.page_video_story(facebook_user, video)
if not story_data:
continue
for seen_data in merged_data:
if story_data['link'] == seen_data['link']:
# Video wins over posts (and attachments)
seen_data['description'] = story_data['description']
seen_data['title'] = story_data['title']
break
for story_data in merged_data:
rss.add_item(**story_data)
return rss.writeString('utf-8')
def extract_page_name(self):
page = None
try:
            page_groups = re.search(r'facebook\.com/(\w+)/?', self.feed.feed_address)
if not page_groups:
return
page = page_groups.group(1)
except IndexError:
return
return page
def facebook_user(self):
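        """Find a usable Facebook API client, preferring the requesting user and
        otherwise falling back to any subscriber of this feed with a connected
        Facebook account."""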
facebook_api = None
social_services = None
if self.options.get('requesting_user_id', None):
social_services = MSocialServices.get_user(self.options.get('requesting_user_id'))
facebook_api = social_services.facebook_api()
if not facebook_api:
logging.debug(u' ***> [%-30s] ~FRFacebook fetch failed: %s: No facebook API for %s' %
(self.feed.log_title[:30], self.feed.feed_address, self.options))
return
else:
usersubs = UserSubscription.objects.filter(feed=self.feed)
if not usersubs:
logging.debug(u' ***> [%-30s] ~FRFacebook fetch failed: %s: No subscriptions' %
(self.feed.log_title[:30], self.feed.feed_address))
return
for sub in usersubs:
social_services = MSocialServices.get_user(sub.user_id)
if not social_services.facebook_uid:
continue
facebook_api = social_services.facebook_api()
if not facebook_api:
continue
else:
break
if not facebook_api:
logging.debug(u' ***> [%-30s] ~FRFacebook fetch failed: %s: No facebook API for %s' %
(self.feed.log_title[:30], self.feed.feed_address, usersubs[0].user.username))
return
return facebook_api
def fetch_page_feed(self, facebook_user, page, fields):
try:
stories = facebook_user.get_object(page, fields=fields)
except GraphAPIError, e:
message = str(e).lower()
if 'session has expired' in message:
logging.debug(u' ***> [%-30s] ~FRFacebook page failed/expired, disconnecting facebook: %s: %s' %
(self.feed.log_title[:30], self.feed.feed_address, e))
self.feed.save_feed_history(560, "Facebook Error: Expired token")
return {}
if not stories:
return {}
return stories
def page_posts_story(self, facebook_user, page_story):
categories = set()
if 'message' not in page_story:
# Probably a story shared on the page's timeline, not a published story
return
message = linebreaks(page_story['message'])
created_date = page_story['created_time']
if isinstance(created_date, unicode):
created_date = dateutil.parser.parse(created_date)
fields = facebook_user.get_object(page_story['id'], fields='permalink_url,link,attachments')
permalink = fields.get('link', fields['permalink_url'])
attachments_html = ""
if fields.get('attachments', None) and fields['attachments']['data']:
for attachment in fields['attachments']['data']:
if 'media' in attachment:
attachments_html += "<img src=\"%s\" />" % attachment['media']['image']['src']
if attachment.get('subattachments', None):
for subattachment in attachment['subattachments']['data']:
attachments_html += "<img src=\"%s\" />" % subattachment['media']['image']['src']
content = """<div class="NB-facebook-rss">
<div class="NB-facebook-rss-message">%s</div>
<div class="NB-facebook-rss-picture">%s</div>
</div>""" % (
message,
attachments_html
)
story = {
'title': message,
'link': permalink,
'description': content,
'categories': list(categories),
'unique_id': "fb_post:%s" % page_story['id'],
'pubdate': created_date,
}
return story
def page_video_story(self, facebook_user, page_story):
categories = set()
if 'description' not in page_story:
return
message = linebreaks(page_story['description'])
created_date = page_story['updated_time']
if isinstance(created_date, unicode):
created_date = dateutil.parser.parse(created_date)
permalink = facebook_user.get_object(page_story['id'], fields='permalink_url')['permalink_url']
embed_html = facebook_user.get_object(page_story['id'], fields='embed_html')
if permalink.startswith('/'):
permalink = "https://www.facebook.com%s" % permalink
content = """<div class="NB-facebook-rss">
<div class="NB-facebook-rss-message">%s</div>
<div class="NB-facebook-rss-embed">%s</div>
</div>""" % (
message,
embed_html.get('embed_html', '')
)
story = {
'title': page_story.get('story', message),
'link': permalink,
'description': content,
'categories': list(categories),
'unique_id': "fb_post:%s" % page_story['id'],
'pubdate': created_date,
}
return story
def favicon_url(self):
page_name = self.extract_page_name()
facebook_user = self.facebook_user()
if not facebook_user:
logging.debug(u' ***> [%-30s] ~FRFacebook icon failed, disconnecting facebook: %s' %
(self.feed.log_title[:30], self.feed.feed_address))
return
try:
picture_data = facebook_user.get_object(page_name, fields='picture')
except GraphAPIError, e:
message = str(e).lower()
if 'session has expired' in message:
logging.debug(u' ***> [%-30s] ~FRFacebook icon failed/expired, disconnecting facebook: %s: %s' %
(self.feed.log_title[:30], self.feed.feed_address, e))
return
if 'picture' in picture_data:
return picture_data['picture']['data']['url']<|fim▁end|> | posts = feed.get('posts', {}).get('data', None)
if posts:
for post in posts: |
<|file_name|>doom-one2.go<|end_file_name|><|fim▁begin|>package styles
import (<|fim▁hole|>)
// Doom One 2 style. Inspired by Atom One and Doom Emacs's Atom One theme
var DoomOne2 = Register(chroma.MustNewStyle("doom-one2", chroma.StyleEntries{
chroma.Text: "#b0c4de",
chroma.Error: "#b0c4de",
chroma.Comment: "italic #8a93a5",
chroma.CommentHashbang: "bold",
chroma.Keyword: "#76a9f9",
chroma.KeywordConstant: "#e5c07b",
chroma.KeywordType: "#e5c07b",
chroma.Operator: "#54b1c7",
chroma.OperatorWord: "bold #b756ff",
chroma.Punctuation: "#abb2bf",
chroma.Name: "#aa89ea",
chroma.NameAttribute: "#cebc3a",
chroma.NameBuiltin: "#e5c07b",
chroma.NameClass: "#ca72ff",
chroma.NameConstant: "bold",
chroma.NameDecorator: "#e5c07b",
chroma.NameEntity: "#bda26f",
chroma.NameException: "bold #fd7474",
chroma.NameFunction: "#00b1f7",
chroma.NameProperty: "#cebc3a",
chroma.NameLabel: "#f5a40d",
chroma.NameNamespace: "#ca72ff",
chroma.NameTag: "#76a9f9",
chroma.NameVariable: "#DCAEEA",
chroma.NameVariableClass: "#DCAEEA",
chroma.NameVariableGlobal: "bold #DCAEEA",
chroma.NameVariableInstance: "#e06c75",
chroma.NameVariableMagic: "#DCAEEA",
chroma.Literal: "#98c379",
chroma.LiteralDate: "#98c379",
chroma.Number: "#d19a66",
chroma.NumberBin: "#d19a66",
chroma.NumberFloat: "#d19a66",
chroma.NumberHex: "#d19a66",
chroma.NumberInteger: "#d19a66",
chroma.NumberIntegerLong: "#d19a66",
chroma.NumberOct: "#d19a66",
chroma.String: "#98c379",
chroma.StringAffix: "#98c379",
chroma.StringBacktick: "#98c379",
chroma.StringDelimiter: "#98c379",
chroma.StringDoc: "#7e97c3",
chroma.StringDouble: "#63c381",
chroma.StringEscape: "bold #d26464",
chroma.StringHeredoc: "#98c379",
chroma.StringInterpol: "#98c379",
chroma.StringOther: "#70b33f",
chroma.StringRegex: "#56b6c2",
chroma.StringSingle: "#98c379",
chroma.StringSymbol: "#56b6c2",
chroma.Generic: "#b0c4de",
chroma.GenericDeleted: "#b0c4de",
chroma.GenericEmph: "italic",
chroma.GenericHeading: "bold #a2cbff",
chroma.GenericInserted: "#a6e22e",
chroma.GenericOutput: "#a6e22e",
chroma.GenericUnderline: "underline",
chroma.GenericPrompt: "#a6e22e",
chroma.GenericStrong: "bold",
chroma.GenericSubheading: "#a2cbff",
chroma.GenericTraceback: "#a2cbff",
chroma.Background: "#b0c4de bg:#282c34",
}))<|fim▁end|> | "github.com/alecthomas/chroma" |
<|file_name|>import4.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.<|fim▁hole|>use zed::bar;
mod zed {
pub fn bar() { println!("bar"); }
}
pub fn main() { let _zed = 42; bar(); }<|fim▁end|> |
// run-pass
|
<|file_name|>RetreatCommand3HullMod.java<|end_file_name|><|fim▁begin|>package org.tc.autonomous;
<|fim▁hole|> super(3);
}
}<|fim▁end|> |
public class RetreatCommand3HullMod extends AbstractRetreatCommandHullMod {
public RetreatCommand3HullMod() {
|
<|file_name|>user.service.ts<|end_file_name|><|fim▁begin|>/**
* Created by fuzhihong on 16/10/14.<|fim▁hole|> */
import { Injectable } from '@angular/core';
import {Headers,Http,Response} from '@angular/http';
import {Subject} from 'rxjs/Subject';
import 'rxjs/add/operator/toPromise'
import {User} from './user'
@Injectable()
export class UserService{
private headers=new Headers({'Content-Type':'application/json'});
private userUrl='app/Users';
private loginedUser=new Subject<string>();
loginedUser$=this.loginedUser.asObservable();
LoginUser(user:string){
console.log(user)
this.loginedUser.next(user)
}
constructor(private http:Http){}
getUsers():Promise<User[]>{
return this.http.get(this.userUrl)
.toPromise()
.then(resp=>resp.json().data)
}
getUser(UserInfo):Promise<User>{
return this.getUsers()
.then(resp=>resp.find(user=>user.userName===UserInfo.userName))
}
signin(UserInfo):Promise<User>{
return this.http.post(this.userUrl,JSON.stringify({userName:UserInfo.userName,password:UserInfo.password}),{headers:this.headers})
.toPromise()
.then(res=>res.json().data)
}
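    // Resolves to true only when the supplied credentials match a stored user.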
login(UserInfo){
return this.getUser(UserInfo)
        .then(user => !!user && UserInfo.password === user.password && UserInfo.userName === user.userName)
}
}<|fim▁end|> | |
<|file_name|>rlptraits.rs<|end_file_name|><|fim▁begin|>// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Common RLP traits
use ::{DecoderError, UntrustedRlp};
use bytes::VecLike;
use rlpstream::RlpStream;
use elastic_array::ElasticArray1024;
/// Type is able to decode RLP.
pub trait Decoder: Sized {
/// Read a value from the RLP into a given type.
fn read_value<T, F>(&self, f: &F) -> Result<T, DecoderError>
where F: Fn(&[u8]) -> Result<T, DecoderError>;
	/// Get underlying `UntrustedRlp` object.
fn as_rlp(&self) -> &UntrustedRlp;
/// Get underlying raw bytes slice.
fn as_raw(&self) -> &[u8];
}
/// RLP decodable trait
pub trait Decodable: Sized {
/// Decode a value from RLP bytes
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder;
}
/// Internal helper trait. Implement `Decodable` for custom types.
pub trait RlpDecodable: Sized {
/// Decode a value from RLP bytes
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder;
}
/// A view into RLP encoded data
pub trait View<'a, 'view>: Sized {
/// RLP prototype type
type Prototype;
/// Payload info type
type PayloadInfo;
/// Data type
type Data;
/// Item type
type Item;
/// Iterator type
type Iter;
/// Creates a new instance of `Rlp` reader
fn new(bytes: &'a [u8]) -> Self;
/// The raw data of the RLP as slice.
///
/// ```rust
/// extern crate rlp;
/// use rlp::*;
///
/// fn main () {
/// let data = vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g'];
/// let rlp = Rlp::new(&data);
/// let dog = rlp.at(1).as_raw();
/// assert_eq!(dog, &[0x83, b'd', b'o', b'g']);
/// }
/// ```
fn as_raw(&'view self) -> &'a [u8];
/// Get the prototype of the RLP.
fn prototype(&self) -> Self::Prototype;
/// Get payload info.
fn payload_info(&self) -> Self::PayloadInfo;
/// Get underlieing data.
fn data(&'view self) -> Self::Data;
/// Returns number of RLP items.
///
/// ```rust
/// extern crate rlp;
/// use rlp::*;
///
/// fn main () {
/// let data = vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g'];
/// let rlp = Rlp::new(&data);
/// assert_eq!(rlp.item_count(), 2);
/// let view = rlp.at(1);
/// assert_eq!(view.item_count(), 0);
/// }
/// ```
fn item_count(&self) -> usize;
/// Returns the number of bytes in the data, or zero if it isn't data.
///
/// ```rust
/// extern crate rlp;
/// use rlp::*;
///
/// fn main () {
/// let data = vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g'];
/// let rlp = Rlp::new(&data);
/// assert_eq!(rlp.size(), 0);
/// let view = rlp.at(1);
/// assert_eq!(view.size(), 3);
/// }
/// ```
fn size(&self) -> usize;
/// Get view onto RLP-slice at index.
///
/// Caches offset to given index, so access to successive
/// slices is faster.
///
/// ```rust
/// extern crate rlp;
/// use rlp::*;
///
/// fn main () {
/// let data = vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g'];
/// let rlp = Rlp::new(&data);
/// let dog: String = rlp.at(1).as_val();
/// assert_eq!(dog, "dog".to_string());
	/// }
	/// ```
fn at(&'view self, index: usize) -> Self::Item;
/// No value
///
/// ```rust
/// extern crate rlp;
/// use rlp::*;
///
/// fn main () {
/// let data = vec![];
/// let rlp = Rlp::new(&data);
/// assert!(rlp.is_null());
/// }
/// ```
fn is_null(&self) -> bool;
/// Contains a zero-length string or zero-length list.
///
/// ```rust
/// extern crate rlp;
/// use rlp::*;
///
/// fn main () {
/// let data = vec![0xc0];
/// let rlp = Rlp::new(&data);
/// assert!(rlp.is_empty());
/// }
/// ```
fn is_empty(&self) -> bool;
/// List value
///
/// ```rust
/// extern crate rlp;
/// use rlp::*;
///
/// fn main () {
/// let data = vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g'];
/// let rlp = Rlp::new(&data);
/// assert!(rlp.is_list());
/// }
/// ```
fn is_list(&self) -> bool;
/// String value
///
/// ```rust
/// extern crate rlp;
/// use rlp::*;
///
/// fn main () {
/// let data = vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g'];
/// let rlp = Rlp::new(&data);
/// assert!(rlp.at(1).is_data());
/// }
/// ```
fn is_data(&self) -> bool;
/// Int value
///
/// ```rust
/// extern crate rlp;
/// use rlp::*;
///
/// fn main () {
/// let data = vec![0xc1, 0x10];
/// let rlp = Rlp::new(&data);
/// assert_eq!(rlp.is_int(), false);
/// assert_eq!(rlp.at(0).is_int(), true);
/// }
/// ```
fn is_int(&self) -> bool;
/// Get iterator over rlp-slices
///
/// ```rust
/// extern crate rlp;
/// use rlp::*;
///
/// fn main () {
/// let data = vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g'];
/// let rlp = Rlp::new(&data);
/// let strings: Vec<String> = rlp.iter().map(| i | i.as_val()).collect();
/// }
/// ```
fn iter(&'view self) -> Self::Iter;
/// Decode data into an object
fn as_val<T>(&self) -> Result<T, DecoderError> where T: RlpDecodable;
/// Decode data at given list index into an object
fn val_at<T>(&self, index: usize) -> Result<T, DecoderError> where T: RlpDecodable;
}
/// Raw RLP encoder
pub trait Encoder {<|fim▁hole|> /// Write a value represented as bytes
fn emit_value<E: ByteEncodable>(&mut self, value: &E);
/// Write raw preencoded data to the output
fn emit_raw(&mut self, bytes: &[u8]) -> ();
}
/// Primitive data type encodable to RLP
pub trait ByteEncodable {
/// Serialize this object to given byte container
fn to_bytes<V: VecLike<u8>>(&self, out: &mut V);
/// Get size of serialised data in bytes
fn bytes_len(&self) -> usize;
}
/// Structure encodable to RLP. Implement this trait for custom types that should be encodable to RLP.
pub trait Encodable {
/// Append a value to the stream
fn rlp_append(&self, s: &mut RlpStream);
/// Get rlp-encoded bytes for this instance
fn rlp_bytes(&self) -> ElasticArray1024<u8> {
let mut s = RlpStream::new();
self.rlp_append(&mut s);
s.drain()
}
}
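// Illustrative sketch (not part of the original source): a custom struct can
// be made encodable by appending its fields as a list. `Transaction` is a
// hypothetical type, and the example assumes the crate's primitive
// `RlpEncodable` impls for `u64` and `Vec<u8>`:
//
// struct Transaction { nonce: u64, payload: Vec<u8> }
//
// impl Encodable for Transaction {
// 	fn rlp_append(&self, s: &mut RlpStream) {
// 		s.begin_list(2);
// 		s.append(&self.nonce);
// 		s.append(&self.payload);
// 	}
// }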
/// Encodable wrapper trait required to handle special case of encoding a &[u8] as string and not as list
pub trait RlpEncodable {
/// Append a value to the stream
fn rlp_append(&self, s: &mut RlpStream);
}
/// RLP encoding stream
pub trait Stream: Sized {
/// Initializes instance of empty `Stream`.
fn new() -> Self;
/// Initializes the `Stream` as a list.
fn new_list(len: usize) -> Self;
	/// Appends value to the end of stream, chainable.
///
/// ```rust
/// extern crate rlp;
/// use rlp::*;
///
/// fn main () {
/// let mut stream = RlpStream::new_list(2);
/// stream.append(&"cat").append(&"dog");
/// let out = stream.out();
/// assert_eq!(out, vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g']);
/// }
/// ```
fn append<'a, E>(&'a mut self, value: &E) -> &'a mut Self where E: RlpEncodable;
/// Declare appending the list of given size, chainable.
///
/// ```rust
/// extern crate rlp;
/// use rlp::*;
///
/// fn main () {
/// let mut stream = RlpStream::new_list(2);
/// stream.begin_list(2).append(&"cat").append(&"dog");
/// stream.append(&"");
/// let out = stream.out();
/// assert_eq!(out, vec![0xca, 0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g', 0x80]);
/// }
/// ```
fn begin_list(&mut self, len: usize) -> &mut Self;
	/// Appends null to the end of stream, chainable.
///
/// ```rust
/// extern crate rlp;
/// use rlp::*;
///
/// fn main () {
/// let mut stream = RlpStream::new_list(2);
/// stream.append_empty_data().append_empty_data();
/// let out = stream.out();
/// assert_eq!(out, vec![0xc2, 0x80, 0x80]);
/// }
/// ```
fn append_empty_data(&mut self) -> &mut Self;
/// Appends raw (pre-serialised) RLP data. Use with caution. Chainable.
fn append_raw<'a>(&'a mut self, bytes: &[u8], item_count: usize) -> &'a mut Self;
/// Clear the output stream so far.
///
/// ```rust
/// extern crate rlp;
/// use rlp::*;
///
/// fn main () {
/// let mut stream = RlpStream::new_list(3);
/// stream.append(&"cat");
/// stream.clear();
/// stream.append(&"dog");
/// let out = stream.out();
/// assert_eq!(out, vec![0x83, b'd', b'o', b'g']);
	/// }
	/// ```
fn clear(&mut self);
	/// Returns true if the stream doesn't expect any more items.
///
/// ```rust
/// extern crate rlp;
/// use rlp::*;
///
/// fn main () {
/// let mut stream = RlpStream::new_list(2);
/// stream.append(&"cat");
/// assert_eq!(stream.is_finished(), false);
/// stream.append(&"dog");
/// assert_eq!(stream.is_finished(), true);
/// let out = stream.out();
/// assert_eq!(out, vec![0xc8, 0x83, b'c', b'a', b't', 0x83, b'd', b'o', b'g']);
	/// }
	/// ```
fn is_finished(&self) -> bool;
/// Get raw encoded bytes
fn as_raw(&self) -> &[u8];
/// Streams out encoded bytes.
///
/// panic! if stream is not finished.
fn out(self) -> Vec<u8>;
}
/// Trait for compressing and decompressing RLP by replacement of common terms.
pub trait Compressible: Sized {
/// Indicates the origin of RLP to be compressed.
type DataType;
/// Compress given RLP type using appropriate methods.
fn compress(&self, t: Self::DataType) -> ElasticArray1024<u8>;
/// Decompress given RLP type using appropriate methods.
fn decompress(&self, t: Self::DataType) -> ElasticArray1024<u8>;
}<|fim▁end|> | |
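// Illustrative usage sketch (not part of the original source): given a value
// `rlp` implementing `Compressible` and a hypothetical `RlpType` enum as its
// `DataType`, compression and the inverse decompression might look like:
//
// let compressed = rlp.compress(RlpType::Blocks);
// let restored = compressed_rlp.decompress(RlpType::Blocks);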
<|file_name|>BinaryNumberTest.cpp<|end_file_name|><|fim▁begin|>/*--------------------------------------------------------------------------*
BinaryNumberTest.cpp
This test file provides some examples of using WinUnit, by testing a
"BinaryNumber" class. The BinaryNumber class has two constructors:
one takes a string consisting of '1' and '0' characters; the other takes
an unsigned short. The class has NumericValue and StringValue properties,
which return counterparts to what the constructors take, except the string
value is normalized with leading zeros. See ExampleLib\BinaryNumber.h for
the "spec" of the class.
Things to note:
1. There is a one-to-one correspondence between the test file and the
class. If I had additional classes I would make an additional test
file for each.
2. There is exactly one .h/.cpp pair for each production class.
3. The test file name for the class is <class> + Test.cpp. This is a
convention that makes it easier to match up test files with
production classes.
4. The test names start with the name of the class, followed by the
name (or description, in the case of constructors) of the method(s)
<|fim▁hole|> being tested, followed by some statement of what's expected to be
shown by the test. This makes it easier to run related tests as a
group (using the winunit -p option).
5. The WIN_ASSERT* macros take optional printf-style format strings and
arguments. Note that here I've written them using the _T("") style
because I want to be able to build both with and without _UNICODE. If
you are only building with _UNICODE you can use wchar_t*.
6. I usually put a prefix on messages passed to the WIN_TRACE macro, such
as "INFO:" or "WARNING:", to help distinguish them from errors.
*--------------------------------------------------------------------------*/
#include <WinUnit.h>
#include "..\SampleLib\BinaryNumber.h"
#include "BinaryNumberTestDataProvider.h"
#include "BinaryNumberDataRow.h"
using namespace ExampleLib;
// This overload of WinUnit::ToString() allows the BinaryNumber object
// to be displayed correctly if it shows up in error messages. Implement this
// especially for objects that implement operator== and are compared using
// WIN_ASSERT_EQUAL (see below). If this were not implemented, everything
// would still work--you'd just see "[OBJECT]" in error messages containing
// a BinaryNumber object
template<>
const TCHAR* WinUnit::ToString(const BinaryNumber& object,
TCHAR buffer[], size_t bufferSize)
{
// This is because I want to be able to run both with and without _UNICODE,
// and my BinaryNumber string field is only non-Unicode. (%S means to use
// the opposite "wideness" from the version of the string function being
// called.)
#ifdef _UNICODE
wchar_t* formatString = L"%S";
#else
char* formatString = "%s";
#endif
::_sntprintf_s(buffer, bufferSize, _TRUNCATE, formatString, object.StringValue);
return buffer;
}
// This test demonstrates WIN_ASSERT_EQUAL and WIN_ASSERT_STRING_EQUAL. It's
// usually a good idea to add more details via message parameters to the assert.
// Note that messages are always expected to be of type TCHAR* (i.e. Unicode
// when _UNICODE is defined; non-Unicode otherwise). However, the first
// two arguments of WIN_ASSERT_STRING_EQUAL can be either wchar_t* or char*
// (but both must be the same).
BEGIN_TEST(BinaryNumberPlusAddsTwoNumbersCorrectly)
{
BinaryNumber sum = BinaryNumber("101") + BinaryNumber("110");
WIN_ASSERT_EQUAL(11, sum.NumericValue, _T("5 + 6 should be 11."));
WIN_ASSERT_STRING_EQUAL("0000000000001011", sum.StringValue, _T("Value is 11."));
}
END_TEST
// In order to use WIN_ASSERT_EQUAL on non-fundamental datatypes, operator==
// must be implemented for the object in question (which it is, here). If
// there were a failure here (which you can see by changing the lines so
// the two are not equivalent), the objects would be displayed using the
// ToString implementation above.
BEGIN_TEST(BinaryNumberConstructorsShouldBeEquivalent)
{
BinaryNumber bn1(7);
BinaryNumber bn2("111");
WIN_ASSERT_EQUAL(bn1, bn2);
}
END_TEST
// The following three tests demonstrate expected exceptions.
BEGIN_TEST(BinaryNumberStringConstructorOnlyAllowsOnesAndZeros)
{
WIN_ASSERT_THROWS(BinaryNumber("-100"), BinaryNumber::InvalidInputException,
_T("Only ones and zeros should be allowed."));
}
END_TEST
BEGIN_TEST(BinaryNumberStringConstructorDisallowsStringTooLong)
{
char longString[BinaryNumber::MaxStringValueLength + 2] = "";
memset(longString, '1', ARRAYSIZE(longString) - 1);
WIN_ASSERT_THROWS(BinaryNumber(longString),
BinaryNumber::InvalidInputException,
_T("BinaryNumber constructor should restrict length of string."));
}
END_TEST
BEGIN_TEST(BinaryNumberPlusRecognizesIntegerOverflow)
{
unsigned short s1 = USHRT_MAX / 2 + 1;
unsigned short s2 = s1;
BinaryNumber bn1(s1);
BinaryNumber bn2(s2);
WIN_ASSERT_THROWS(bn1 + bn2, BinaryNumber::IntegerOverflowException);
}
END_TEST
// Leading zeros are added to make all BinaryNumber string representations
// the same length. This test verifies one instance of that.
BEGIN_TEST(BinaryNumberStringConstructorShouldNormalizeWithLeadingZeroes)
{
BinaryNumber bn1("00000111");
BinaryNumber bn2("111");
WIN_ASSERT_EQUAL(bn1, bn2);
}
END_TEST
// Here we demonstrate that any way you construct a BinaryNumber of value
// zero, it ends up the same.
BEGIN_TEST(BinaryNumberConstructorsHandleZeroCorrectly)
{
BinaryNumber bn1("0");
WIN_ASSERT_EQUAL(0, bn1.NumericValue);
WIN_ASSERT_STRING_EQUAL("0000000000000000", bn1.StringValue);
BinaryNumber bn2((unsigned short)0);
WIN_ASSERT_EQUAL(bn1, bn2);
}
END_TEST
// ------------------------------------------------------------
// Tests that use fixture
// ------------------------------------------------------------
namespace
{
// Here's our "data provider"--a file local instance of a class that was
// designed to be used in this way. (See notes in *DataProvider.h.)
BinaryNumberTestDataProvider s_dataProvider;
// We first declare a fixture, then we can create SETUP and TEARDOWN functions,
// and refer to it as the second parameter of the BEGIN_TESTF macro.
// SETUP and TEARDOWN will be called before and after (respectively) running
// any test that uses the fixture.
FIXTURE(BinaryNumberTestDataFixture);
// This function opens the "data provider" object. Since that object is meant
// to read from a text file, this function prepares the full path to the text
// file and passes it to the data provider's "Open" method.
SETUP(BinaryNumberTestDataFixture)
{
// Get directory for test data. If the environment variable TestDir is set,
// use that. Otherwise use the current directory. Note that you can
// set environment variables on the WinUnit command line via the "--"
// option.
TCHAR buffer[MAX_PATH] = _T("");
bool testDirectoryVarSet = WinUnit::Environment::GetVariable(_T("TestDir"),
buffer, MAX_PATH);
if (!testDirectoryVarSet)
{
WIN_TRACE("INFO: Environment variable TestDir not set; "
"looking for BinaryNumberTestData.txt in current directory.\n");
DWORD charCount = GetCurrentDirectory(MAX_PATH, buffer);
// This macro should be used to verify the results of any WinAPI
// function that sets last error (i.e. in the documentation it tells
// you to call GetLastError for more error information). For the first
// argument, you pass an expression that should be true if the function
// succeeded. If it fails, it will show the system error message
// associated with the result of GetLastError(), as well as whatever
// message you passed in.
WIN_ASSERT_WINAPI_SUCCESS(charCount != 0 && charCount <= MAX_PATH,
_T("GetCurrentDirectory failed."));
}
// Append trailing backslash if necessary.
size_t directoryLength = _tcslen(buffer);
WIN_ASSERT_TRUE(directoryLength < MAX_PATH - 1,
_T("Directory name too long: %s."), buffer);
if (directoryLength > 0 && buffer[directoryLength - 1] != _T('\\'))
{
_tcsncat_s(buffer, ARRAYSIZE(buffer), _T("\\"), _TRUNCATE);
}
// Append filename.
WIN_ASSERT_ZERO(::_tcsncat_s(buffer, MAX_PATH,
_T("BinaryNumberTestData.txt"), _TRUNCATE));
// Finally, open the data provider.
s_dataProvider.Open(buffer);
}
TEARDOWN(BinaryNumberTestDataFixture)
{
s_dataProvider.Close();
}
// Here we're using the "data provider" (which reads lines from a file) to
// test multiple combinations.
BEGIN_TESTF(BinaryNumberStringConstructorTest, BinaryNumberTestDataFixture)
{
// The "data rows" (non-comment, non-empty lines in a file) contain a
// string value and a numeric value.
// We can use trace statements to make it easier to see on which row an
// error occurs, if any.
BinaryNumberDataRow row;
while (s_dataProvider.GetNextDataRow(row))
{
// Ensure that there was a "StringValue" field in the row. (Note
// that this value will be invalid/changed once you get the next
// data row, so use it in this loop or copy it.)
const char* stringValue = NULL;
WIN_ASSERT_TRUE(row.GetItem("StringValue", stringValue),
_T("\"StringValue\" field not found in row %d."), row.LineNumber);
// Ensure there was a "NumericValue" field found in the row.
unsigned int numericValue = 0;
WIN_ASSERT_TRUE(row.GetItem("NumericValue", numericValue),
_T("\"NumericValue\" field not found in row %d."), row.LineNumber);
// Writing out a trace statement makes it easier to tell which row
// a failure occurred on.
WIN_TRACE("INFO: Line %d: \"%s\" [%d].\n", row.LineNumber, stringValue, numericValue);
// Now we just want to make sure that the BinaryNumber constructed
// from the string value has the expected numeric value.
BinaryNumber binaryNumber(stringValue);
WIN_ASSERT_EQUAL(numericValue, binaryNumber.NumericValue,
_T("Data file, line %d."), row.LineNumber);
}
}
END_TESTF
// This test is the same as the previous one, except it's intended to test
// the numeric constructor.
BEGIN_TESTF(BinaryNumberNumericConstructorTest, BinaryNumberTestDataFixture)
{
BinaryNumberDataRow row;
while (s_dataProvider.GetNextDataRow(row))
{
const char* stringValue = NULL;
WIN_ASSERT_TRUE(row.GetItem("StringValue", stringValue),
_T("\"StringValue\" field not found in row %d."), row.LineNumber);
unsigned int numericValue = 0;
WIN_ASSERT_TRUE(row.GetItem("NumericValue", numericValue),
_T("\"NumericValue\" field not found in row %d."), row.LineNumber);
WIN_TRACE("INFO: Line %d: \"%s\" [%d].\n", row.LineNumber, stringValue, numericValue);
BinaryNumber bn1(numericValue);
// Since string values are normalized in the constructor, we can't
// just compare the string value of bn1 with the string value found
// in the file--we have to create another object using the string value
// and make sure they were both normalized to the same thing.
BinaryNumber bn2(stringValue);
WIN_ASSERT_STRING_EQUAL(bn1.StringValue, bn2.StringValue,
_T("(Data file, line %d."), row.LineNumber);
WIN_ASSERT_EQUAL(bn1, bn2, _T("Data file, line %d."), row.LineNumber);
}
}
END_TESTF
}<|fim▁end|> | |
<|file_name|>queue.js<|end_file_name|><|fim▁begin|>(function() {
if (typeof module === "undefined") self.queue = queue;
else module.exports = queue;
queue.version = "1.0.1";
function queue(parallelism) {
var queue = {},
active = 0, // number of in-flight deferrals
remaining = 0, // number of deferrals remaining
head, tail, // singly-linked list of deferrals
error = null,
results = [],
await = noop,
awaitAll;
if (arguments.length < 1) parallelism = Infinity;
queue.defer = function() {
if (!error) {
var node = arguments;
node.index = results.push(undefined) - 1;
if (tail) tail.next = node, tail = tail.next;
else head = tail = node;
++remaining;
pop();
}
return queue;
};
queue.await = function(f) {
await = f;
awaitAll = false;
if (!remaining) notify();
return queue;
};
queue.awaitAll = function(f) {
await = f;
awaitAll = true;
if (!remaining) notify();
return queue;
};
function pop() {
if (head && active < parallelism) {
var node = head,
f = node[0],
a = Array.prototype.slice.call(node, 1),
i = node.index;
if (head === tail) head = tail = null;
else head = head.next;
++active;
a.push(function(e, r) {
--active;
if (error != null) return;
if (e != null) {
// clearing remaining cancels subsequent callbacks
// clearing head stops queued tasks from being executed
// setting error ignores subsequent calls to defer
error = e;
remaining = results = head = tail = null;
notify();
} else {
results[i] = r;
if (--remaining) pop();
else notify();
}
});
f.apply(null, a);
}
}
function notify() {
if (error != null) await(error);
else if (awaitAll) await(null, results);
else await.apply(null, [null].concat(results));<|fim▁hole|> }
function noop() {}
})();<|fim▁end|> | }
return queue; |
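// Illustrative usage sketch (not part of the original file); `task` is a
// hypothetical node-style async function (error-first callback):
//
// function task(name, callback) {
//   setTimeout(function() { callback(null, name.toUpperCase()); }, 10);
// }
//
// queue(2)
//     .defer(task, "a")
//     .defer(task, "b")
//     .awaitAll(function(error, results) {
//       if (error) throw error;
//       console.log(results); // ["A", "B"]
//     });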
<|file_name|>evaluator_test.go<|end_file_name|><|fim▁begin|>// Copyright 2015 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// See the License for the specific language governing permissions and
// limitations under the License.
package evaluator
import (
"testing"
"time"
"fmt"
. "github.com/insionng/yougam/libraries/pingcap/check"
"github.com/insionng/yougam/libraries/pingcap/tidb/ast"
"github.com/insionng/yougam/libraries/pingcap/tidb/model"
"github.com/insionng/yougam/libraries/pingcap/tidb/mysql"
"github.com/insionng/yougam/libraries/pingcap/tidb/parser"
"github.com/insionng/yougam/libraries/pingcap/tidb/parser/opcode"
"github.com/insionng/yougam/libraries/pingcap/tidb/sessionctx/variable"
"github.com/insionng/yougam/libraries/pingcap/tidb/util/charset"
"github.com/insionng/yougam/libraries/pingcap/tidb/util/mock"
"github.com/insionng/yougam/libraries/pingcap/tidb/util/testleak"
"github.com/insionng/yougam/libraries/pingcap/tidb/util/testutil"
"github.com/insionng/yougam/libraries/pingcap/tidb/util/types"
)
var _ = Suite(&testEvaluatorSuite{})
func TestT(t *testing.T) {
TestingT(t)
}
type testEvaluatorSuite struct {
}
func parseExpr(c *C, expr string) ast.ExprNode {
s, err := parser.ParseOneStmt("select "+expr, "", "")
c.Assert(err, IsNil)
stmt := s.(*ast.SelectStmt)
return stmt.Fields.Fields[0].Expr
}
type testCase struct {
exprStr string
resultStr string
}
func (s *testEvaluatorSuite) runTests(c *C, cases []testCase) {
ctx := mock.NewContext()
for _, ca := range cases {
expr := parseExpr(c, ca.exprStr)
val, err := Eval(ctx, expr)
c.Assert(err, IsNil)
valStr := fmt.Sprintf("%v", val.GetValue())
c.Assert(valStr, Equals, ca.resultStr, Commentf("for %s", ca.exprStr))
}
}
func (s *testEvaluatorSuite) TestBetween(c *C) {
defer testleak.AfterTest(c)()
cases := []testCase{
{exprStr: "1 between 2 and 3", resultStr: "0"},
{exprStr: "1 not between 2 and 3", resultStr: "1"},
}
s.runTests(c, cases)
}
func (s *testEvaluatorSuite) TestBinopComparison(c *C) {
defer testleak.AfterTest(c)()
ctx := mock.NewContext()
tbl := []struct {
lhs interface{}
op opcode.Op
rhs interface{}
result int64 // 0 for false, 1 for true
}{
// test EQ
{1, opcode.EQ, 2, 0},
{false, opcode.EQ, false, 1},
{false, opcode.EQ, true, 0},
{true, opcode.EQ, true, 1},
{true, opcode.EQ, false, 0},
{"1", opcode.EQ, true, 1},
{"1", opcode.EQ, false, 0},
// test NEQ
{1, opcode.NE, 2, 1},
{false, opcode.NE, false, 0},
{false, opcode.NE, true, 1},
{true, opcode.NE, true, 0},
{"1", opcode.NE, true, 0},
{"1", opcode.NE, false, 1},
// test GT, GE
{1, opcode.GT, 0, 1},
{1, opcode.GT, 1, 0},
{1, opcode.GE, 1, 1},
{3.14, opcode.GT, 3, 1},
{3.14, opcode.GE, 3.14, 1},
// test LT, LE
{1, opcode.LT, 2, 1},
{1, opcode.LT, 1, 0},
{1, opcode.LE, 1, 1},
}
for _, t := range tbl {
expr := &ast.BinaryOperationExpr{Op: t.op, L: ast.NewValueExpr(t.lhs), R: ast.NewValueExpr(t.rhs)}
v, err := Eval(ctx, expr)
c.Assert(err, IsNil)
val, err := v.ToBool()
c.Assert(err, IsNil)
c.Assert(val, Equals, t.result)
}
// test nil
nilTbl := []struct {
lhs interface{}
op opcode.Op
rhs interface{}
}{
{nil, opcode.EQ, nil},
{nil, opcode.EQ, 1},
{nil, opcode.NE, nil},
{nil, opcode.NE, 1},
{nil, opcode.LT, nil},
{nil, opcode.LT, 1},
{nil, opcode.LE, nil},
{nil, opcode.LE, 1},
{nil, opcode.GT, nil},
{nil, opcode.GT, 1},
{nil, opcode.GE, nil},
{nil, opcode.GE, 1},
}
for _, t := range nilTbl {
expr := &ast.BinaryOperationExpr{Op: t.op, L: ast.NewValueExpr(t.lhs), R: ast.NewValueExpr(t.rhs)}
v, err := Eval(ctx, expr)
c.Assert(err, IsNil)
c.Assert(v.Kind(), Equals, types.KindNull)
}
}
func (s *testEvaluatorSuite) TestBinopLogic(c *C) {
defer testleak.AfterTest(c)()
ctx := mock.NewContext()
tbl := []struct {
lhs interface{}
op opcode.Op
rhs interface{}
ret interface{}
}{
{nil, opcode.AndAnd, 1, nil},
{nil, opcode.AndAnd, 0, 0},
{nil, opcode.OrOr, 1, 1},
{nil, opcode.OrOr, 0, nil},
{nil, opcode.LogicXor, 1, nil},
{nil, opcode.LogicXor, 0, nil},
{1, opcode.AndAnd, 0, 0},
{1, opcode.AndAnd, 1, 1},
{1, opcode.OrOr, 0, 1},
{1, opcode.OrOr, 1, 1},
{0, opcode.OrOr, 0, 0},
{1, opcode.LogicXor, 0, 1},
{1, opcode.LogicXor, 1, 0},
{0, opcode.LogicXor, 0, 0},
{0, opcode.LogicXor, 1, 1},
}
for _, t := range tbl {
expr := &ast.BinaryOperationExpr{Op: t.op, L: ast.NewValueExpr(t.lhs), R: ast.NewValueExpr(t.rhs)}
v, err := Eval(ctx, expr)
c.Assert(err, IsNil)
switch x := t.ret.(type) {
case nil:
c.Assert(v.Kind(), Equals, types.KindNull)
case int:
c.Assert(v, testutil.DatumEquals, types.NewDatum(int64(x)))
}
}
}
func (s *testEvaluatorSuite) TestBinopBitop(c *C) {
defer testleak.AfterTest(c)()
ctx := mock.NewContext()
tbl := []struct {
lhs interface{}
op opcode.Op
rhs interface{}
ret interface{}
}{
{1, opcode.And, 1, 1},
{1, opcode.Or, 1, 1},
{1, opcode.Xor, 1, 0},
{1, opcode.LeftShift, 1, 2},
{2, opcode.RightShift, 1, 1},
{nil, opcode.And, 1, nil},
{1, opcode.And, nil, nil},
{nil, opcode.Or, 1, nil},
{nil, opcode.Xor, 1, nil},
{nil, opcode.LeftShift, 1, nil},
{nil, opcode.RightShift, 1, nil},
}
for _, t := range tbl {
expr := &ast.BinaryOperationExpr{Op: t.op, L: ast.NewValueExpr(t.lhs), R: ast.NewValueExpr(t.rhs)}
v, err := Eval(ctx, expr)
c.Assert(err, IsNil)
switch x := t.ret.(type) {
case nil:
c.Assert(v.Kind(), Equals, types.KindNull)
case int:
c.Assert(v, testutil.DatumEquals, types.NewDatum(uint64(x)))
}
}
}
func (s *testEvaluatorSuite) TestBinopNumeric(c *C) {
defer testleak.AfterTest(c)()
ctx := mock.NewContext()
tbl := []struct {
lhs interface{}
op opcode.Op
rhs interface{}
ret interface{}
}{
// plus
{1, opcode.Plus, 1, 2},
{1, opcode.Plus, uint64(1), 2},
{1, opcode.Plus, "1", 2},
{1, opcode.Plus, mysql.NewDecimalFromInt(1, 0), 2},
{uint64(1), opcode.Plus, 1, 2},
{uint64(1), opcode.Plus, uint64(1), 2},
{1, opcode.Plus, []byte("1"), 2},
{1, opcode.Plus, mysql.Hex{Value: 1}, 2},
{1, opcode.Plus, mysql.Bit{Value: 1, Width: 1}, 2},
{1, opcode.Plus, mysql.Enum{Name: "a", Value: 1}, 2},
{1, opcode.Plus, mysql.Set{Name: "a", Value: 1}, 2},
// minus
{1, opcode.Minus, 1, 0},
{1, opcode.Minus, uint64(1), 0},
{1, opcode.Minus, float64(1), 0},
{1, opcode.Minus, mysql.NewDecimalFromInt(1, 0), 0},
{uint64(1), opcode.Minus, 1, 0},
{uint64(1), opcode.Minus, uint64(1), 0},
{mysql.NewDecimalFromInt(1, 0), opcode.Minus, 1, 0},
{"1", opcode.Minus, []byte("1"), 0},
// mul
{1, opcode.Mul, 1, 1},
{1, opcode.Mul, uint64(1), 1},
{1, opcode.Mul, float64(1), 1},
{1, opcode.Mul, mysql.NewDecimalFromInt(1, 0), 1},
{uint64(1), opcode.Mul, 1, 1},
{uint64(1), opcode.Mul, uint64(1), 1},
{mysql.Time{}, opcode.Mul, 0, 0},
{mysql.ZeroDuration, opcode.Mul, 0, 0},
{mysql.Time{Time: time.Now(), Fsp: 0, Type: mysql.TypeDatetime}, opcode.Mul, 0, 0},
{mysql.Time{Time: time.Now(), Fsp: 6, Type: mysql.TypeDatetime}, opcode.Mul, 0, 0},
{mysql.Duration{Duration: 100000000, Fsp: 6}, opcode.Mul, 0, 0},
// div
{1, opcode.Div, float64(1), 1},
{1, opcode.Div, float64(0), nil},
{1, opcode.Div, 2, 0.5},
{1, opcode.Div, 0, nil},
// int div
{1, opcode.IntDiv, 2, 0},
{1, opcode.IntDiv, uint64(2), 0},
{1, opcode.IntDiv, 0, nil},
{1, opcode.IntDiv, uint64(0), nil},
{uint64(1), opcode.IntDiv, 2, 0},
{uint64(1), opcode.IntDiv, uint64(2), 0},
{uint64(1), opcode.IntDiv, 0, nil},
{uint64(1), opcode.IntDiv, uint64(0), nil},
{1.0, opcode.IntDiv, 2.0, 0},
{1.0, opcode.IntDiv, 0, nil},
// mod
{10, opcode.Mod, 2, 0},
{10, opcode.Mod, uint64(2), 0},
{10, opcode.Mod, 0, nil},
{10, opcode.Mod, uint64(0), nil},
{-10, opcode.Mod, uint64(2), 0},
{uint64(10), opcode.Mod, 2, 0},
{uint64(10), opcode.Mod, uint64(2), 0},
{uint64(10), opcode.Mod, 0, nil},
{uint64(10), opcode.Mod, uint64(0), nil},
{uint64(10), opcode.Mod, -2, 0},
{float64(10), opcode.Mod, 2, 0},
{float64(10), opcode.Mod, 0, nil},
{mysql.NewDecimalFromInt(10, 0), opcode.Mod, 2, 0},
{mysql.NewDecimalFromInt(10, 0), opcode.Mod, 0, nil},
}
for _, t := range tbl {
expr := &ast.BinaryOperationExpr{Op: t.op, L: ast.NewValueExpr(t.lhs), R: ast.NewValueExpr(t.rhs)}
v, err := Eval(ctx, expr)
c.Assert(err, IsNil)
switch v.Kind() {
case types.KindNull:
c.Assert(t.ret, IsNil)
default:
// we use float64 as the result type check for all.
f, err := v.ToFloat64()
c.Assert(err, IsNil)
r, err := types.ToFloat64(t.ret)
c.Assert(err, IsNil)
c.Assert(r, Equals, f)
}
}
}
func (s *testEvaluatorSuite) TestCaseWhen(c *C) {
defer testleak.AfterTest(c)()
cases := []testCase{
{
exprStr: "case 1 when 1 then 'str1' when 2 then 'str2' end",
resultStr: "str1",
},
{
exprStr: "case 2 when 1 then 'str1' when 2 then 'str2' end",
resultStr: "str2",
},
{
exprStr: "case 3 when 1 then 'str1' when 2 then 'str2' end",
resultStr: "<nil>",
},
{
exprStr: "case 4 when 1 then 'str1' when 2 then 'str2' else 'str3' end",
resultStr: "str3",
},
}
s.runTests(c, cases)
	// When the expression value is changed, the result is set back to null.
valExpr := ast.NewValueExpr(1)
whenClause := &ast.WhenClause{Expr: ast.NewValueExpr(1), Result: ast.NewValueExpr(1)}
caseExpr := &ast.CaseExpr{
Value: valExpr,
WhenClauses: []*ast.WhenClause{whenClause},
}
ctx := mock.NewContext()
v, err := Eval(ctx, caseExpr)
c.Assert(err, IsNil)
c.Assert(v, testutil.DatumEquals, types.NewDatum(int64(1)))
valExpr.SetValue(4)
ast.ResetEvaluatedFlag(caseExpr)
v, err = Eval(ctx, caseExpr)
c.Assert(err, IsNil)
c.Assert(v.Kind(), Equals, types.KindNull)
}
func (s *testEvaluatorSuite) TestCall(c *C) {
defer testleak.AfterTest(c)()
ctx := mock.NewContext()
// Test case for correct number of arguments
expr := &ast.FuncCallExpr{
FnName: model.NewCIStr("date"),
Args: []ast.ExprNode{ast.NewValueExpr("2015-12-21 11:11:11")},
}
v, err := Eval(ctx, expr)
c.Assert(err, IsNil)
c.Assert(v.Kind(), Equals, types.KindMysqlTime)
c.Assert(v.GetMysqlTime().String(), Equals, "2015-12-21")
// Test case for unlimited upper bound
expr = &ast.FuncCallExpr{
FnName: model.NewCIStr("concat"),
Args: []ast.ExprNode{ast.NewValueExpr("Ti"),
ast.NewValueExpr("D"), ast.NewValueExpr("B")},
}
v, err = Eval(ctx, expr)
c.Assert(err, IsNil)
c.Assert(v.Kind(), Equals, types.KindString)
c.Assert(v.GetString(), Equals, "TiDB")
// Test case for unknown function
expr = &ast.FuncCallExpr{
FnName: model.NewCIStr("unknown"),
Args: []ast.ExprNode{},
}
_, err = Eval(ctx, expr)
c.Assert(err, NotNil)
// Test case for invalid number of arguments, violating the lower bound
expr = &ast.FuncCallExpr{
FnName: model.NewCIStr("date"),
Args: []ast.ExprNode{},
}
_, err = Eval(ctx, expr)
c.Assert(err, NotNil)
// Test case for invalid number of arguments, violating the upper bound
expr = &ast.FuncCallExpr{
FnName: model.NewCIStr("date"),
Args: []ast.ExprNode{ast.NewValueExpr("2015-12-21"),
ast.NewValueExpr("2015-12-22")},
}
_, err = Eval(ctx, expr)
c.Assert(err, NotNil)
}
func (s *testEvaluatorSuite) TestCast(c *C) {
defer testleak.AfterTest(c)()
f := types.NewFieldType(mysql.TypeLonglong)
expr := &ast.FuncCastExpr{
Expr: ast.NewValueExpr(1),
Tp: f,
}
ast.SetFlag(expr)
ctx := mock.NewContext()
v, err := Eval(ctx, expr)
c.Assert(err, IsNil)
c.Assert(v, testutil.DatumEquals, types.NewDatum(int64(1)))
f.Flag |= mysql.UnsignedFlag
v, err = Eval(ctx, expr)
c.Assert(err, IsNil)
c.Assert(v, testutil.DatumEquals, types.NewDatum(uint64(1)))
f.Tp = mysql.TypeString
f.Charset = charset.CharsetBin
v, err = Eval(ctx, expr)
c.Assert(err, IsNil)
c.Assert(v, testutil.DatumEquals, types.NewDatum([]byte("1")))
f.Tp = mysql.TypeString
f.Charset = "utf8"
v, err = Eval(ctx, expr)
c.Assert(err, IsNil)
c.Assert(v, testutil.DatumEquals, types.NewDatum("1"))
expr.Expr = ast.NewValueExpr(nil)
v, err = Eval(ctx, expr)
c.Assert(err, IsNil)
c.Assert(v.Kind(), Equals, types.KindNull)
}
func (s *testEvaluatorSuite) TestExtract(c *C) {
defer testleak.AfterTest(c)()
str := "2011-11-11 10:10:10.123456"
tbl := []struct {
Unit string
Expect int64
}{
{"MICROSECOND", 123456},
{"SECOND", 10},
{"MINUTE", 10},
{"HOUR", 10},
{"DAY", 11},
{"WEEK", 45},
{"MONTH", 11},
{"QUARTER", 4},
{"YEAR", 2011},
{"SECOND_MICROSECOND", 10123456},
{"MINUTE_MICROSECOND", 1010123456},
{"MINUTE_SECOND", 1010},
{"HOUR_MICROSECOND", 101010123456},
{"HOUR_SECOND", 101010},
{"HOUR_MINUTE", 1010},
{"DAY_MICROSECOND", 11101010123456},
{"DAY_SECOND", 11101010},
{"DAY_MINUTE", 111010},
{"DAY_HOUR", 1110},
{"YEAR_MONTH", 201111},
}
ctx := mock.NewContext()
for _, t := range tbl {<|fim▁hole|> Args: []ast.ExprNode{ast.NewValueExpr(t.Unit), ast.NewValueExpr(str)},
}
v, err := Eval(ctx, e)
c.Assert(err, IsNil)
c.Assert(v, testutil.DatumEquals, types.NewDatum(t.Expect))
}
// Test nil
e := &ast.FuncCallExpr{
FnName: model.NewCIStr("EXTRACT"),
Args: []ast.ExprNode{ast.NewValueExpr("SECOND"), ast.NewValueExpr(nil)},
}
v, err := Eval(ctx, e)
c.Assert(err, IsNil)
c.Assert(v.Kind(), Equals, types.KindNull)
}
func (s *testEvaluatorSuite) TestPatternIn(c *C) {
defer testleak.AfterTest(c)()
cases := []testCase{
{
exprStr: "1 not in (1, 2, 3)",
resultStr: "0",
},
{
exprStr: "1 in (1, 2, 3)",
resultStr: "1",
},
{
exprStr: "1 in (2, 3)",
resultStr: "0",
},
{
exprStr: "NULL in (2, 3)",
resultStr: "<nil>",
},
{
exprStr: "NULL not in (2, 3)",
resultStr: "<nil>",
},
{
exprStr: "NULL in (NULL, 3)",
resultStr: "<nil>",
},
{
exprStr: "1 in (1, NULL)",
resultStr: "1",
},
{
exprStr: "1 in (NULL, 1)",
resultStr: "1",
},
{
exprStr: "2 in (1, NULL)",
resultStr: "<nil>",
},
{
exprStr: "(-(23)++46/51*+51) in (+23)",
resultStr: "0",
},
}
s.runTests(c, cases)
}
func (s *testEvaluatorSuite) TestIsNull(c *C) {
defer testleak.AfterTest(c)()
cases := []testCase{
{
exprStr: "1 IS NULL",
resultStr: "0",
},
{
exprStr: "1 IS NOT NULL",
resultStr: "1",
},
{
exprStr: "NULL IS NULL",
resultStr: "1",
},
{
exprStr: "NULL IS NOT NULL",
resultStr: "0",
},
}
s.runTests(c, cases)
}
func (s *testEvaluatorSuite) TestIsTruth(c *C) {
defer testleak.AfterTest(c)()
cases := []testCase{
{
exprStr: "1 IS TRUE",
resultStr: "1",
},
{
exprStr: "2 IS TRUE",
resultStr: "1",
},
{
exprStr: "0 IS TRUE",
resultStr: "0",
},
{
exprStr: "NULL IS TRUE",
resultStr: "0",
},
{
exprStr: "1 IS FALSE",
resultStr: "0",
},
{
exprStr: "2 IS FALSE",
resultStr: "0",
},
{
exprStr: "0 IS FALSE",
resultStr: "1",
},
{
exprStr: "NULL IS NOT FALSE",
resultStr: "1",
},
{
exprStr: "1 IS NOT TRUE",
resultStr: "0",
},
{
exprStr: "2 IS NOT TRUE",
resultStr: "0",
},
{
exprStr: "0 IS NOT TRUE",
resultStr: "1",
},
{
exprStr: "NULL IS NOT TRUE",
resultStr: "1",
},
{
exprStr: "1 IS NOT FALSE",
resultStr: "1",
},
{
exprStr: "2 IS NOT FALSE",
resultStr: "1",
},
{
exprStr: "0 IS NOT FALSE",
resultStr: "0",
},
{
exprStr: "NULL IS NOT FALSE",
resultStr: "1",
},
}
s.runTests(c, cases)
}
func (s *testEvaluatorSuite) TestLike(c *C) {
defer testleak.AfterTest(c)()
tbl := []struct {
pattern string
input string
escape byte
match bool
}{
{"", "a", '\\', false},
{"a", "a", '\\', true},
{"a", "b", '\\', false},
{"aA", "aA", '\\', true},
{"_", "a", '\\', true},
{"_", "ab", '\\', false},
{"__", "b", '\\', false},
{"_ab", "AAB", '\\', true},
{"%", "abcd", '\\', true},
{"%", "", '\\', true},
{"%a", "AAA", '\\', true},
{"%b", "AAA", '\\', false},
{"b%", "BBB", '\\', true},
{"%a%", "BBB", '\\', false},
{"%a%", "BAB", '\\', true},
{"a%", "BBB", '\\', false},
{`\%a`, `%a`, '\\', true},
{`\%a`, `aa`, '\\', false},
{`\_a`, `_a`, '\\', true},
{`\_a`, `aa`, '\\', false},
{`\\_a`, `\xa`, '\\', true},
{`\a\b`, `\a\b`, '\\', true},
{"%%_", `abc`, '\\', true},
{`+_a`, `_a`, '+', true},
{`+%a`, `%a`, '+', true},
{`\%a`, `%a`, '+', false},
{`++a`, `+a`, '+', true},
{`++_a`, `+xa`, '+', true},
}
for _, v := range tbl {
patChars, patTypes := compilePattern(v.pattern, v.escape)
match := doMatch(v.input, patChars, patTypes)
c.Assert(match, Equals, v.match, Commentf("%v", v))
}
cases := []testCase{
{
exprStr: "'a' LIKE ''",
resultStr: "0",
},
{
exprStr: "'a' LIKE 'a'",
resultStr: "1",
},
{
exprStr: "'a' LIKE 'b'",
resultStr: "0",
},
{
exprStr: "'aA' LIKE 'Aa'",
resultStr: "1",
},
{
exprStr: "'aAb' LIKE 'Aa%'",
resultStr: "1",
},
{
exprStr: "'aAb' LIKE 'Aa_'",
resultStr: "1",
},
}
s.runTests(c, cases)
}
func (s *testEvaluatorSuite) TestRegexp(c *C) {
defer testleak.AfterTest(c)()
tbl := []struct {
pattern string
input string
match int64
}{
{"^$", "a", 0},
{"a", "a", 1},
{"a", "b", 0},
{"aA", "aA", 1},
{".", "a", 1},
{"^.$", "ab", 0},
{"..", "b", 0},
{".ab", "aab", 1},
{".*", "abcd", 1},
}
ctx := mock.NewContext()
for _, v := range tbl {
pattern := &ast.PatternRegexpExpr{
Pattern: ast.NewValueExpr(v.pattern),
Expr: ast.NewValueExpr(v.input),
}
match, err := Eval(ctx, pattern)
c.Assert(err, IsNil)
c.Assert(match, testutil.DatumEquals, types.NewDatum(v.match), Commentf("%v", v))
}
}
func (s *testEvaluatorSuite) TestUnaryOp(c *C) {
defer testleak.AfterTest(c)()
tbl := []struct {
arg interface{}
op opcode.Op
result interface{}
}{
// test NOT.
{1, opcode.Not, int64(0)},
{0, opcode.Not, int64(1)},
{nil, opcode.Not, nil},
{mysql.Hex{Value: 0}, opcode.Not, int64(1)},
{mysql.Bit{Value: 0, Width: 1}, opcode.Not, int64(1)},
{mysql.Enum{Name: "a", Value: 1}, opcode.Not, int64(0)},
{mysql.Set{Name: "a", Value: 1}, opcode.Not, int64(0)},
// test BitNeg.
{nil, opcode.BitNeg, nil},
{-1, opcode.BitNeg, uint64(0)},
// test Plus.
{nil, opcode.Plus, nil},
{float64(1.0), opcode.Plus, float64(1.0)},
{int64(1), opcode.Plus, int64(1)},
{int64(1), opcode.Plus, int64(1)},
{uint64(1), opcode.Plus, uint64(1)},
{"1.0", opcode.Plus, "1.0"},
{[]byte("1.0"), opcode.Plus, []byte("1.0")},
{mysql.Hex{Value: 1}, opcode.Plus, mysql.Hex{Value: 1}},
{mysql.Bit{Value: 1, Width: 1}, opcode.Plus, mysql.Bit{Value: 1, Width: 1}},
{true, opcode.Plus, int64(1)},
{false, opcode.Plus, int64(0)},
{mysql.Enum{Name: "a", Value: 1}, opcode.Plus, mysql.Enum{Name: "a", Value: 1}},
{mysql.Set{Name: "a", Value: 1}, opcode.Plus, mysql.Set{Name: "a", Value: 1}},
// test Minus.
{nil, opcode.Minus, nil},
{float64(1.0), opcode.Minus, float64(-1.0)},
{int64(1), opcode.Minus, int64(-1)},
{int64(1), opcode.Minus, int64(-1)},
{uint64(1), opcode.Minus, -int64(1)},
{"1.0", opcode.Minus, -1.0},
{[]byte("1.0"), opcode.Minus, -1.0},
{mysql.Hex{Value: 1}, opcode.Minus, -1.0},
{mysql.Bit{Value: 1, Width: 1}, opcode.Minus, -1.0},
{true, opcode.Minus, int64(-1)},
{false, opcode.Minus, int64(0)},
{mysql.Enum{Name: "a", Value: 1}, opcode.Minus, -1.0},
{mysql.Set{Name: "a", Value: 1}, opcode.Minus, -1.0},
}
ctx := mock.NewContext()
for i, t := range tbl {
expr := &ast.UnaryOperationExpr{}
expr.Op = t.op
expr.V = ast.NewValueExpr(t.arg)
result, err := Eval(ctx, expr)
c.Assert(err, IsNil)
c.Assert(result, testutil.DatumEquals, types.NewDatum(t.result), Commentf("%d", i))
}
tbl = []struct {
arg interface{}
op opcode.Op
result interface{}
}{
{mysql.NewDecimalFromInt(1, 0), opcode.Plus, mysql.NewDecimalFromInt(1, 0)},
{mysql.Duration{Duration: time.Duration(838*3600 + 59*60 + 59), Fsp: mysql.DefaultFsp}, opcode.Plus,
mysql.Duration{Duration: time.Duration(838*3600 + 59*60 + 59), Fsp: mysql.DefaultFsp}},
{mysql.Time{Time: time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC), Type: mysql.TypeDatetime, Fsp: 0}, opcode.Plus, mysql.Time{Time: time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC), Type: mysql.TypeDatetime, Fsp: 0}},
{mysql.NewDecimalFromInt(1, 0), opcode.Minus, mysql.NewDecimalFromInt(-1, 0)},
{mysql.ZeroDuration, opcode.Minus, mysql.NewDecimalFromInt(0, 0)},
{mysql.Time{Time: time.Date(2009, time.November, 10, 23, 0, 0, 0, time.UTC), Type: mysql.TypeDatetime, Fsp: 0}, opcode.Minus, mysql.NewDecimalFromInt(-20091110230000, 0)},
}
for _, t := range tbl {
expr := &ast.UnaryOperationExpr{Op: t.op, V: ast.NewValueExpr(t.arg)}
result, err := Eval(ctx, expr)
c.Assert(err, IsNil)
ret, err := result.CompareDatum(types.NewDatum(t.result))
c.Assert(err, IsNil)
c.Assert(ret, Equals, 0)
}
}
func (s *testEvaluatorSuite) TestColumnNameExpr(c *C) {
defer testleak.AfterTest(c)()
ctx := mock.NewContext()
value1 := ast.NewValueExpr(1)
rf := &ast.ResultField{Expr: value1}
expr := &ast.ColumnNameExpr{Refer: rf}
ast.SetFlag(expr)
result, err := Eval(ctx, expr)
c.Assert(err, IsNil)
c.Assert(result, testutil.DatumEquals, types.NewDatum(int64(1)))
value2 := ast.NewValueExpr(2)
rf.Expr = value2
result, err = Eval(ctx, expr)
c.Assert(err, IsNil)
c.Assert(result, testutil.DatumEquals, types.NewDatum(int64(2)))
}
func (s *testEvaluatorSuite) TestAggFuncAvg(c *C) {
defer testleak.AfterTest(c)()
ctx := mock.NewContext()
avg := &ast.AggregateFuncExpr{
F: ast.AggFuncAvg,
}
avg.CurrentGroup = "emptyGroup"
ast.SetFlag(avg)
result, err := Eval(ctx, avg)
c.Assert(err, IsNil)
// Empty group should return nil.
c.Assert(result.Kind(), Equals, types.KindNull)
avg.Args = []ast.ExprNode{ast.NewValueExpr(2)}
avg.Update()
avg.Args = []ast.ExprNode{ast.NewValueExpr(4)}
avg.Update()
result, err = Eval(ctx, avg)
c.Assert(err, IsNil)
expect, _ := mysql.ConvertToDecimal(3)
c.Assert(result.Kind(), Equals, types.KindMysqlDecimal)
c.Assert(result.GetMysqlDecimal().Equals(expect), IsTrue)
}
func (s *testEvaluatorSuite) TestGetTimeValue(c *C) {
defer testleak.AfterTest(c)()
v, err := GetTimeValue(nil, "2012-12-12 00:00:00", mysql.TypeTimestamp, mysql.MinFsp)
c.Assert(err, IsNil)
c.Assert(v.Kind(), Equals, types.KindMysqlTime)
timeValue := v.GetMysqlTime()
c.Assert(timeValue.String(), Equals, "2012-12-12 00:00:00")
ctx := mock.NewContext()
variable.BindSessionVars(ctx)
sessionVars := variable.GetSessionVars(ctx)
sessionVars.Systems["timestamp"] = ""
v, err = GetTimeValue(ctx, "2012-12-12 00:00:00", mysql.TypeTimestamp, mysql.MinFsp)
c.Assert(err, IsNil)
c.Assert(v.Kind(), Equals, types.KindMysqlTime)
timeValue = v.GetMysqlTime()
c.Assert(timeValue.String(), Equals, "2012-12-12 00:00:00")
sessionVars.Systems["timestamp"] = "0"
v, err = GetTimeValue(ctx, "2012-12-12 00:00:00", mysql.TypeTimestamp, mysql.MinFsp)
c.Assert(err, IsNil)
c.Assert(v.Kind(), Equals, types.KindMysqlTime)
timeValue = v.GetMysqlTime()
c.Assert(timeValue.String(), Equals, "2012-12-12 00:00:00")
delete(sessionVars.Systems, "timestamp")
v, err = GetTimeValue(ctx, "2012-12-12 00:00:00", mysql.TypeTimestamp, mysql.MinFsp)
c.Assert(err, IsNil)
c.Assert(v.Kind(), Equals, types.KindMysqlTime)
timeValue = v.GetMysqlTime()
c.Assert(timeValue.String(), Equals, "2012-12-12 00:00:00")
sessionVars.Systems["timestamp"] = "1234"
tbl := []struct {
Expr interface{}
Ret interface{}
}{
{"2012-12-12 00:00:00", "2012-12-12 00:00:00"},
{CurrentTimestamp, time.Unix(1234, 0).Format(mysql.TimeFormat)},
{ZeroTimestamp, "0000-00-00 00:00:00"},
{ast.NewValueExpr("2012-12-12 00:00:00"), "2012-12-12 00:00:00"},
{ast.NewValueExpr(int64(0)), "0000-00-00 00:00:00"},
{ast.NewValueExpr(nil), nil},
{&ast.FuncCallExpr{FnName: model.NewCIStr(CurrentTimestamp)}, CurrentTimestamp},
{&ast.UnaryOperationExpr{Op: opcode.Minus, V: ast.NewValueExpr(int64(0))}, "0000-00-00 00:00:00"},
}
for i, t := range tbl {
comment := Commentf("expr: %d", i)
v, err := GetTimeValue(ctx, t.Expr, mysql.TypeTimestamp, mysql.MinFsp)
c.Assert(err, IsNil)
switch v.Kind() {
case types.KindMysqlTime:
c.Assert(v.GetMysqlTime().String(), DeepEquals, t.Ret, comment)
default:
c.Assert(v.GetValue(), DeepEquals, t.Ret, comment)
}
}
errTbl := []struct {
Expr interface{}
}{
{"2012-13-12 00:00:00"},
{ast.NewValueExpr("2012-13-12 00:00:00")},
{ast.NewValueExpr(int64(1))},
{&ast.FuncCallExpr{FnName: model.NewCIStr("xxx")}},
{&ast.UnaryOperationExpr{Op: opcode.Minus, V: ast.NewValueExpr(int64(1))}},
}
for _, t := range errTbl {
_, err := GetTimeValue(ctx, t.Expr, mysql.TypeTimestamp, mysql.MinFsp)
c.Assert(err, NotNil)
}
}
func (s *testEvaluatorSuite) TestIsCurrentTimeExpr(c *C) {
defer testleak.AfterTest(c)()
v := IsCurrentTimeExpr(ast.NewValueExpr("abc"))
c.Assert(v, IsFalse)
v = IsCurrentTimeExpr(&ast.FuncCallExpr{FnName: model.NewCIStr("CURRENT_TIMESTAMP")})
c.Assert(v, IsTrue)
}
func (s *testEvaluatorSuite) TestEvaluatedFlag(c *C) {
l := ast.NewValueExpr(int64(1))
r := ast.NewValueExpr(int64(2))
b := &ast.BinaryOperationExpr{L: l, R: r, Op: opcode.Plus}
ast.SetFlag(b)
c.Assert(ast.IsPreEvaluable(b), Equals, true)
ctx := mock.NewContext()
d, err := Eval(ctx, b)
c.Assert(ast.IsEvaluated(b), Equals, true)
c.Assert(err, IsNil)
c.Assert(d, testutil.DatumEquals, types.NewIntDatum(3))
funcCall := &ast.FuncCallExpr{
FnName: model.NewCIStr("abs"),
Args: []ast.ExprNode{ast.NewValueExpr(int(-1))},
}
b = &ast.BinaryOperationExpr{L: funcCall, R: r, Op: opcode.Plus}
ast.ResetEvaluatedFlag(b)
ast.SetFlag(b)
c.Assert(ast.IsPreEvaluable(b), Equals, true)
d, err = Eval(ctx, b)
c.Assert(ast.IsEvaluated(b), Equals, false)
c.Assert(err, IsNil)
c.Assert(d, testutil.DatumEquals, types.NewIntDatum(3))
rf := &ast.ResultField{Expr: ast.NewValueExpr(int64(1))}
colExpr := &ast.ColumnNameExpr{Refer: rf}
b = &ast.BinaryOperationExpr{L: colExpr, R: r, Op: opcode.Plus}
ast.ResetEvaluatedFlag(b)
ast.SetFlag(b)
c.Assert(ast.IsPreEvaluable(b), Equals, false)
d, err = Eval(ctx, b)
c.Assert(ast.IsEvaluated(b), Equals, false)
c.Assert(err, IsNil)
c.Assert(d, testutil.DatumEquals, types.NewIntDatum(3))
}
func (s *testEvaluatorSuite) TestMod(c *C) {
cases := []testCase{
{
exprStr: "MOD(234, 10)",
resultStr: "4",
},
{
exprStr: "MOD(29, 9)",
resultStr: "2",
},
{
exprStr: "MOD(34.5, 3)",
resultStr: "1.5",
},
}
s.runTests(c, cases)
}<|fim▁end|> | e := &ast.FuncCallExpr{
FnName: model.NewCIStr("EXTRACT"), |
<|file_name|>tensorflow.py<|end_file_name|><|fim▁begin|>"""
Component that performs TensorFlow classification on images.
For a quick start, pick a pre-trained COCO model from:
https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/detection_model_zoo.md
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/image_processing.tensorflow/
"""
import logging
import sys
import os
import voluptuous as vol
from homeassistant.components.image_processing import (
CONF_CONFIDENCE, CONF_ENTITY_ID, CONF_NAME, CONF_SOURCE, PLATFORM_SCHEMA,
ImageProcessingEntity)
from homeassistant.core import split_entity_id
from homeassistant.helpers import template
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['numpy==1.15.3', 'pillow==5.2.0', 'protobuf==3.6.1']
_LOGGER = logging.getLogger(__name__)
ATTR_MATCHES = 'matches'
ATTR_SUMMARY = 'summary'
ATTR_TOTAL_MATCHES = 'total_matches'
CONF_FILE_OUT = 'file_out'
CONF_MODEL = 'model'
CONF_GRAPH = 'graph'
CONF_LABELS = 'labels'
CONF_MODEL_DIR = 'model_dir'
CONF_CATEGORIES = 'categories'
CONF_CATEGORY = 'category'
CONF_AREA = 'area'
CONF_TOP = 'top'
CONF_LEFT = 'left'
CONF_BOTTOM = 'bottom'
CONF_RIGHT = 'right'
AREA_SCHEMA = vol.Schema({
vol.Optional(CONF_TOP, default=0): cv.small_float,
vol.Optional(CONF_LEFT, default=0): cv.small_float,
vol.Optional(CONF_BOTTOM, default=1): cv.small_float,
vol.Optional(CONF_RIGHT, default=1): cv.small_float
})
CATEGORY_SCHEMA = vol.Schema({
vol.Required(CONF_CATEGORY): cv.string,
vol.Optional(CONF_AREA): AREA_SCHEMA
})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_FILE_OUT, default=[]):
vol.All(cv.ensure_list, [cv.template]),
vol.Required(CONF_MODEL): vol.Schema({
vol.Required(CONF_GRAPH): cv.isfile,
vol.Optional(CONF_LABELS): cv.isfile,
vol.Optional(CONF_MODEL_DIR): cv.isdir,<|fim▁hole|> vol.All(cv.ensure_list, [vol.Any(
cv.string,
CATEGORY_SCHEMA
)])
})
})
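# Illustrative configuration sketch (not part of the original file); the
# entity id and file paths below are hypothetical examples:
#
# image_processing:
#   - platform: tensorflow
#     source:
#       - entity_id: camera.front_door
#     file_out:
#       - "/tmp/{{ camera_entity.split('.')[1] }}_latest.jpg"
#     model:
#       graph: /opt/tensorflow/frozen_inference_graph.pb
#       categories:
#         - person
#         - category: car
#           area:
#             top: 0.4
#             left: 0.1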
def draw_box(draw, box, img_width,
img_height, text='', color=(255, 255, 0)):
"""Draw bounding box on image."""
ymin, xmin, ymax, xmax = box
(left, right, top, bottom) = (xmin * img_width, xmax * img_width,
ymin * img_height, ymax * img_height)
draw.line([(left, top), (left, bottom), (right, bottom),
(right, top), (left, top)], width=5, fill=color)
if text:
draw.text((left, abs(top-15)), text, fill=color)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the TensorFlow image processing platform."""
model_config = config.get(CONF_MODEL)
model_dir = model_config.get(CONF_MODEL_DIR) \
or hass.config.path('tensorflow')
labels = model_config.get(CONF_LABELS) \
or hass.config.path('tensorflow', 'object_detection',
'data', 'mscoco_label_map.pbtxt')
# Make sure locations exist
if not os.path.isdir(model_dir) or not os.path.exists(labels):
_LOGGER.error("Unable to locate tensorflow models or label map.")
return
# append custom model path to sys.path
sys.path.append(model_dir)
try:
# Verify that the TensorFlow Object Detection API is pre-installed
# pylint: disable=unused-import,unused-variable
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
import tensorflow as tf # noqa
from object_detection.utils import label_map_util # noqa
except ImportError:
# pylint: disable=line-too-long
_LOGGER.error(
"No TensorFlow Object Detection library found! Install or compile "
"for your system following instructions here: "
"https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/installation.md") # noqa
return
try:
# Display warning that PIL will be used if no OpenCV is found.
# pylint: disable=unused-import,unused-variable
import cv2 # noqa
except ImportError:
_LOGGER.warning("No OpenCV library found. "
"TensorFlow will process image with "
"PIL at reduced resolution.")
# setup tensorflow graph, session, and label map to pass to processor
# pylint: disable=no-member
detection_graph = tf.Graph()
with detection_graph.as_default():
od_graph_def = tf.GraphDef()
with tf.gfile.GFile(model_config.get(CONF_GRAPH), 'rb') as fid:
serialized_graph = fid.read()
od_graph_def.ParseFromString(serialized_graph)
tf.import_graph_def(od_graph_def, name='')
session = tf.Session(graph=detection_graph)
label_map = label_map_util.load_labelmap(labels)
categories = label_map_util.convert_label_map_to_categories(
label_map, max_num_classes=90, use_display_name=True)
category_index = label_map_util.create_category_index(categories)
entities = []
for camera in config[CONF_SOURCE]:
entities.append(TensorFlowImageProcessor(
hass, camera[CONF_ENTITY_ID], camera.get(CONF_NAME),
session, detection_graph, category_index, config))
add_entities(entities)
class TensorFlowImageProcessor(ImageProcessingEntity):
"""Representation of an TensorFlow image processor."""
def __init__(self, hass, camera_entity, name, session, detection_graph,
category_index, config):
"""Initialize the TensorFlow entity."""
model_config = config.get(CONF_MODEL)
self.hass = hass
self._camera_entity = camera_entity
if name:
self._name = name
else:
self._name = "TensorFlow {0}".format(
split_entity_id(camera_entity)[1])
self._session = session
self._graph = detection_graph
self._category_index = category_index
self._min_confidence = config.get(CONF_CONFIDENCE)
self._file_out = config.get(CONF_FILE_OUT)
# handle categories and specific detection areas
categories = model_config.get(CONF_CATEGORIES)
self._include_categories = []
self._category_areas = {}
for category in categories:
if isinstance(category, dict):
category_name = category.get(CONF_CATEGORY)
category_area = category.get(CONF_AREA)
self._include_categories.append(category_name)
self._category_areas[category_name] = [0, 0, 1, 1]
if category_area:
self._category_areas[category_name] = [
category_area.get(CONF_TOP),
category_area.get(CONF_LEFT),
category_area.get(CONF_BOTTOM),
category_area.get(CONF_RIGHT)
]
else:
self._include_categories.append(category)
self._category_areas[category] = [0, 0, 1, 1]
# Handle global detection area
self._area = [0, 0, 1, 1]
area_config = model_config.get(CONF_AREA)
if area_config:
self._area = [
area_config.get(CONF_TOP),
area_config.get(CONF_LEFT),
area_config.get(CONF_BOTTOM),
area_config.get(CONF_RIGHT)
]
template.attach(hass, self._file_out)
self._matches = {}
self._total_matches = 0
self._last_image = None
@property
def camera_entity(self):
"""Return camera entity id from process pictures."""
return self._camera_entity
@property
def name(self):
"""Return the name of the image processor."""
return self._name
@property
def state(self):
"""Return the state of the entity."""
return self._total_matches
@property
def device_state_attributes(self):
"""Return device specific state attributes."""
return {
ATTR_MATCHES: self._matches,
ATTR_SUMMARY: {category: len(values)
for category, values in self._matches.items()},
ATTR_TOTAL_MATCHES: self._total_matches
}
def _save_image(self, image, matches, paths):
from PIL import Image, ImageDraw
import io
img = Image.open(io.BytesIO(bytearray(image))).convert('RGB')
img_width, img_height = img.size
draw = ImageDraw.Draw(img)
# Draw custom global region/area
if self._area != [0, 0, 1, 1]:
draw_box(draw, self._area,
img_width, img_height,
"Detection Area", (0, 255, 255))
for category, values in matches.items():
# Draw custom category regions/areas
if (category in self._category_areas
and self._category_areas[category] != [0, 0, 1, 1]):
label = "{} Detection Area".format(category.capitalize())
draw_box(draw, self._category_areas[category], img_width,
img_height, label, (0, 255, 0))
# Draw detected objects
for instance in values:
label = "{0} {1:.1f}%".format(category, instance['score'])
draw_box(draw, instance['box'],
img_width, img_height,
label, (255, 255, 0))
for path in paths:
_LOGGER.info("Saving results image to %s", path)
img.save(path)
def process_image(self, image):
"""Process the image."""
import numpy as np
try:
import cv2 # pylint: disable=import-error
img = cv2.imdecode(
np.asarray(bytearray(image)), cv2.IMREAD_UNCHANGED)
inp = img[:, :, [2, 1, 0]] # BGR->RGB
inp_expanded = inp.reshape(1, inp.shape[0], inp.shape[1], 3)
except ImportError:
from PIL import Image
import io
img = Image.open(io.BytesIO(bytearray(image))).convert('RGB')
img.thumbnail((460, 460), Image.ANTIALIAS)
img_width, img_height = img.size
inp = np.array(img.getdata()).reshape(
(img_height, img_width, 3)).astype(np.uint8)
inp_expanded = np.expand_dims(inp, axis=0)
image_tensor = self._graph.get_tensor_by_name('image_tensor:0')
boxes = self._graph.get_tensor_by_name('detection_boxes:0')
scores = self._graph.get_tensor_by_name('detection_scores:0')
classes = self._graph.get_tensor_by_name('detection_classes:0')
boxes, scores, classes = self._session.run(
[boxes, scores, classes],
feed_dict={image_tensor: inp_expanded})
boxes, scores, classes = map(np.squeeze, [boxes, scores, classes])
classes = classes.astype(int)
matches = {}
total_matches = 0
for box, score, obj_class in zip(boxes, scores, classes):
score = score * 100
boxes = box.tolist()
# Exclude matches below min confidence value
if score < self._min_confidence:
continue
# Exclude matches outside global area definition
if (boxes[0] < self._area[0] or boxes[1] < self._area[1]
or boxes[2] > self._area[2] or boxes[3] > self._area[3]):
continue
category = self._category_index[obj_class]['name']
# Exclude unlisted categories
if (self._include_categories
and category not in self._include_categories):
continue
# Exclude matches outside category specific area definition
if (self._category_areas
and (boxes[0] < self._category_areas[category][0]
or boxes[1] < self._category_areas[category][1]
or boxes[2] > self._category_areas[category][2]
or boxes[3] > self._category_areas[category][3])):
continue
# If we got here, we should include it
if category not in matches.keys():
matches[category] = []
matches[category].append({
'score': float(score),
'box': boxes
})
total_matches += 1
# Save Images
if total_matches and self._file_out:
paths = []
for path_template in self._file_out:
if isinstance(path_template, template.Template):
paths.append(path_template.render(
camera_entity=self._camera_entity))
else:
paths.append(path_template)
self._save_image(image, matches, paths)
self._matches = matches
self._total_matches = total_matches<|fim▁end|> | vol.Optional(CONF_AREA): AREA_SCHEMA,
vol.Optional(CONF_CATEGORIES, default=[]): |
<|file_name|>controllers.py<|end_file_name|><|fim▁begin|>"""
mod_customized Controllers
===================
In this module, users can test their fork branch with a customized set of regression tests
"""
from flask import Blueprint, g, request, redirect, url_for, flash
from github import GitHub, ApiError
from datetime import datetime, timedelta
from decorators import template_renderer, get_menu_entries
from mod_auth.controllers import login_required, check_access_rights
from mod_auth.models import Role, User
from mod_test.models import Fork, Test, TestType, TestPlatform
from mod_customized.forms import TestForkForm
from mod_customized.models import TestFork, CustomizedTest
from mod_regression.models import Category, regressionTestLinkTable, RegressionTest
from mod_test.controllers import get_data_for_test, TestNotFoundException
from mod_auth.controllers import fetch_username_from_token
from sqlalchemy import and_
mod_customized = Blueprint('custom', __name__)
@mod_customized.before_app_request
def before_app_request():
if g.user is not None:
g.menu_entries['custom'] = {
'title': 'Customize Test',
'icon': 'code-fork',
'route': 'custom.index',
'access': [Role.tester, Role.contributor, Role.admin]
}<|fim▁hole|>
@mod_customized.route('/', methods=['GET', 'POST'])
@login_required
@check_access_rights([Role.tester, Role.contributor, Role.admin])
@template_renderer()
def index():
"""
Display a form to allow users to run tests.
    Users can enter a commit hash or select a commit from their repo that is not more than 30 days old.
    Users can customize the test based on selected regression tests and platforms.
    Also displays the list of customized tests started by the user.
User will be redirected to the same page on submit.
"""
fork_test_form = TestForkForm(request.form)
username = fetch_username_from_token()
commit_options = False
if username is not None:
gh = GitHub(access_token=g.github['bot_token'])
repository = gh.repos(username)(g.github['repository'])
# Only commits since last month
last_month = datetime.now() - timedelta(days=30)
commit_since = last_month.isoformat() + 'Z'
commits = repository.commits().get(since=commit_since)
commit_arr = []
for commit in commits:
commit_url = commit['html_url']
commit_sha = commit['sha']
commit_option = (
'<a href="{url}">{sha}</a>').format(url=commit_url, sha=commit_sha)
commit_arr.append((commit_sha, commit_option))
        # If there are commits present, display them on the webpage
if len(commit_arr) > 0:
fork_test_form.commit_select.choices = commit_arr
commit_options = True
fork_test_form.regression_test.choices = [(regression_test.id, regression_test)
for regression_test in RegressionTest.query.all()]
if fork_test_form.add.data and fork_test_form.validate_on_submit():
import requests
regression_tests = fork_test_form.regression_test.data
commit_hash = fork_test_form.commit_hash.data
repo = g.github['repository']
platforms = fork_test_form.platform.data
api_url = ('https://api.github.com/repos/{user}/{repo}/commits/{hash}').format(
user=username, repo=repo, hash=commit_hash
)
# Show error if github fails to recognize commit
response = requests.get(api_url)
if response.status_code == 500:
fork_test_form.commit_hash.errors.append('Error contacting Github')
elif response.status_code != 200:
fork_test_form.commit_hash.errors.append('Wrong Commit Hash')
else:
add_test_to_kvm(username, commit_hash, platforms, regression_tests)
return redirect(url_for('custom.index'))
populated_categories = g.db.query(regressionTestLinkTable.c.category_id).subquery()
categories = Category.query.filter(Category.id.in_(populated_categories)).order_by(Category.name.asc()).all()
tests = Test.query.filter(and_(TestFork.user_id == g.user.id, TestFork.test_id == Test.id)).order_by(
Test.id.desc()).limit(50).all()
return {
'addTestFork': fork_test_form,
'commit_options': commit_options,
'tests': tests,
'TestType': TestType,
'GitUser': username,
'categories': categories,
'customize': True
}
def add_test_to_kvm(username, commit_hash, platforms, regression_tests):
"""
Create new tests and add it to CustomizedTests based on parameters.
:param username: git username required to find fork
:type username: str
:param commit_hash: commit hash of the repo user selected to run test
:type commit_hash: str
:param platforms: platforms user selected to run test
:type platforms: list
    :param regression_tests: regression tests the user selected to run
:type regression_tests: list
"""
fork_url = ('https://github.com/{user}/{repo}.git').format(
user=username, repo=g.github['repository']
)
fork = Fork.query.filter(Fork.github == fork_url).first()
if fork is None:
fork = Fork(fork_url)
g.db.add(fork)
g.db.commit()
for platform in platforms:
platform = TestPlatform.from_string(platform)
test = Test(platform, TestType.commit, fork.id, 'master', commit_hash)
g.db.add(test)
g.db.commit()
for regression_test in regression_tests:
customized_test = CustomizedTest(test.id, regression_test)
g.db.add(customized_test)
test_fork = TestFork(g.user.id, test.id)
g.db.add(test_fork)
g.db.commit()<|fim▁end|> | |
<|file_name|>Solution.py<|end_file_name|><|fim▁begin|># Problem: Search for a Range
#
# Given a sorted array of integers, find the starting and ending position of a given target value.
#
# Your algorithm's runtime complexity must be in the order of O(log n).
#
# If the target is not found in the array, return [-1, -1].
#
# For example,
#
# Given [5, 7, 7, 8, 8, 10] and target value 8,
#
# return [3, 4].
#
################################################################################
class Solution(object):
def searchRange(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
low, high = 0, len(nums) - 1
while low <= high:
            mid = (low + high) // 2  # floor division so mid stays an int on Python 3 as well
if nums[mid] < target:
low = mid + 1
elif nums[mid] > target:
high = mid - 1
else:
start, end = mid, mid
while start - 1 >= 0 and nums[start - 1] == nums[mid]:<|fim▁hole|>
while end + 1 <= (len(nums) - 1) and nums[end + 1] == nums[mid]:
end += 1
return [start, end]
return [-1, -1]<|fim▁end|> | start -= 1 |
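# Note: the two expansion loops above degrade to O(n) when many elements
# equal the target, which technically violates the stated O(log n) bound.
# A minimal sketch of a true O(log n) variant using two binary searches via
# the standard-library bisect module (illustrative alternative, not part of
# the original solution):
import bisect

def search_range_logn(nums, target):
    """Return [first, last] index of target, or [-1, -1], in O(log n)."""
    lo = bisect.bisect_left(nums, target)  # first index with nums[i] >= target
    if lo == len(nums) or nums[lo] != target:
        return [-1, -1]
    return [lo, bisect.bisect_right(nums, target) - 1]  # last index == target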
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup, find_packages
DESCRIPTION = """
Send emails based on a Django template
See:
https://github.com/prestontimmons/django-email-template
"""
<|fim▁hole|> description="Send emails based on a Django template",
long_description=DESCRIPTION,
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[],
)<|fim▁end|> | setup(
name="django-email-template",
version="1.0.2", |
<|file_name|>ChartComponentModel.js<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2005 - 2014 TIBCO Software Inc. All rights reserved.
* http://www.jaspersoft.com.
*
* Unless you have purchased a commercial license agreement from Jaspersoft,
* the following license terms apply:
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/**
* @author: nesterone
* @version: $Id: ChartComponentModel.js 47331 2014-07-18 09:13:06Z kklein $
*/
define(function (require) {
"use strict";
var BaseComponentModel = require("./BaseComponentModel"),
jiveTypes = require("../enum/jiveTypes"),
interactiveComponentTypes = require("../enum/interactiveComponentTypes"),
_ = require("underscore"),
reportEvents = require("../../enum/reportEvents"),
reportCreators = require("../../enum/reportCreators");
return BaseComponentModel.extend({
defaults: function() {
return {
charttype: undefined,
datetimeSupported: false,
hcinstancedata: undefined,
id: undefined,
interactive: true,
module: "jive.highcharts",
type: jiveTypes.CHART,
uimodule: "jive.interactive.highchart"
};
},
api: {
changeType: {}
},
actions: {
"change:charttype": function() {
return {<|fim▁hole|> changeChartTypeData: {
chartComponentUuid: this.get("id"),
chartType: this.get("charttype")
}
}
}
},
initialize: function(){
if (this.has("hcinstancedata")){
var hcinstancedata = this.get("hcinstancedata"),
creator = this._detectCreator(hcinstancedata);
if (reportCreators.AD_HOC_DESIGNER === creator){
//workaround to stretch adhoc's
delete hcinstancedata.width;
delete hcinstancedata.height;
}
}
// JSON.parse(JSON.stringify(.... deep clone.
// JR services are changing model by initialization, therefore deep clone is done to avoid data corruption
// should be replaced with true deep clone if JRS-1450 implemented
this.config = JSON.parse(JSON.stringify(this.toJSON()));
},
showTypeError: function() {
this.get("uiModuleType").showTypeError();
},
changeType: function(parms) {
this.trigger(reportEvents.ACTION, {
actionName: 'changeChartType',
changeChartTypeData: {
chartComponentUuid: this.config.id,
chartType: parms.type
}
});
},
_detectCreator: function(hcInstance){
var services = hcInstance.services,
isCreatedFromAdhoc = _.some(services, function (info) {
return info.service.indexOf("adhoc") != -1;
}),
creator;
if (isCreatedFromAdhoc) {
creator = reportCreators.AD_HOC_DESIGNER;
}
if (creator){
this.set("creator", creator);
}
return creator;
},
toReportComponentObject: function() {
if (!this.get("interactive")) {
return undefined;
}
return {
id: this.get("id"),
componentType: interactiveComponentTypes.CHART,
chartType: this.get("charttype"),
name: this.get("name")
};
},
updateFromReportComponentObject: function(obj) {
this.set({ charttype: obj.chartType });
}
});
});<|fim▁end|> | actionName: 'changeChartType', |
<|file_name|>version.go<|end_file_name|><|fim▁begin|>package eventgrid
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator 2.2.18.0
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
// UserAgent returns the UserAgent string to use when sending http.Requests.<|fim▁hole|>
// Version returns the semantic version (see http://semver.org) of the client.
func Version() string {
return "v10.3.1-beta"
}<|fim▁end|> | func UserAgent() string {
return "Azure-SDK-For-Go/v10.3.1-beta arm-eventgrid/2017-06-15-preview"
} |
<|file_name|>09-rooms.js<|end_file_name|><|fim▁begin|>/* eslint-env mocha */
/* globals expect */
import { getCredentials, api, request, credentials } from '../../data/api-data.js';
import { password } from '../../data/user';
import { closeRoom, createRoom } from '../../data/rooms.helper';
import { updatePermission } from '../../data/permissions.helper';
describe('[Rooms]', function() {
this.retries(0);
before((done) => getCredentials(done));
it('/rooms.get', (done) => {
request.get(api('rooms.get'))
.set(credentials)
.expect(200)
.expect((res) => {
expect(res.body).to.have.property('success', true);
expect(res.body).to.have.property('update');
expect(res.body).to.have.property('remove');
})
.end(done);
});
it('/rooms.get?updatedSince', (done) => {
request.get(api('rooms.get'))
.set(credentials)
.query({
updatedSince: new Date,
})
.expect(200)
.expect((res) => {
expect(res.body).to.have.property('success', true);
expect(res.body).to.have.property('update').that.have.lengthOf(0);
expect(res.body).to.have.property('remove').that.have.lengthOf(0);
})
.end(done);
});
describe('/rooms.saveNotification:', () => {
let testChannel;
		it('create a channel', (done) => {
createRoom({ type: 'c', name: `channel.test.${ Date.now() }` })
.end((err, res) => {
testChannel = res.body.channel;
done();
});
});
it('/rooms.saveNotification:', (done) => {
request.post(api('rooms.saveNotification'))
.set(credentials)
.send({
roomId: testChannel._id,
notifications: {
disableNotifications: '0',
emailNotifications: 'nothing',
audioNotificationValue: 'beep',
desktopNotifications: 'nothing',
desktopNotificationDuration: '2',
audioNotifications: 'all',
mobilePushNotifications: 'mentions',
},
})
.expect('Content-Type', 'application/json')
.expect(200)
.expect((res) => {
expect(res.body).to.have.property('success', true);
})
.end(done);
});
});
describe('/rooms.favorite', () => {
let testChannel;
const testChannelName = `channel.test.${ Date.now() }`;
		it('create a channel', (done) => {
createRoom({ type: 'c', name: testChannelName })
.end((err, res) => {
testChannel = res.body.channel;
done();
});
});
		it('should favorite the room when sending favorite: true by roomName', (done) => {
request.post(api('rooms.favorite'))
.set(credentials)
.send({
roomName: testChannelName,
favorite: true,
})
.expect(200)
.expect((res) => {
expect(res.body).to.have.property('success', true);
})
.end(done);
});
		it('should unfavorite the room when sending favorite: false by roomName', (done) => {
request.post(api('rooms.favorite'))
.set(credentials)
.send({
roomName: testChannelName,
favorite: false,
})
.expect(200)
.expect((res) => {
expect(res.body).to.have.property('success', true);
})
.end(done);
});
		it('should favorite the room when sending favorite: true by roomId', (done) => {
request.post(api('rooms.favorite'))
.set(credentials)
.send({
roomId: testChannel._id,
favorite: true,
})
.expect(200)
.expect((res) => {
expect(res.body).to.have.property('success', true);
})
.end(done);
});
		it('should unfavorite the room when sending favorite: false by roomId', (done) => {
request.post(api('rooms.favorite'))
.set(credentials)
.send({
roomId: testChannel._id,
favorite: false,
})
.expect(200)
.expect((res) => {
expect(res.body).to.have.property('success', true);
})
.end(done);
});
		it('should return an error when sending an invalid room', (done) => {
request.post(api('rooms.favorite'))
.set(credentials)
.send({
roomId: 'foo',
favorite: false,
})
.expect(400)
.expect((res) => {
expect(res.body).to.have.property('success', false);
expect(res.body).to.have.property('error');
})
.end(done);
});
});
describe('[/rooms.cleanHistory]', () => {
let publicChannel;
let privateChannel;
let directMessageChannel;
let user;
beforeEach((done) => {
const username = `user.test.${ Date.now() }`;
const email = `${ username }@rocket.chat`;<|fim▁hole|> .set(credentials)
.send({ email, name: username, username, password })
.end((err, res) => {
user = res.body.user;
done();
});
});
let userCredentials;
beforeEach((done) => {
request.post(api('login'))
.send({
user: user.username,
password,
})
.expect('Content-Type', 'application/json')
.expect(200)
.expect((res) => {
userCredentials = {};
userCredentials['X-Auth-Token'] = res.body.data.authToken;
userCredentials['X-User-Id'] = res.body.data.userId;
})
.end(done);
});
afterEach((done) => {
request.post(api('users.delete')).set(credentials).send({
userId: user._id,
}).end(done);
user = undefined;
});
it('create a public channel', (done) => {
createRoom({ type: 'c', name: `testeChannel${ +new Date() }` })
.end((err, res) => {
publicChannel = res.body.channel;
done();
});
});
it('create a private channel', (done) => {
createRoom({ type: 'p', name: `testPrivateChannel${ +new Date() }` })
.end((err, res) => {
privateChannel = res.body.group;
done();
});
});
it('create a direct message', (done) => {
createRoom({ type: 'd', username: 'rocket.cat' })
.end((err, res) => {
directMessageChannel = res.body.room;
done();
});
});
		it('should return success when sending a valid public channel', (done) => {
request.post(api('rooms.cleanHistory'))
.set(credentials)
.send({
roomId: publicChannel._id,
latest: '2016-12-09T13:42:25.304Z',
oldest: '2016-08-30T13:42:25.304Z',
})
.expect('Content-Type', 'application/json')
.expect(200)
.expect((res) => {
expect(res.body).to.have.property('success', true);
})
.end(done);
});
		it('should return success when sending a valid private channel', (done) => {
request.post(api('rooms.cleanHistory'))
.set(credentials)
.send({
roomId: privateChannel._id,
latest: '2016-12-09T13:42:25.304Z',
oldest: '2016-08-30T13:42:25.304Z',
})
.expect('Content-Type', 'application/json')
.expect(200)
.expect((res) => {
expect(res.body).to.have.property('success', true);
})
.end(done);
});
		it('should return success when sending a valid Direct Message channel', (done) => {
request.post(api('rooms.cleanHistory'))
.set(credentials)
.send({
roomId: directMessageChannel._id,
latest: '2016-12-09T13:42:25.304Z',
oldest: '2016-08-30T13:42:25.304Z',
})
.expect('Content-Type', 'application/json')
.expect(200)
.expect((res) => {
expect(res.body).to.have.property('success', true);
})
.end(done);
});
		it('should return a not-allowed error when trying to delete messages as a user without permission', (done) => {
request.post(api('rooms.cleanHistory'))
.set(userCredentials)
.send({
roomId: directMessageChannel._id,
latest: '2016-12-09T13:42:25.304Z',
oldest: '2016-08-30T13:42:25.304Z',
})
.expect('Content-Type', 'application/json')
.expect(400)
.expect((res) => {
expect(res.body).to.have.property('success', false);
expect(res.body).to.have.property('errorType', 'error-not-allowed');
})
.end(done);
});
});
describe('[/rooms.info]', () => {
let testChannel;
let testGroup;
let testDM;
const expectedKeys = ['_id', 'name', 'fname', 't', 'msgs', 'usersCount', 'u', 'customFields', 'ts', 'ro', 'sysMes', 'default', '_updatedAt'];
const testChannelName = `channel.test.${ Date.now() }-${ Math.random() }`;
const testGroupName = `group.test.${ Date.now() }-${ Math.random() }`;
after((done) => {
closeRoom({ type: 'd', roomId: testDM._id })
.then(done);
});
		it('create a channel', (done) => {
createRoom({ type: 'c', name: testChannelName })
.end((err, res) => {
testChannel = res.body.channel;
done();
});
});
it('create a group', (done) => {
createRoom(({ type: 'p', name: testGroupName }))
.end((err, res) => {
testGroup = res.body.group;
done();
});
});
it('create a Direct message room with rocket.cat', (done) => {
createRoom(({ type: 'd', username: 'rocket.cat' }))
.end((err, res) => {
testDM = res.body.room;
done();
});
});
it('should return the info about the created channel correctly searching by roomId', (done) => {
request.get(api('rooms.info'))
.set(credentials)
.query({
roomId: testChannel._id,
})
.expect(200)
.expect((res) => {
expect(res.body).to.have.property('success', true);
expect(res.body).to.have.property('room').and.to.be.an('object');
expect(res.body.room).to.have.keys(expectedKeys);
})
.end(done);
});
it('should return the info about the created channel correctly searching by roomName', (done) => {
request.get(api('rooms.info'))
.set(credentials)
.query({
roomName: testChannel.name,
})
.expect(200)
.expect((res) => {
expect(res.body).to.have.property('success', true);
expect(res.body).to.have.property('room').and.to.be.an('object');
expect(res.body.room).to.have.all.keys(expectedKeys);
})
.end(done);
});
it('should return the info about the created group correctly searching by roomId', (done) => {
request.get(api('rooms.info'))
.set(credentials)
.query({
roomId: testGroup._id,
})
.expect(200)
.expect((res) => {
expect(res.body).to.have.property('success', true);
expect(res.body).to.have.property('room').and.to.be.an('object');
expect(res.body.room).to.have.all.keys(expectedKeys);
})
.end(done);
});
it('should return the info about the created group correctly searching by roomName', (done) => {
request.get(api('rooms.info'))
.set(credentials)
.query({
roomName: testGroup.name,
})
.expect(200)
.expect((res) => {
expect(res.body).to.have.property('success', true);
expect(res.body).to.have.property('room').and.to.be.an('object');
expect(res.body.room).to.have.all.keys(expectedKeys);
})
.end(done);
});
it('should return the info about the created DM correctly searching by roomId', (done) => {
request.get(api('rooms.info'))
.set(credentials)
.query({
roomId: testDM._id,
})
.expect(200)
.expect((res) => {
expect(res.body).to.have.property('success', true);
expect(res.body).to.have.property('room').and.to.be.an('object');
})
.end(done);
});
it('should return name and _id of public channel when it has the "fields" query parameter limiting by name', (done) => {
request.get(api('rooms.info'))
.set(credentials)
.query({
roomId: testChannel._id,
fields: JSON.stringify({ name: 1 }),
})
.expect(200)
.expect((res) => {
expect(res.body).to.have.property('success', true);
expect(res.body).to.have.property('room').and.to.be.an('object');
expect(res.body.room).to.have.property('name').and.to.be.equal(testChannelName);
expect(res.body.room).to.have.all.keys(['_id', 'name']);
})
.end(done);
});
});
describe('[/rooms.leave]', () => {
let testChannel;
let testGroup;
let testDM;
const testChannelName = `channel.test.${ Date.now() }-${ Math.random() }`;
const testGroupName = `group.test.${ Date.now() }-${ Math.random() }`;
after((done) => {
closeRoom({ type: 'd', roomId: testDM._id })
.then(done);
});
		it('create a channel', (done) => {
createRoom({ type: 'c', name: testChannelName })
.end((err, res) => {
testChannel = res.body.channel;
done();
});
});
it('create a group', (done) => {
createRoom(({ type: 'p', name: testGroupName }))
.end((err, res) => {
testGroup = res.body.group;
done();
});
});
it('create a Direct message room with rocket.cat', (done) => {
createRoom(({ type: 'd', username: 'rocket.cat' }))
.end((err, res) => {
testDM = res.body.room;
done();
});
});
it('should return an Error when trying leave a DM room', (done) => {
request.post(api('rooms.leave'))
.set(credentials)
.send({
roomId: testDM._id,
})
.expect(400)
.expect((res) => {
expect(res.body).to.have.property('success', false);
expect(res.body).to.have.property('errorType', 'error-not-allowed');
})
.end(done);
});
it('should return an Error when trying to leave a public channel and you are the last owner', (done) => {
request.post(api('rooms.leave'))
.set(credentials)
.send({
roomId: testChannel._id,
})
.expect(400)
.expect((res) => {
expect(res.body).to.have.property('success', false);
expect(res.body).to.have.property('errorType', 'error-you-are-last-owner');
})
.end(done);
});
it('should return an Error when trying to leave a private group and you are the last owner', (done) => {
request.post(api('rooms.leave'))
.set(credentials)
.send({
roomId: testGroup._id,
})
.expect(400)
.expect((res) => {
expect(res.body).to.have.property('success', false);
expect(res.body).to.have.property('errorType', 'error-you-are-last-owner');
})
.end(done);
});
		it('should return an Error when trying to leave a public channel without the necessary permission (leave-c)', (done) => {
updatePermission('leave-c', []).then(() => {
request.post(api('rooms.leave'))
.set(credentials)
.send({
roomId: testChannel._id,
})
.expect(400)
.expect((res) => {
expect(res.body).to.have.property('success', false);
expect(res.body).to.have.property('errorType', 'error-not-allowed');
})
.end(done);
});
});
		it('should return an Error when trying to leave a private group without the necessary permission (leave-p)', (done) => {
updatePermission('leave-p', []).then(() => {
request.post(api('rooms.leave'))
.set(credentials)
.send({
roomId: testGroup._id,
})
.expect(400)
.expect((res) => {
expect(res.body).to.have.property('success', false);
expect(res.body).to.have.property('errorType', 'error-not-allowed');
})
.end(done);
});
});
		it('should leave the public channel when the room has at least one other owner and the user has the necessary permission (leave-c)', (done) => {
updatePermission('leave-c', ['admin']).then(() => {
request.post(api('channels.addAll'))
.set(credentials)
.send({
roomId: testChannel._id,
})
.end(() => {
request.post(api('channels.addOwner'))
.set(credentials)
.send({
roomId: testChannel._id,
userId: 'rocket.cat',
})
.end(() => {
request.post(api('rooms.leave'))
.set(credentials)
.send({
roomId: testChannel._id,
})
.expect(200)
.expect((res) => {
expect(res.body).to.have.property('success', true);
})
.end(done);
});
});
});
});
		it('should leave the private group when the room has at least one other owner and the user has the necessary permission (leave-p)', (done) => {
updatePermission('leave-p', ['admin']).then(() => {
request.post(api('groups.addAll'))
.set(credentials)
.send({
roomId: testGroup._id,
})
.end(() => {
request.post(api('groups.addOwner'))
.set(credentials)
.send({
roomId: testGroup._id,
userId: 'rocket.cat',
})
.end(() => {
request.post(api('rooms.leave'))
.set(credentials)
.send({
roomId: testGroup._id,
})
.expect(200)
.expect((res) => {
expect(res.body).to.have.property('success', true);
})
.end(done);
});
});
});
});
});
});<|fim▁end|> | request.post(api('users.create')) |
<|file_name|>testNavigateFTP.js<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
Cu.import("resource://gre/modules/Services.jsm");
const TEST_DATA = "ftp://ftp.mozqa.com/";
var setupModule = function(aModule) {
aModule.controller = mozmill.getBrowserController();
}
var testNavigateFTP = function () {
// opens the mozilla.org ftp page then navigates through a couple levels.
controller.open(TEST_DATA);
controller.waitForPageLoad();
var dataLink = new elementslib.Link(controller.tabs.activeTab, 'data');
controller.click(dataLink);
controller.waitForPageLoad();
var up = new elementslib.Selector(controller.tabs.activeTab, '.up');
controller.click(up);
controller.waitForPageLoad();
controller.waitForElement(dataLink);<|fim▁hole|><|fim▁end|> | } |
<|file_name|>apk_operations.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import json
import logging
import os
import pipes
import posixpath
import random
import re
import shlex
import sys
import devil_chromium
from devil import devil_env
from devil.android import apk_helper
from devil.android import device_errors
from devil.android import device_utils
from devil.android import flag_changer
from devil.android.sdk import adb_wrapper
from devil.android.sdk import intent
from devil.android.sdk import version_codes
from devil.utils import run_tests_helper
with devil_env.SysPath(os.path.join(os.path.dirname(__file__), '..', '..',
'third_party', 'colorama', 'src')):
import colorama
from incremental_install import installer
from pylib import constants
from pylib.symbols import deobfuscator
def _Colorize(color, text):
# |color| as a string to avoid pylint's no-member warning :(.
# pylint: disable=no-member
return getattr(colorama.Fore, color) + text + colorama.Fore.RESET
def _InstallApk(devices, apk, install_dict):
def install(device):
if install_dict:
installer.Install(device, install_dict, apk=apk)
else:
device.Install(apk)
logging.info('Installing %sincremental apk.', '' if install_dict else 'non-')
device_utils.DeviceUtils.parallel(devices).pMap(install)
def _UninstallApk(devices, install_dict, package_name):
def uninstall(device):
if install_dict:
installer.Uninstall(device, package_name)
else:
device.Uninstall(package_name)
device_utils.DeviceUtils.parallel(devices).pMap(uninstall)
def _LaunchUrl(devices, input_args, device_args_file, url, apk):
if input_args and device_args_file is None:
raise Exception('This apk does not support any flags.')
if url:
view_activity = apk.GetViewActivityName()
if not view_activity:
raise Exception('APK does not support launching with URLs.')
def launch(device):
# The flags are first updated with input args.
changer = flag_changer.FlagChanger(device, device_args_file)
flags = []
if input_args:
flags = shlex.split(input_args)
changer.ReplaceFlags(flags)
# Then launch the apk.
if url is None:
# Simulate app icon click if no url is present.
cmd = ['monkey', '-p', apk.GetPackageName(), '-c',
'android.intent.category.LAUNCHER', '1']
device.RunShellCommand(cmd, check_return=True)
else:
launch_intent = intent.Intent(action='android.intent.action.VIEW',
activity=view_activity, data=url,
package=apk.GetPackageName())
device.StartActivity(launch_intent)
device_utils.DeviceUtils.parallel(devices).pMap(launch)
def _ChangeFlags(devices, input_args, device_args_file):
if input_args is None:
_DisplayArgs(devices, device_args_file)
else:
flags = shlex.split(input_args)
def update(device):
flag_changer.FlagChanger(device, device_args_file).ReplaceFlags(flags)
device_utils.DeviceUtils.parallel(devices).pMap(update)
def _TargetCpuToTargetArch(target_cpu):
if target_cpu == 'x64':
return 'x86_64'
if target_cpu == 'mipsel':
return 'mips'
return target_cpu
def _RunGdb(device, package_name, output_directory, target_cpu, extra_args,
verbose):
gdb_script_path = os.path.dirname(__file__) + '/adb_gdb'
cmd = [
gdb_script_path,
'--package-name=%s' % package_name,
'--output-directory=%s' % output_directory,
'--adb=%s' % adb_wrapper.AdbWrapper.GetAdbPath(),
'--device=%s' % device.serial,
      # Use one lib dir per device so that changing between devices does not
      # require refetching the device libs.
'--pull-libs-dir=/tmp/adb-gdb-libs-%s' % device.serial,
]
# Enable verbose output of adb_gdb if it's set for this script.
if verbose:
cmd.append('--verbose')
if target_cpu:
cmd.append('--target-arch=%s' % _TargetCpuToTargetArch(target_cpu))
cmd.extend(extra_args)
logging.warning('Running: %s', ' '.join(pipes.quote(x) for x in cmd))
print _Colorize('YELLOW', 'All subsequent output is from adb_gdb script.')
os.execv(gdb_script_path, cmd)
def _PrintPerDeviceOutput(devices, results, single_line=False):
for d, result in zip(devices, results):
if not single_line and d is not devices[0]:
sys.stdout.write('\n')
sys.stdout.write(
_Colorize('YELLOW', '%s (%s):' % (d, d.build_description)))
sys.stdout.write(' ' if single_line else '\n')
yield result
def _RunMemUsage(devices, package_name):
def mem_usage_helper(d):
ret = []
proc_map = d.GetPids(package_name)
for name, pids in proc_map.iteritems():
for pid in pids:
ret.append((name, pid, d.GetMemoryUsageForPid(pid)))
return ret
parallel_devices = device_utils.DeviceUtils.parallel(devices)
all_results = parallel_devices.pMap(mem_usage_helper).pGet(None)
for result in _PrintPerDeviceOutput(devices, all_results):
if not result:
print 'No processes found.'
else:
for name, pid, usage in sorted(result):
print '%s(%s):' % (name, pid)
for k, v in sorted(usage.iteritems()):
print ' %s=%d' % (k, v)
print
def _DuHelper(device, path_spec, run_as=None):
"""Runs "du -s -k |path_spec|" on |device| and returns parsed result.
Args:
device: A DeviceUtils instance.
path_spec: The list of paths to run du on. May contain shell expansions
(will not be escaped).
run_as: Package name to run as, or None to run as shell user. If not None
and app is not android:debuggable (run-as fails), then command will be
run as root.
Returns:
A dict of path->size in kb containing all paths in |path_spec| that exist on
device. Paths that do not exist are silently ignored.
"""
# Example output for: du -s -k /data/data/org.chromium.chrome/{*,.*}
# 144 /data/data/org.chromium.chrome/cache
# 8 /data/data/org.chromium.chrome/files
# <snip>
# du: .*: No such file or directory
# The -d flag works differently across android version, so use -s instead.
cmd_str = 'du -s -k ' + path_spec
lines = device.RunShellCommand(cmd_str, run_as=run_as, shell=True,
check_return=False)
output = '\n'.join(lines)
# run-as: Package 'com.android.chrome' is not debuggable
if output.startswith('run-as:'):
# check_return=False needed for when some paths in path_spec do not exist.
lines = device.RunShellCommand(cmd_str, as_root=True, shell=True,
check_return=False)
ret = {}
try:
for line in lines:
# du: .*: No such file or directory
if line.startswith('du:'):
continue
size, subpath = line.split(None, 1)
ret[subpath] = int(size)
return ret
except ValueError:
logging.error('Failed to parse du output:\n%s', output)
def _RunDiskUsage(devices, package_name, verbose):
# Measuring dex size is a bit complicated:
# https://source.android.com/devices/tech/dalvik/jit-compiler
#
# For KitKat and below:
# dumpsys package contains:
# dataDir=/data/data/org.chromium.chrome
# codePath=/data/app/org.chromium.chrome-1.apk
# resourcePath=/data/app/org.chromium.chrome-1.apk
# nativeLibraryPath=/data/app-lib/org.chromium.chrome-1
# To measure odex:
# ls -l /data/dalvik-cache/data@[email protected]@classes.dex
#
# For Android L and M (and maybe for N+ system apps):
# dumpsys package contains:
# codePath=/data/app/org.chromium.chrome-1
# resourcePath=/data/app/org.chromium.chrome-1
# legacyNativeLibraryDir=/data/app/org.chromium.chrome-1/lib
# To measure odex:
# # Option 1:
# /data/dalvik-cache/arm/data@[email protected]@[email protected]
# /data/dalvik-cache/arm/data@[email protected]@[email protected]
# ls -l /data/dalvik-cache/profiles/org.chromium.chrome
# (these profiles all appear to be 0 bytes)
# # Option 2:
# ls -l /data/app/org.chromium.chrome-1/oat/arm/base.odex
#
# For Android N+:
# dumpsys package contains:
# dataDir=/data/user/0/org.chromium.chrome
# codePath=/data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==
# resourcePath=/data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==
# legacyNativeLibraryDir=/data/app/org.chromium.chrome-GUID/lib
# Instruction Set: arm
# path: /data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==/base.apk
# status: /data/.../oat/arm/base.odex[status=kOatUpToDate, compilation_f
# ilter=quicken]
# Instruction Set: arm64
# path: /data/app/org.chromium.chrome-UuCZ71IE-i5sZgHAkU49_w==/base.apk
# status: /data/.../oat/arm64/base.odex[status=..., compilation_filter=q
# uicken]
# To measure odex:
# ls -l /data/app/.../oat/arm/base.odex
# ls -l /data/app/.../oat/arm/base.vdex (optional)
# To measure the correct odex size:
# cmd package compile -m speed org.chromium.chrome # For webview
# cmd package compile -m speed-profile org.chromium.chrome # For others
def disk_usage_helper(d):
package_output = '\n'.join(d.RunShellCommand(
['dumpsys', 'package', package_name], check_return=True))
# Prints a message but does not return error when apk is not installed.
if 'Unable to find package:' in package_output:
return None
# Ignore system apks.
idx = package_output.find('Hidden system packages:')
if idx != -1:
package_output = package_output[:idx]
try:
data_dir = re.search(r'dataDir=(.*)', package_output).group(1)
code_path = re.search(r'codePath=(.*)', package_output).group(1)
lib_path = re.search(r'(?:legacyN|n)ativeLibrary(?:Dir|Path)=(.*)',
package_output).group(1)
except AttributeError:
raise Exception('Error parsing dumpsys output: ' + package_output)
compilation_filters = set()
# Match "compilation_filter=value", where a line break can occur at any spot
# (refer to examples above).
awful_wrapping = r'\s*'.join('compilation_filter=')
for m in re.finditer(awful_wrapping + r'([\s\S]+?)[\],]', package_output):
compilation_filters.add(re.sub(r'\s+', '', m.group(1)))
compilation_filter = ','.join(sorted(compilation_filters))
data_dir_sizes = _DuHelper(d, '%s/{*,.*}' % data_dir, run_as=package_name)
# Measure code_cache separately since it can be large.
code_cache_sizes = {}
code_cache_dir = next(
(k for k in data_dir_sizes if k.endswith('/code_cache')), None)
if code_cache_dir:
data_dir_sizes.pop(code_cache_dir)
code_cache_sizes = _DuHelper(d, '%s/{*,.*}' % code_cache_dir,
run_as=package_name)
apk_path_spec = code_path
if not apk_path_spec.endswith('.apk'):
apk_path_spec += '/*.apk'
apk_sizes = _DuHelper(d, apk_path_spec)
if lib_path.endswith('/lib'):
# Shows architecture subdirectory.
lib_sizes = _DuHelper(d, '%s/{*,.*}' % lib_path)
else:
lib_sizes = _DuHelper(d, lib_path)
# Look at all possible locations for odex files.
odex_paths = []
for apk_path in apk_sizes:
mangled_apk_path = apk_path[1:].replace('/', '@')
apk_basename = posixpath.basename(apk_path)[:-4]
for ext in ('dex', 'odex', 'vdex', 'art'):
# Easier to check all architectures than to determine active ones.
for arch in ('arm', 'arm64', 'x86', 'x86_64', 'mips', 'mips64'):
odex_paths.append(
'%s/oat/%s/%s.%s' % (code_path, arch, apk_basename, ext))
          # No app could possibly have more than 5 dex files (classes.dex through classes5.dex).
for suffix in ('', '2', '3', '4', '5'):
odex_paths.append('/data/dalvik-cache/%s/%s@classes%s.%s' % (
arch, mangled_apk_path, suffix, ext))
# This path does not have |arch|, so don't repeat it for every arch.
if arch == 'arm':
odex_paths.append('/data/dalvik-cache/%s@classes%s.dex' % (
mangled_apk_path, suffix))
odex_sizes = _DuHelper(d, ' '.join(pipes.quote(p) for p in odex_paths))
return (data_dir_sizes, code_cache_sizes, apk_sizes, lib_sizes, odex_sizes,
compilation_filter)
def print_sizes(desc, sizes):
print '%s: %dkb' % (desc, sum(sizes.itervalues()))
if verbose:
for path, size in sorted(sizes.iteritems()):
print ' %s: %skb' % (path, size)
parallel_devices = device_utils.DeviceUtils.parallel(devices)
all_results = parallel_devices.pMap(disk_usage_helper).pGet(None)
for result in _PrintPerDeviceOutput(devices, all_results):
if not result:
print 'APK is not installed.'
continue
(data_dir_sizes, code_cache_sizes, apk_sizes, lib_sizes, odex_sizes,
compilation_filter) = result
total = sum(sum(sizes.itervalues()) for sizes in result[:-1])
print_sizes('Apk', apk_sizes)
print_sizes('App Data (non-code cache)', data_dir_sizes)
print_sizes('App Data (code cache)', code_cache_sizes)
print_sizes('Native Libs', lib_sizes)
show_warning = compilation_filter and 'speed' not in compilation_filter
compilation_filter = compilation_filter or 'n/a'
print_sizes('odex (compilation_filter=%s)' % compilation_filter, odex_sizes)
if show_warning:
logging.warning('For a more realistic odex size, run:')
logging.warning(' %s compile-dex [speed|speed-profile]', sys.argv[0])
print 'Total: %skb (%.1fmb)' % (total, total / 1024.0)
def _RunLogcat(device, package_name, verbose, mapping_path):
if mapping_path:
try:
deobfuscate = deobfuscator.Deobfuscator(mapping_path)
except OSError:
sys.stderr.write('Error executing "bin/java_deobfuscate". '
'Did you forget to build it?\n')
sys.exit(1)
def get_my_pids():
my_pids = []
for pids in device.GetPids(package_name).values():
my_pids.extend(pids)
return [int(pid) for pid in my_pids]
def process_line(line, fast=False):
if verbose:
if fast:
return
else:
if not line or line.startswith('------'):
return
tokens = line.split(None, 4)
pid = int(tokens[2])
priority = tokens[4]
if pid in my_pids or (not fast and priority == 'F'):
pass # write
elif pid in not_my_pids:
return
elif fast:
# Skip checking whether our package spawned new processes.
not_my_pids.add(pid)
return
else:
# Check and add the pid if it is a new one from our package.
my_pids.update(get_my_pids())
if pid not in my_pids:
not_my_pids.add(pid)
return
if mapping_path:
line = '\n'.join(deobfuscate.TransformLines([line.rstrip()])) + '\n'
sys.stdout.write(line)
try:
my_pids = set(get_my_pids())
not_my_pids = set()
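    # Write a unique nonce into the device log: backlog lines seen before the
    # nonce are handled in "fast" mode (no re-scanning for newly spawned pids);
    # once the nonce appears we are reading live output and switch to full
    # pid checking.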
nonce = 'apk_wrappers.py nonce={}'.format(random.random())
device.RunShellCommand(['log', nonce])
fast = True
for line in device.adb.Logcat(logcat_format='threadtime'):
try:
process_line(line, fast)
except:
sys.stderr.write('Failed to process line: ' + line)
raise
if fast and nonce in line:
fast = False
except KeyboardInterrupt:
pass # Don't show stack trace upon Ctrl-C
finally:
if mapping_path:
deobfuscate.Close()
def _RunPs(devices, package_name):
parallel_devices = device_utils.DeviceUtils.parallel(devices)
all_pids = parallel_devices.GetPids(package_name).pGet(None)
for proc_map in _PrintPerDeviceOutput(devices, all_pids):
if not proc_map:
print 'No processes found.'
else:
for name, pids in sorted(proc_map.items()):
print name, ','.join(pids)
def _RunShell(devices, package_name, cmd):
if cmd:
parallel_devices = device_utils.DeviceUtils.parallel(devices)
outputs = parallel_devices.RunShellCommand(
cmd, run_as=package_name).pGet(None)
for output in _PrintPerDeviceOutput(devices, outputs):
for line in output:
print line
else:
adb_path = adb_wrapper.AdbWrapper.GetAdbPath()
cmd = [adb_path, '-s', devices[0].serial, 'shell']
# Pre-N devices do not support -t flag.
if devices[0].build_version_sdk >= version_codes.NOUGAT:
cmd += ['-t', 'run-as', package_name]
else:
print 'Upon entering the shell, run:'
print 'run-as', package_name
print
os.execv(adb_path, cmd)
def _RunCompileDex(devices, package_name, compilation_filter):
cmd = ['cmd', 'package', 'compile', '-f', '-m', compilation_filter,
package_name]
parallel_devices = device_utils.DeviceUtils.parallel(devices)
outputs = parallel_devices.RunShellCommand(cmd).pGet(None)
for output in _PrintPerDeviceOutput(devices, outputs):
for line in output:
print line
def _GenerateAvailableDevicesMessage(devices):
devices_obj = device_utils.DeviceUtils.parallel(devices)
descriptions = devices_obj.pMap(lambda d: d.build_description).pGet(None)
msg = 'Available devices:\n'
for d, desc in zip(devices, descriptions):
msg += ' %s (%s)\n' % (d, desc)
return msg
# TODO(agrieve): Add "--all" in the MultipleDevicesError message and use it here.
def _GenerateMissingAllFlagMessage(devices):
return ('More than one device available. Use --all to select all devices, ' +
'or use --device to select a device by serial.\n\n' +
_GenerateAvailableDevicesMessage(devices))
def _DisplayArgs(devices, device_args_file):
def flags_helper(d):
changer = flag_changer.FlagChanger(d, device_args_file)
return changer.GetCurrentFlags()
parallel_devices = device_utils.DeviceUtils.parallel(devices)
outputs = parallel_devices.pMap(flags_helper).pGet(None)
print 'Existing flags per-device (via /data/local/tmp/%s):' % device_args_file
for flags in _PrintPerDeviceOutput(devices, outputs, single_line=True):
quoted_flags = ' '.join(pipes.quote(f) for f in flags)
print quoted_flags or 'No flags set.'
def _DeviceCachePath(device, output_directory):
file_name = 'device_cache_%s.json' % device.serial
return os.path.join(output_directory, file_name)
def _LoadDeviceCaches(devices, output_directory):
if not output_directory:
return
for d in devices:
cache_path = _DeviceCachePath(d, output_directory)
if os.path.exists(cache_path):
logging.debug('Using device cache: %s', cache_path)
with open(cache_path) as f:
d.LoadCacheData(f.read())
# Delete the cached file so that any exceptions cause it to be cleared.
os.unlink(cache_path)
else:
logging.debug('No cache present for device: %s', d)
def _SaveDeviceCaches(devices, output_directory):
if not output_directory:
return
for d in devices:
cache_path = _DeviceCachePath(d, output_directory)
with open(cache_path, 'w') as f:
f.write(d.DumpCacheData())
logging.info('Wrote device cache: %s', cache_path)
class _Command(object):
name = None
description = None
needs_package_name = False
needs_output_directory = False
needs_apk_path = False
supports_incremental = False
accepts_command_line_flags = False
accepts_args = False
accepts_url = False
all_devices_by_default = False
calls_exec = False
def __init__(self, from_wrapper_script):
self._parser = None
self._from_wrapper_script = from_wrapper_script
self.args = None
self.apk_helper = None
self.install_dict = None
self.devices = None
# Do not support incremental install outside the context of wrapper scripts.
if not from_wrapper_script:
self.supports_incremental = False
def _RegisterExtraArgs(self, subp):
pass
def RegisterArgs(self, parser):
subp = parser.add_parser(self.name, help=self.description)
self._parser = subp
subp.set_defaults(command=self)
subp.add_argument('--all',
action='store_true',
default=self.all_devices_by_default,
help='Operate on all connected devices.',)
subp.add_argument('-d',
'--device',
action='append',
default=[],
dest='devices',
help='Target device for script to work on. Enter '
'multiple times for multiple devices.')
subp.add_argument('-v',
'--verbose',
action='count',
default=0,
dest='verbose_count',
help='Verbose level (multiple times for more)')
group = subp.add_argument_group('%s arguments' % self.name)
if self.needs_package_name:
# Always gleaned from apk when using wrapper scripts.
group.add_argument('--package-name',
help=argparse.SUPPRESS if self._from_wrapper_script else (
"App's package name."))
if self.needs_apk_path or self.needs_package_name:
# Adding this argument to the subparser would override the set_defaults()
# value set by on the parent parser (even if None).
if not self._from_wrapper_script:
group.add_argument('--apk-path',
required=self.needs_apk_path,
help='Path to .apk')
if self.supports_incremental:
group.add_argument('--incremental',
action='store_true',
default=False,
help='Always install an incremental apk.')
group.add_argument('--non-incremental',
action='store_true',
default=False,
help='Always install a non-incremental apk.')
# accepts_command_line_flags and accepts_args are mutually exclusive.
# argparse will throw if they are both set.
if self.accepts_command_line_flags:
group.add_argument('--args', help='Command-line flags.')
if self.accepts_args:
group.add_argument('--args', help='Extra arguments.')
if self.accepts_url:
group.add_argument('url', nargs='?', help='A URL to launch with.')
if not self._from_wrapper_script and self.accepts_command_line_flags:
# Provided by wrapper scripts.
group.add_argument(
'--command-line-flags-file-name',
help='Name of the command-line flags file')
<|fim▁hole|> devices = device_utils.DeviceUtils.HealthyDevices(
device_arg=args.devices,
enable_device_files_cache=bool(args.output_directory),
default_retries=0)
self.args = args
self.devices = devices
# TODO(agrieve): Device cache should not depend on output directory.
    # Maybe put it in /tmp?
_LoadDeviceCaches(devices, args.output_directory)
# Ensure these keys always exist. They are set by wrapper scripts, but not
# always added when not using wrapper scripts.
args.__dict__.setdefault('apk_path', None)
args.__dict__.setdefault('incremental_json', None)
try:
if len(devices) > 1:
if self.calls_exec:
self._parser.error(device_errors.MultipleDevicesError(devices))
if not args.all and not args.devices:
self._parser.error(_GenerateMissingAllFlagMessage(devices))
if self.supports_incremental:
if args.incremental and args.non_incremental:
self._parser.error('Must use only one of --incremental and '
'--non-incremental')
elif args.non_incremental:
if not args.apk_path:
self._parser.error('Apk has not been built.')
args.incremental_json = None
elif args.incremental:
if not args.incremental_json:
self._parser.error('Incremental apk has not been built.')
args.apk_path = None
if args.apk_path and args.incremental_json:
self._parser.error('Both incremental and non-incremental apks exist. '
'Select using --incremental or --non-incremental')
if self.needs_apk_path or args.apk_path or args.incremental_json:
if args.incremental_json:
with open(args.incremental_json) as f:
install_dict = json.load(f)
apk_path = os.path.join(args.output_directory,
install_dict['apk_path'])
if os.path.exists(apk_path):
self.install_dict = install_dict
self.apk_helper = apk_helper.ToHelper(
os.path.join(args.output_directory,
self.install_dict['apk_path']))
if not self.apk_helper and args.apk_path:
self.apk_helper = apk_helper.ToHelper(args.apk_path)
if not self.apk_helper:
self._parser.error(
'Neither incremental nor non-incremental apk is built.')
if self.needs_package_name and not args.package_name:
if self.apk_helper:
args.package_name = self.apk_helper.GetPackageName()
elif self._from_wrapper_script:
self._parser.error(
'Neither incremental nor non-incremental apk is built.')
else:
self._parser.error('One of --package-name or --apk-path is required.')
# Save cache now if command will not get a chance to afterwards.
if self.calls_exec:
_SaveDeviceCaches(devices, args.output_directory)
except:
_SaveDeviceCaches(devices, args.output_directory)
raise
class _DevicesCommand(_Command):
name = 'devices'
description = 'Describe attached devices.'
all_devices_by_default = True
def Run(self):
print _GenerateAvailableDevicesMessage(self.devices)
class _InstallCommand(_Command):
name = 'install'
description = 'Installs the APK to one or more devices.'
needs_apk_path = True
supports_incremental = True
def Run(self):
_InstallApk(self.devices, self.apk_helper, self.install_dict)
class _UninstallCommand(_Command):
name = 'uninstall'
  description = 'Removes the APK from one or more devices.'
needs_package_name = True
def Run(self):
_UninstallApk(self.devices, self.install_dict, self.args.package_name)
class _LaunchCommand(_Command):
name = 'launch'
description = ('Sends a launch intent for the APK after first writing the '
'command-line flags file.')
# TODO(agrieve): Launch could be changed to require only package name by
# parsing "dumpsys package" for launch & view activities.
needs_apk_path = True
accepts_command_line_flags = True
accepts_url = True
all_devices_by_default = True
def Run(self):
_LaunchUrl(self.devices, self.args.args, self.args.command_line_flags_file,
self.args.url, self.apk_helper)
class _RunCommand(_Command):
name = 'run'
description = 'Install and then launch.'
needs_apk_path = True
supports_incremental = True
needs_package_name = True
accepts_command_line_flags = True
accepts_url = True
def Run(self):
logging.warning('Installing...')
_InstallApk(self.devices, self.apk_helper, self.install_dict)
logging.warning('Sending launch intent...')
_LaunchUrl(self.devices, self.args.args, self.args.command_line_flags_file,
self.args.url, self.apk_helper)
class _StopCommand(_Command):
name = 'stop'
description = 'Force-stops the app.'
needs_package_name = True
all_devices_by_default = True
def Run(self):
device_utils.DeviceUtils.parallel(self.devices).ForceStop(
self.args.package_name)
class _ClearDataCommand(_Command):
name = 'clear-data'
  description = 'Clears all app data.'
needs_package_name = True
all_devices_by_default = True
def Run(self):
device_utils.DeviceUtils.parallel(self.devices).ClearApplicationState(
self.args.package_name)
class _ArgvCommand(_Command):
name = 'argv'
description = 'Display and optionally update command-line flags file.'
needs_package_name = True
accepts_command_line_flags = True
all_devices_by_default = True
def Run(self):
_ChangeFlags(self.devices, self.args.args,
self.args.command_line_flags_file)
class _GdbCommand(_Command):
name = 'gdb'
description = 'Runs //build/android/adb_gdb with apk-specific args.'
needs_package_name = True
needs_output_directory = True
accepts_args = True
calls_exec = True
def Run(self):
extra_args = shlex.split(self.args.args or '')
_RunGdb(self.devices[0], self.args.package_name, self.args.output_directory,
self.args.target_cpu, extra_args, bool(self.args.verbose_count))
class _LogcatCommand(_Command):
name = 'logcat'
description = 'Runs "adb logcat" filtering to just the current APK processes'
needs_package_name = True
calls_exec = True
def Run(self):
mapping = self.args.proguard_mapping_path
if self.args.no_deobfuscate:
mapping = None
_RunLogcat(self.devices[0], self.args.package_name,
bool(self.args.verbose_count), mapping)
def _RegisterExtraArgs(self, group):
if self._from_wrapper_script:
group.add_argument('--no-deobfuscate', action='store_true',
help='Disables ProGuard deobfuscation of logcat.')
else:
group.set_defaults(no_deobfuscate=False)
group.add_argument('--proguard-mapping-path',
help='Path to ProGuard map (enables deobfuscation)')
class _PsCommand(_Command):
name = 'ps'
description = 'Show PIDs of any APK processes currently running.'
needs_package_name = True
all_devices_by_default = True
def Run(self):
_RunPs(self.devices, self.args.package_name)
class _DiskUsageCommand(_Command):
name = 'disk-usage'
description = 'Show how much device storage is being consumed by the app.'
needs_package_name = True
all_devices_by_default = True
def Run(self):
_RunDiskUsage(self.devices, self.args.package_name,
bool(self.args.verbose_count))
class _MemUsageCommand(_Command):
name = 'mem-usage'
description = 'Show memory usage of currently running APK processes.'
needs_package_name = True
all_devices_by_default = True
def Run(self):
_RunMemUsage(self.devices, self.args.package_name)
class _ShellCommand(_Command):
name = 'shell'
description = ('Same as "adb shell <command>", but runs as the apk\'s uid '
'(via run-as). Useful for inspecting the app\'s data '
'directory.')
needs_package_name = True
@property
def calls_exec(self):
return not self.args.cmd
def _RegisterExtraArgs(self, group):
group.add_argument(
'cmd', nargs=argparse.REMAINDER, help='Command to run.')
def Run(self):
_RunShell(self.devices, self.args.package_name, self.args.cmd)
class _CompileDexCommand(_Command):
name = 'compile-dex'
description = ('Applicable only for Android N+. Forces .odex files to be '
'compiled with the given compilation filter. To see existing '
'filter, use "disk-usage" command.')
needs_package_name = True
all_devices_by_default = True
def _RegisterExtraArgs(self, group):
group.add_argument(
'compilation_filter',
choices=['verify', 'quicken', 'space-profile', 'space',
'speed-profile', 'speed'],
help='For WebView/Monochrome, use "speed". For other apks, use '
'"speed-profile".')
def Run(self):
_RunCompileDex(self.devices, self.args.package_name,
self.args.compilation_filter)
_COMMANDS = [
_DevicesCommand,
_InstallCommand,
_UninstallCommand,
_LaunchCommand,
_RunCommand,
_StopCommand,
_ClearDataCommand,
_ArgvCommand,
_GdbCommand,
_LogcatCommand,
_PsCommand,
_DiskUsageCommand,
_MemUsageCommand,
_ShellCommand,
_CompileDexCommand,
]
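# A minimal sketch of how a new command could plug into this framework. The
# class below is illustrative only (hypothetical name and behavior) and is
# deliberately not added to _COMMANDS, so it has no effect as written.
class _ExamplePathCommand(_Command):
  name = 'example-path'
  description = 'Prints on-device install paths for the APK (sketch only).'
  needs_package_name = True
  all_devices_by_default = True

  def Run(self):
    for device in self.devices:
      print device.GetApplicationPaths(self.args.package_name)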
def _ParseArgs(parser, from_wrapper_script):
subparsers = parser.add_subparsers()
commands = [clazz(from_wrapper_script) for clazz in _COMMANDS]
for command in commands:
if from_wrapper_script or not command.needs_output_directory:
command.RegisterArgs(subparsers)
# Show extended help when no command is passed.
argv = sys.argv[1:]
if not argv:
argv = ['--help']
return parser.parse_args(argv)
def _RunInternal(parser, output_directory=None):
colorama.init()
parser.set_defaults(output_directory=output_directory)
from_wrapper_script = bool(output_directory)
args = _ParseArgs(parser, from_wrapper_script)
run_tests_helper.SetLogLevel(args.verbose_count)
args.command.ProcessArgs(args)
args.command.Run()
# Incremental install depends on the cache being cleared when uninstalling.
if args.command.name != 'uninstall':
_SaveDeviceCaches(args.command.devices, output_directory)
# TODO(agrieve): Remove =None from target_cpu on or after October 2017.
# It exists only so that stale wrapper scripts continue to work.
def Run(output_directory, apk_path, incremental_json, command_line_flags_file,
target_cpu, proguard_mapping_path):
"""Entry point for generated wrapper scripts."""
constants.SetOutputDirectory(output_directory)
devil_chromium.Initialize(output_directory=output_directory)
parser = argparse.ArgumentParser()
exists_or_none = lambda p: p if p and os.path.exists(p) else None
parser.set_defaults(
command_line_flags_file=command_line_flags_file,
target_cpu=target_cpu,
apk_path=exists_or_none(apk_path),
incremental_json=exists_or_none(incremental_json),
proguard_mapping_path=proguard_mapping_path)
_RunInternal(parser, output_directory=output_directory)
def main():
devil_chromium.Initialize()
_RunInternal(argparse.ArgumentParser(), output_directory=None)
if __name__ == '__main__':
main()<|fim▁end|> | self._RegisterExtraArgs(group)
def ProcessArgs(self, args): |
<|file_name|>Streams.ts<|end_file_name|><|fim▁begin|>export enum Streams {
COLLABORATORS = 400,
METADATA,
DOCUMENT_CONTENT,
CURSOR,
}
export enum StreamsSubtype {
COLLABORATORS_JOIN = 100,
COLLABORATORS_LOCAL_UPDATE,
DOCUMENT_OPERATION,<|fim▁hole|> DOCUMENT_QUERY,
DOCUMENT_REPLY,
METADATA_TITLE,
METADATA_FIXDATA,
METADATA_LOGS,
METADATA_PULSAR,
CRYPTO,
CURSOR,
}
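// A minimal usage sketch (assumed consumer code, not part of this module):
//   const id: StreamId = {
//     type: Streams.DOCUMENT_CONTENT,
//     subtype: StreamsSubtype.DOCUMENT_OPERATION,
//   }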
export interface StreamId {
type: Streams
subtype: StreamsSubtype
}<|fim▁end|> | |
<|file_name|>directives.js<|end_file_name|><|fim▁begin|>'use strict';
angular.module('rvplusplus').directive('initFocus', function() {
return {
restrict: 'A', // only activate on element attribute
link: function(scope, element, attrs) {
            element[0].focus(); // jqLite (unlike full jQuery) has no .focus(), so call the DOM method directly
}
};<|fim▁hole|><|fim▁end|> | }); |
<|file_name|>inputhandler.py<|end_file_name|><|fim▁begin|>'''
File: inputhandler.py
Author: Tristan van Vaalen
Handles user input
'''
import signal
import sys
import verbose
v = verbose.Verbose()
class InputHandler():
def __init__(self):
v.debug('Initializing input handler').indent()
self.running = True
self.signal_level = 0
v.debug('Registering signal handler').unindent()
signal.signal(signal.SIGINT, self.signal_handler)
def test(self):
pass
def signal_handler(self, signal, frame):
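        # The first Ctrl+C stops the input loop gracefully; any further
        # Ctrl+C exits the process immediately.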
self.signal_level += 1
if self.signal_level == 1:
self.running = False
else:
sys.exit(0)
def output_options(self):
v.write(
'Available options:\n' +
' - help: prints this message\n' +
            ' - exit: exit program\n' +
' - test: magic'
)
def get(self):
v.debug('Entering input loop')
v.write('AUDIOLYZE v0.01\nPress ctrl+D to exit')
while self.running:
try:
self._parse_input(raw_input('>>> '))
except EOFError:
v.write('EOF received')
self.running = False
v.write('Goodbye')
def _parse_input(self, raw):
raw = raw.strip()<|fim▁hole|> elif raw in ['quit', 'exit', 'stop', 'abort']:
self.running = False
elif raw in ['test']:
self.test()
else:
v.write(
'Invalid command \'{}\'. Try \'help\' for a list of commands'
.format(raw)
)<|fim▁end|> |
if raw in ['help', 'h', '?']:
self.output_options()
|
<|file_name|>test.rs<|end_file_name|><|fim▁begin|>use intern::intern;
use grammar::repr::*;
use lr1::Lookahead;
use lr1::Lookahead::EOF;
use test_util::{normalized_grammar};
use super::FirstSets;
pub fn nt(t: &str) -> Symbol {
Symbol::Nonterminal(NonterminalString(intern(t)))
}
pub fn term(t: &str) -> Symbol {
Symbol::Terminal(TerminalString::Quoted(intern(t)))
}
fn la(t: &str) -> Lookahead {
Lookahead::Terminal(TerminalString::Quoted(intern(t)))
}
fn first(first: &FirstSets, symbols: &[Symbol], lookahead: Lookahead) -> Vec<Lookahead> {
let mut v = first.first(symbols, lookahead);
v.sort();
v
}
#[test]
fn basic() {
let grammar = normalized_grammar(r#"
grammar;
extern { enum Tok { } }
A = B "C";
B: Option<u32> = {
"D" => Some(1);
=> None;
};
"#);
let first_sets = FirstSets::new(&grammar);
assert_eq!(
first(&first_sets, &[nt("A")], EOF),
vec![la("C"), la("D")]);
<|fim▁hole|> first(&first_sets, &[nt("B")], EOF),
vec![EOF, la("D")]);
assert_eq!(
first(&first_sets, &[nt("B"), term("E")], EOF),
vec![la("D"), la("E")]);
}<|fim▁end|> | assert_eq!( |
<|file_name|>0006_auto_20161209_0108.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|>from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("hordak", "0005_account_currencies")]
operations = [
migrations.RunSQL(
"""
CREATE OR REPLACE FUNCTION check_leg()
RETURNS trigger AS
$$
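            -- Enforce the double-entry invariant: the legs of each
            -- transaction must sum to zero within every currency used.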
DECLARE
tx_id INT;
non_zero RECORD;
BEGIN
IF (TG_OP = 'DELETE') THEN
tx_id := OLD.transaction_id;
ELSE
tx_id := NEW.transaction_id;
END IF;
SELECT ABS(SUM(amount)) AS total, amount_currency AS currency
INTO non_zero
FROM hordak_leg
WHERE transaction_id = tx_id
GROUP BY amount_currency
HAVING ABS(SUM(amount)) > 0
LIMIT 1;
IF FOUND THEN
RAISE EXCEPTION 'Sum of transaction amounts in each currency must be 0. Currency % has non-zero total %',
non_zero.currency, non_zero.total;
END IF;
RETURN NEW;
END;
$$
LANGUAGE plpgsql;
"""
)
]<|fim▁end|> | # Generated by Django 1.10.1 on 2016-12-09 01:08
from __future__ import unicode_literals
|
<|file_name|>WorkbenchCmd.java<|end_file_name|><|fim▁begin|>package daanielz.tools.commands;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;<|fim▁hole|>import daanielz.tools.Utils;
public class WorkbenchCmd implements CommandExecutor {
public boolean onCommand(CommandSender sender, Command cmd, String label, String[] args) {
if(sender instanceof Player){
Player p = (Player) sender;
if(cmd.getName().equalsIgnoreCase("workbench")){
if(!sender.hasPermission("vetesda.workbench")){
				sender.sendMessage(Utils.getColor("&8» &7You don't have permission."));
} else{
p.openWorkbench(null, true);
}
} else if(cmd.getName().equalsIgnoreCase("enchanttable")){
if(!sender.hasPermission("vetesda.enchanttable")){
				sender.sendMessage(Utils.getColor("&8» &7You don't have permission."));
} else{
p.openEnchanting(null, true);
}
}
		}
		return true;
}
}<|fim▁end|> | import org.bukkit.entity.Player;
|
<|file_name|>GraphProgram.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.nott.mrl.gles.program;
import android.opengl.GLES20;
import android.util.Log;
import com.android.grafika.gles.GlUtil;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
public class GraphProgram
{
private static final int SIZEOF_FLOAT = 4;
private static final int VERTEX_STRIDE = SIZEOF_FLOAT * 2;
private static final String TAG = GlUtil.TAG;
private static final String VERTEX_SHADER =
"uniform mat4 uMVPMatrix;" +
"attribute vec4 aPosition;" +
"void main() {" +
" gl_Position = uMVPMatrix * aPosition;" +
"}";
private static final String FRAGMENT_SHADER =
"precision mediump float;" +
"uniform vec4 uColor;" +
"void main() {" +
" gl_FragColor = uColor;" +
"}";
private final int MAX_SIZE = 200;
// Handles to the GL program and various components of it.
private int programHandle = -1;
private int colorLocation = -1;
private int matrixLocation = -1;
private int positionLocation = -1;
private final float[] colour = {1f, 1f, 1f, 1f};
private final FloatBuffer points;
private final float[] values = new float[MAX_SIZE];
private int size = 0;
private int offset = 0;
private boolean bufferValid = false;
private float min = Float.MAX_VALUE;
	private float max = -Float.MAX_VALUE; // Float.MIN_VALUE is the smallest positive float, not the most negative
private static final float left = 1.8f;
private static final float right = 0.2f;
private static final float top = 0.8f;
private static final float bottom = -0.8f;
/**
* Prepares the program in the current EGL context.
*/<|fim▁hole|> {
programHandle = GlUtil.createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
if (programHandle == 0)
{
throw new RuntimeException("Unable to create program");
}
Log.d(TAG, "Created program " + programHandle);
// get locations of attributes and uniforms
ByteBuffer bb = ByteBuffer.allocateDirect(MAX_SIZE * VERTEX_STRIDE);
bb.order(ByteOrder.nativeOrder());
points = bb.asFloatBuffer();
positionLocation = GLES20.glGetAttribLocation(programHandle, "aPosition");
GlUtil.checkLocation(positionLocation, "aPosition");
matrixLocation = GLES20.glGetUniformLocation(programHandle, "uMVPMatrix");
GlUtil.checkLocation(matrixLocation, "uMVPMatrix");
colorLocation = GLES20.glGetUniformLocation(programHandle, "uColor");
GlUtil.checkLocation(colorLocation, "uColor");
}
/**
* Releases the program.
*/
public void release()
{
GLES20.glDeleteProgram(programHandle);
programHandle = -1;
}
public synchronized void add(float value)
{
values[offset] = value;
min = Math.min(value, min);
max = Math.max(value, max);
size = Math.min(size + 1, MAX_SIZE);
offset = (offset + 1) % MAX_SIZE;
bufferValid = false;
}
public void setColour(final float r, final float g, final float b)
{
colour[0] = r;
colour[1] = g;
colour[2] = b;
}
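	// Rebuilds the vertex buffer lazily: samples are read oldest-first from
	// the ring buffer (starting at `offset`) and rescaled from [min, max]
	// into the fixed graph rectangle before being packed as (x, y) pairs.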
private synchronized FloatBuffer getValidBuffer()
{
if (!bufferValid)
{
points.position(0);
for(int index = 0; index < size; index++)
{
float value = values[(offset + index) % size];
float scaledValue = ((value - min) / (max - min) * (top - bottom)) + bottom;
//Log.i(TAG, "x=" + ((index * (right - left) / size) + left) + ", y=" + scaledValue);
points.put((index * (right - left) / (size - 1)) + left);
points.put(scaledValue);
}
points.position(0);
bufferValid = true;
}
return points;
}
public void draw(float[] matrix)
{
GlUtil.checkGlError("draw start");
// Select the program.
GLES20.glUseProgram(programHandle);
GlUtil.checkGlError("glUseProgram");
// Copy the model / view / projection matrix over.
GLES20.glUniformMatrix4fv(matrixLocation, 1, false, matrix, 0);
GlUtil.checkGlError("glUniformMatrix4fv");
// Copy the color vector in.
GLES20.glUniform4fv(colorLocation, 1, colour, 0);
GlUtil.checkGlError("glUniform4fv ");
// Enable the "aPosition" vertex attribute.
GLES20.glEnableVertexAttribArray(positionLocation);
GlUtil.checkGlError("glEnableVertexAttribArray");
// Connect vertexBuffer to "aPosition".
GLES20.glVertexAttribPointer(positionLocation, 2,
GLES20.GL_FLOAT, false, VERTEX_STRIDE, getValidBuffer());
GlUtil.checkGlError("glVertexAttribPointer");
// Draw the rect.
GLES20.glDrawArrays(GLES20.GL_LINE_STRIP, 0, size);
GlUtil.checkGlError("glDrawArrays");
// Done -- disable vertex array and program.
GLES20.glDisableVertexAttribArray(positionLocation);
GLES20.glUseProgram(0);
}
}<|fim▁end|> | public GraphProgram() |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var LOTUS = Symbol.for('lotus');
var lotus = global[LOTUS];
if (!lotus) {
var lotusPath = process.env.LOTUS_PATH;
// Try using the local version.
if (lotusPath) {
lotusPath += '/lotus-require';
if (__dirname === lotusPath) {
// We are already using the local version.
}
else if (require('fs').existsSync(lotusPath)) {
lotus = require(lotusPath);
}
}
// Default to using the installed remote version.
if (!lotus) {
lotus = require('./js/index');
}<|fim▁hole|>}
module.exports = lotus;<|fim▁end|> |
global[LOTUS] = lotus; |
<|file_name|>olpc-muc-prop-change.py<|end_file_name|><|fim▁begin|>"""
Test OLPC MUC properties.
"""
import dbus
from twisted.words.xish import domish, xpath
from gabbletest import exec_test, acknowledge_iq, make_muc_presence
from servicetest import call_async, EventPattern, wrap_channel
import constants as cs
import ns
from mucutil import echo_muc_presence
def test(q, bus, conn, stream):
iq_event = q.expect('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
buddy_iface = dbus.Interface(conn, 'org.laptop.Telepathy.BuddyInfo')
act_prop_iface = dbus.Interface(conn, 'org.laptop.Telepathy.ActivityProperties')
bob_handle = conn.get_contact_handle_sync('bob@localhost')
# Bob invites us to a chatroom, pre-seeding properties
message = domish.Element(('jabber:client', 'message'))
message['from'] = 'bob@localhost'
message['to'] = 'test@localhost'
properties = message.addElement(
(ns.OLPC_ACTIVITY_PROPS, 'properties'))
properties['room'] = '[email protected]'
properties['activity'] = 'foo_id'
property = properties.addElement((None, 'property'))
property['type'] = 'str'
property['name'] = 'title'
property.addContent('From the invitation')
property = properties.addElement((None, 'property'))
property['type'] = 'bool'
property['name'] = 'private'
property.addContent('1')
stream.send(message)
message = domish.Element((None, 'message'))
message['from'] = '[email protected]'
message['to'] = 'test@localhost'
x = message.addElement((ns.MUC_USER, 'x'))
invite = x.addElement((None, 'invite'))
invite['from'] = 'bob@localhost'
reason = invite.addElement((None, 'reason'))
reason.addContent('No good reason')
stream.send(message)
event = q.expect('dbus-signal', signal='NewChannel')
assert event.args[1] == cs.CHANNEL_TYPE_TEXT
<|fim▁hole|> room_handle = 1
text_chan = wrap_channel(bus.get_object(conn.bus_name, event.args[0]),
'Text')
group_iface = text_chan.Group
members = group_iface.GetAllMembers()[0]
local_pending = group_iface.GetAllMembers()[1]
remote_pending = group_iface.GetAllMembers()[2]
assert len(members) == 1
assert conn.inspect_contact_sync(members[0]) == 'bob@localhost'
bob_handle = members[0]
assert len(local_pending) == 1
# FIXME: the username-part-is-nickname assumption
assert conn.inspect_contact_sync(local_pending[0]) == \
'[email protected]/test'
assert len(remote_pending) == 0
room_self_handle = text_chan.Properties.Get(cs.CHANNEL_IFACE_GROUP,
"SelfHandle")
assert room_self_handle == local_pending[0]
# by now, we should have picked up the extra activity properties
buddy_iface = dbus.Interface(conn, 'org.laptop.Telepathy.BuddyInfo')
call_async(q, buddy_iface, 'GetActivities', bob_handle)
event = q.expect('stream-iq', iq_type='get', to='bob@localhost')
# Bob still has no (public) activities
event.stanza['type'] = 'result'
event.stanza['to'] = 'test@localhost'
event.stanza['from'] = 'bob@localhost'
stream.send(event.stanza)
event = q.expect('dbus-return', method='GetActivities')
assert event.value == ([('foo_id', room_handle)],)
props = act_prop_iface.GetProperties(room_handle)
assert len(props) == 2
assert props['title'] == 'From the invitation'
assert props['private'] == True
# Now Bob changes the properties
message = domish.Element(('jabber:client', 'message'))
message['from'] = 'bob@localhost'
message['to'] = 'test@localhost'
properties = message.addElement(
(ns.OLPC_ACTIVITY_PROPS, 'properties'))
properties['room'] = '[email protected]'
properties['activity'] = 'foo_id'
property = properties.addElement((None, 'property'))
property['type'] = 'str'
property['name'] = 'title'
property.addContent('Mushroom, mushroom')
property = properties.addElement((None, 'property'))
property['type'] = 'bool'
property['name'] = 'private'
property.addContent('0')
stream.send(message)
event = q.expect('dbus-signal', signal='ActivityPropertiesChanged')
assert event.args == [room_handle, {'title': 'Mushroom, mushroom',
'private': False }]
assert act_prop_iface.GetProperties(room_handle) == \
event.args[1]
# OK, now accept the invitation
call_async(q, group_iface, 'AddMembers', [room_self_handle], 'Oh, OK then')
q.expect_many(
EventPattern('stream-presence', to='[email protected]/test'),
EventPattern('dbus-signal', signal='MembersChanged',
args=['', [], [bob_handle], [], [room_self_handle],
0, cs.GC_REASON_INVITED]),
EventPattern('dbus-return', method='AddMembers'),
)
# Send presence for own membership of room.
stream.send(make_muc_presence('owner', 'moderator', '[email protected]', 'test'))
event = q.expect('dbus-signal', signal='MembersChanged')
assert event.args == ['', [room_self_handle], [], [], [], 0, 0]
call_async(q, buddy_iface, 'SetActivities', [('foo_id', room_handle)])
event = q.expect('stream-iq', iq_type='set')
# Now that it's not private, it'll go in my PEP
event.stanza['type'] = 'result'
event.stanza['to'] = 'test@localhost'
event.stanza['from'] = 'test@localhost'
stream.send(event.stanza)
q.expect('dbus-return', method='SetActivities')
# Bob changes the properties and tells the room he's done so
message = domish.Element(('jabber:client', 'message'))
message['from'] = '[email protected]/bob'
message['to'] = '[email protected]'
properties = message.addElement(
(ns.OLPC_ACTIVITY_PROPS, 'properties'))
properties['activity'] = 'foo_id'
property = properties.addElement((None, 'property'))
property['type'] = 'str'
property['name'] = 'title'
property.addContent('Badger badger badger')
property = properties.addElement((None, 'property'))
property['type'] = 'bool'
property['name'] = 'private'
property.addContent('0')
stream.send(message)
event = q.expect('stream-iq', iq_type='set')
message = event.stanza
activities = xpath.queryForNodes('/iq/pubsub/publish/item/activities',
message)
assert (activities is not None and len(activities) == 1), repr(activities)
assert activities[0].uri == ns.OLPC_ACTIVITY_PROPS
properties = xpath.queryForNodes('/activities/properties', activities[0])
assert (properties is not None and len(properties) == 1), repr(properties)
assert properties[0].uri == ns.OLPC_ACTIVITY_PROPS
assert properties[0]['room'] == '[email protected]'
assert properties[0]['activity'] == 'foo_id'
property = xpath.queryForNodes('/properties/property', properties[0])
assert (property is not None and len(property) == 2), repr(property)
seen = set()
for p in property:
seen.add(p['name'])
if p['name'] == 'title':
assert p['type'] == 'str'
assert str(p) == 'Badger badger badger'
elif p['name'] == 'private':
assert p['type'] == 'bool'
assert str(p) == '0'
else:
assert False, 'Unexpected property %s' % p['name']
assert 'title' in seen, seen
assert 'private' in seen, seen
event.stanza['type'] = 'result'
event.stanza['to'] = 'test@localhost'
event.stanza['from'] = 'test@localhost'
stream.send(event.stanza)
act_prop_iface = dbus.Interface(conn, 'org.laptop.Telepathy.ActivityProperties')
# test sets the title and sets private back to True
call_async(q, act_prop_iface, 'SetProperties',
room_handle, {'title': 'I can set the properties too', 'private': True})
event = q.expect('stream-message', to='[email protected]')
message = event.stanza
properties = xpath.queryForNodes('/message/properties', message)
assert (properties is not None and len(properties) == 1), repr(properties)
assert properties[0].uri == ns.OLPC_ACTIVITY_PROPS
assert properties[0]['room'] == '[email protected]'
assert properties[0]['activity'] == 'foo_id'
property = xpath.queryForNodes('/properties/property', properties[0])
assert (property is not None and len(property) == 2), repr(property)
seen = set()
for p in property:
seen.add(p['name'])
if p['name'] == 'title':
assert p['type'] == 'str'
assert str(p) == 'I can set the properties too'
elif p['name'] == 'private':
assert p['type'] == 'bool'
assert str(p) == '1'
else:
assert False, 'Unexpected property %s' % p['name']
assert 'title' in seen, seen
assert 'private' in seen, seen
event = q.expect('stream-iq', iq_type='set')
event.stanza['type'] = 'result'
event.stanza['to'] = 'test@localhost'
event.stanza['from'] = 'test@localhost'
stream.send(event.stanza)
message = event.stanza
activities = xpath.queryForNodes('/iq/pubsub/publish/item/activities',
message)
assert (activities is not None and len(activities) == 1), repr(activities)
assert activities[0].uri == ns.OLPC_ACTIVITY_PROPS
properties = xpath.queryForNodes('/activities/properties', activities[0])
assert properties is None, repr(properties)
event = q.expect('stream-iq', iq_type='set')
event.stanza['type'] = 'result'
event.stanza['to'] = 'test@localhost'
event.stanza['from'] = 'test@localhost'
stream.send(event.stanza)
message = event.stanza
activities = xpath.queryForNodes('/iq/pubsub/publish/item/activities',
message)
assert (activities is not None and len(activities) == 1), repr(activities)
assert activities[0].uri == ns.OLPC_ACTIVITIES
activity = xpath.queryForNodes('/activities/activity', activities[0])
assert activity is None, repr(activity)
q.expect('dbus-return', method='SetProperties')
# test sets the title and sets private back to True
call_async(q, act_prop_iface, 'SetProperties',
room_handle, {'title': 'I can set the properties too',
'private': False})
event = q.expect('stream-message', to='[email protected]')
message = event.stanza
properties = xpath.queryForNodes('/message/properties', message)
assert (properties is not None and len(properties) == 1), repr(properties)
assert properties[0].uri == ns.OLPC_ACTIVITY_PROPS
assert properties[0]['room'] == '[email protected]'
assert properties[0]['activity'] == 'foo_id'
property = xpath.queryForNodes('/properties/property', properties[0])
assert (property is not None and len(property) == 2), repr(property)
seen = set()
for p in property:
seen.add(p['name'])
if p['name'] == 'title':
assert p['type'] == 'str'
assert str(p) == 'I can set the properties too'
elif p['name'] == 'private':
assert p['type'] == 'bool'
assert str(p) == '0'
else:
assert False, 'Unexpected property %s' % p['name']
assert 'title' in seen, seen
assert 'private' in seen, seen
event = q.expect('stream-iq', iq_type='set')
event.stanza['type'] = 'result'
event.stanza['to'] = 'test@localhost'
event.stanza['from'] = 'test@localhost'
stream.send(event.stanza)
message = event.stanza
activities = xpath.queryForNodes('/iq/pubsub/publish/item/activities',
message)
assert (activities is not None and len(activities) == 1), repr(activities)
assert activities[0].uri == ns.OLPC_ACTIVITY_PROPS
properties = xpath.queryForNodes('/activities/properties', activities[0])
assert (properties is not None and len(properties) == 1), repr(properties)
assert properties[0].uri == ns.OLPC_ACTIVITY_PROPS
assert properties[0]['room'] == '[email protected]'
assert properties[0]['activity'] == 'foo_id'
property = xpath.queryForNodes('/properties/property', properties[0])
assert (property is not None and len(property) == 2), repr(property)
seen = set()
for p in property:
seen.add(p['name'])
if p['name'] == 'title':
assert p['type'] == 'str'
assert str(p) == 'I can set the properties too'
elif p['name'] == 'private':
assert p['type'] == 'bool'
assert str(p) == '0'
else:
assert False, 'Unexpected property %s' % p['name']
assert 'title' in seen, seen
assert 'private' in seen, seen
event = q.expect('stream-iq', iq_type='set')
event.stanza['type'] = 'result'
event.stanza['to'] = 'test@localhost'
event.stanza['from'] = 'test@localhost'
stream.send(event.stanza)
message = event.stanza
activities = xpath.queryForNodes('/iq/pubsub/publish/item/activities',
message)
assert (activities is not None and len(activities) == 1), repr(activities)
assert activities[0].uri == ns.OLPC_ACTIVITIES
activity = xpath.queryForNodes('/activities/activity', activities[0])
assert (activity is not None and len(activity) == 1), repr(activity)
assert activity[0]['room'] == '[email protected]'
assert activity[0]['type'] == 'foo_id' # sic
q.expect('dbus-return', method='SetProperties')
text_chan.Close()
# we must echo the MUC presence so the room will actually close
event = q.expect('stream-presence', to='[email protected]/test',
presence_type='unavailable')
echo_muc_presence(q, stream, event.stanza, 'none', 'participant')
event = q.expect('stream-iq', iq_type='set')
event.stanza['type'] = 'result'
event.stanza['to'] = 'test@localhost'
event.stanza['from'] = 'test@localhost'
stream.send(event.stanza)
message = event.stanza
activities = xpath.queryForNodes('/iq/pubsub/publish/item/activities',
message)
assert (activities is not None and len(activities) == 1), repr(activities)
assert activities[0].uri == ns.OLPC_ACTIVITIES
activity = xpath.queryForNodes('/activities/activity', activities[0])
assert activity is None, repr(activity)
event = q.expect('stream-iq', iq_type='set')
event.stanza['type'] = 'result'
event.stanza['to'] = 'test@localhost'
event.stanza['from'] = 'test@localhost'
stream.send(event.stanza)
message = event.stanza
activities = xpath.queryForNodes('/iq/pubsub/publish/item/activities',
message)
assert (activities is not None and len(activities) == 1), repr(activities)
assert activities[0].uri == ns.OLPC_ACTIVITY_PROPS
properties = xpath.queryForNodes('/activities/properties', activities[0])
assert properties is None, repr(properties)
if __name__ == '__main__':
exec_test(test)<|fim▁end|> | assert event.args[2] == 2 # handle type
assert event.args[3] == 1 # handle |
<|file_name|>VisibleScope.java<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2009-2014 Dell, Inc.
* See annotations for authorship information
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.dasein.cloud;
/**
 * Defines the scope at which a resource is visible.
* <p>Created by AndyLyall: 02/25/14 13:35 PM</p>
* @author Andy Lyall
* @version 2014.03 initial version
* @since 2014.03
*/
public enum VisibleScope {
/**
     * Resource is visible across the entire account
*/
ACCOUNT_GLOBAL,
/**
* Resource is visible across one whole region
*/
ACCOUNT_REGION,<|fim▁hole|> ACCOUNT_DATACENTER
}<|fim▁end|> |
/**
* Resource is visible across one whole datacenter
*/ |
<|file_name|>loadProfile.js<|end_file_name|><|fim▁begin|>game.LoadProfile = me.ScreenObject.extend({
/**
* action to perform on state change
*/
onResetEvent: function() {
me.game.world.addChild(new me.Sprite(0, 0, me.loader.getImage('load-screen')), -10);
//puts load screen in when game starts
<|fim▁hole|> document.getElementById("input").style.visibility = "visible";
document.getElementById("load").style.visibility = "visible";
me.input.unbindKey(me.input.KEY.B);
me.input.unbindKey(me.input.KEY.I);
me.input.unbindKey(me.input.KEY.O);
me.input.unbindKey(me.input.KEY.P);
me.input.unbindKey(me.input.KEY.SPACE);
//unbinds keys
var exp1cost = ((game.data.exp1 + 1) * 10);
var exp2cost = ((game.data.exp2 + 1) * 10);
var exp3cost = ((game.data.exp3 + 1) * 10);
var exp4cost = ((game.data.exp4 + 1) * 10);
me.game.world.addChild(new (me.Renderable.extend({
init: function() {
this._super(me.Renderable, 'init', [10, 10, 300, 50]);
this.font = new me.Font("Arial", 26, "white");
},
draw: function(renderer) {
this.font.draw(renderer.getContext(), "Enter Username & Password", this.pos.x, this.pos.y);
}
})));
},
/**
* action to perform when leaving this screen (state change)
*/
onDestroyEvent: function() {
document.getElementById("input").style.visibility = "hidden";
document.getElementById("load").style.visibility = "hidden";
}
});<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (C) 2003 CAMP
# Please see the accompanying LICENSE file for further information.
"""Main gpaw module."""
import os
import sys
try:
from distutils.util import get_platform
except ImportError:
modulepath = os.environ.get('GPAW_GET_PLATFORM')
if modulepath is None:
errmsg = ('Error: Could not get platform from distutils. '
'Set the GPAW_GET_PLATFORM environment variable to '
'the architecture string printed during build.')
raise ImportError(errmsg)
def get_platform():
return modulepath
from glob import glob
from os.path import join, isfile
import numpy as np
assert not np.version.version.startswith('1.6.0')
__all__ = ['GPAW', 'Calculator',
'Mixer', 'MixerSum', 'MixerDif', 'MixerSum2',
'CG', 'Davidson', 'RMM_DIIS', 'LCAO',
'PoissonSolver',
'FermiDirac', 'MethfesselPaxton',
'restart']
class ConvergenceError(Exception):
pass
class KohnShamConvergenceError(ConvergenceError):
pass
class PoissonConvergenceError(ConvergenceError):
pass
# Check for special command line arguments:
debug = False
trace = False
dry_run = 0
memory_estimate_depth = 2
parsize_domain = None
parsize_bands = None
sl_default = None
sl_diagonalize = None
sl_inverse_cholesky = None
sl_lcao = None
sl_lrtddft = None
buffer_size = None
extra_parameters = {}
profile = False
i = 1
while len(sys.argv) > i:
arg = sys.argv[i]
if arg.startswith('--gpaw-'):
# Found old-style gpaw command line argument:
arg = '--' + arg[7:]
raise RuntimeError('Warning: Use %s instead of %s.' %
(arg, sys.argv[i]))
if arg == '--trace':
trace = True
elif arg == '--debug':
debug = True
elif arg.startswith('--dry-run'):
dry_run = 1
if len(arg.split('=')) == 2:
dry_run = int(arg.split('=')[1])
elif arg.startswith('--memory-estimate-depth'):
memory_estimate_depth = -1
if len(arg.split('=')) == 2:
memory_estimate_depth = int(arg.split('=')[1])
elif arg.startswith('--domain-decomposition='):
parsize_domain = [int(n) for n in arg.split('=')[1].split(',')]
if len(parsize_domain) == 1:
parsize_domain = parsize_domain[0]
else:
assert len(parsize_domain) == 3
elif arg.startswith('--state-parallelization='):
parsize_bands = int(arg.split('=')[1])
elif arg.startswith('--sl_default='):
# --sl_default=nprow,npcol,mb,cpus_per_node
# use 'd' for the default of one or more of the parameters
# --sl_default=default to use all default values
sl_args = [n for n in arg.split('=')[1].split(',')]
if len(sl_args) == 1:
assert sl_args[0] == 'default'
sl_default = ['d'] * 3
else:
sl_default = []
assert len(sl_args) == 3
for sl_args_index in range(len(sl_args)):
assert sl_args[sl_args_index] is not None
                if sl_args[sl_args_index] != 'd':
assert int(sl_args[sl_args_index]) > 0
sl_default.append(int(sl_args[sl_args_index]))
else:
sl_default.append(sl_args[sl_args_index])
elif arg.startswith('--sl_diagonalize='):
# --sl_diagonalize=nprow,npcol,mb,cpus_per_node
# use 'd' for the default of one or more of the parameters
# --sl_diagonalize=default to use all default values
sl_args = [n for n in arg.split('=')[1].split(',')]
if len(sl_args) == 1:
assert sl_args[0] == 'default'
sl_diagonalize = ['d'] * 3
else:
sl_diagonalize = []
assert len(sl_args) == 3
for sl_args_index in range(len(sl_args)):
assert sl_args[sl_args_index] is not None
                if sl_args[sl_args_index] != 'd':
assert int(sl_args[sl_args_index]) > 0
sl_diagonalize.append(int(sl_args[sl_args_index]))
else:
sl_diagonalize.append(sl_args[sl_args_index])
elif arg.startswith('--sl_inverse_cholesky='):
# --sl_inverse_cholesky=nprow,npcol,mb,cpus_per_node
# use 'd' for the default of one or more of the parameters
# --sl_inverse_cholesky=default to use all default values
sl_args = [n for n in arg.split('=')[1].split(',')]
if len(sl_args) == 1:
assert sl_args[0] == 'default'
sl_inverse_cholesky = ['d'] * 3
else:
sl_inverse_cholesky = []
assert len(sl_args) == 3
for sl_args_index in range(len(sl_args)):
assert sl_args[sl_args_index] is not None
                if sl_args[sl_args_index] != 'd':
assert int(sl_args[sl_args_index]) > 0
sl_inverse_cholesky.append(int(sl_args[sl_args_index]))
else:
sl_inverse_cholesky.append(sl_args[sl_args_index])<|fim▁hole|> elif arg.startswith('--sl_lcao='):
# --sl_lcao=nprow,npcol,mb,cpus_per_node
# use 'd' for the default of one or more of the parameters
# --sl_lcao=default to use all default values
sl_args = [n for n in arg.split('=')[1].split(',')]
if len(sl_args) == 1:
assert sl_args[0] == 'default'
sl_lcao = ['d'] * 3
else:
sl_lcao = []
assert len(sl_args) == 3
for sl_args_index in range(len(sl_args)):
assert sl_args[sl_args_index] is not None
if sl_args[sl_args_index] is not 'd':
assert int(sl_args[sl_args_index]) > 0
sl_lcao.append(int(sl_args[sl_args_index]))
else:
sl_lcao.append(sl_args[sl_args_index])
elif arg.startswith('--sl_lrtddft='):
# --sl_lcao=nprow,npcol,mb,cpus_per_node
# use 'd' for the default of one or more of the parameters
# --sl_lcao=default to use all default values
sl_args = [n for n in arg.split('=')[1].split(',')]
if len(sl_args) == 1:
assert sl_args[0] == 'default'
sl_lrtddft = ['d'] * 3
else:
sl_lrtddft = []
assert len(sl_args) == 3
for sl_args_index in range(len(sl_args)):
assert sl_args[sl_args_index] is not None
if sl_args[sl_args_index] is not 'd':
assert int(sl_args[sl_args_index]) > 0
sl_lrtddft.append(int(sl_args[sl_args_index]))
else:
sl_lrtddft.append(sl_args[sl_args_index])
elif arg.startswith('--buffer_size='):
# Buffer size for MatrixOperator in MB
buffer_size = int(arg.split('=')[1])
elif arg.startswith('--gpaw='):
extra_parameters = eval('dict(%s)' % arg[7:])
elif arg == '--gpaw':
extra_parameters = eval('dict(%s)' % sys.argv.pop(i + 1))
elif arg.startswith('--profile='):
profile = arg.split('=')[1]
else:
i += 1
continue
# Delete used command line argument:
del sys.argv[i]
if debug:
np.seterr(over='raise', divide='raise', invalid='raise', under='ignore')
oldempty = np.empty
def empty(*args, **kwargs):
a = oldempty(*args, **kwargs)
try:
a.fill(np.nan)
except ValueError:
a.fill(-1000000)
return a
np.empty = empty
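    # With the raised floating-point errors configured above, any arithmetic
    # on these NaN-filled "empty" arrays fails loudly, flagging reads of
    # uninitialized memory in --debug mode.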
build_path = join(__path__[0], '..', 'build')
arch = '%s-%s' % (get_platform(), sys.version[0:3])
# If we are running the code from the source directory, then we will
# want to use the extension from the distutils build directory:
sys.path.insert(0, join(build_path, 'lib.' + arch))
def get_gpaw_python_path():
paths = os.environ['PATH'].split(os.pathsep)
paths.insert(0, join(build_path, 'bin.' + arch))
for path in paths:
if isfile(join(path, 'gpaw-python')):
return path
raise RuntimeError('Could not find gpaw-python!')
try:
setup_paths = os.environ['GPAW_SETUP_PATH'].split(os.pathsep)
except KeyError:
if os.pathsep == ';':
setup_paths = [r'C:\gpaw-setups']
else:
setup_paths = ['/usr/local/share/gpaw-setups',
'/usr/share/gpaw-setups']
from gpaw.aseinterface import GPAW
from gpaw.mixer import Mixer, MixerSum, MixerDif, MixerSum2
from gpaw.eigensolvers import Davidson, RMM_DIIS, CG, LCAO
from gpaw.poisson import PoissonSolver
from gpaw.occupations import FermiDirac, MethfesselPaxton
from gpaw.wavefunctions.pw import PW
class Calculator(GPAW):
def __init__(self, *args, **kwargs):
sys.stderr.write('Please start using GPAW instead of Calculator!\n')
GPAW.__init__(self, *args, **kwargs)
def restart(filename, Class=GPAW, **kwargs):
calc = Class(filename, **kwargs)
atoms = calc.get_atoms()
return atoms, calc
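# Usage sketch (the file name is illustrative):
#   atoms, calc = restart('relaxed.gpw')
#   atoms.get_potential_energy()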
if trace:
indent = ' '
path = __path__[0]
from gpaw.mpi import parallel, rank
if parallel:
indent = 'CPU%d ' % rank
def f(frame, event, arg):
global indent
f = frame.f_code.co_filename
if not f.startswith(path):
return
if event == 'call':
print('%s%s:%d(%s)' % (indent, f[len(path):], frame.f_lineno,
frame.f_code.co_name))
indent += '| '
elif event == 'return':
indent = indent[:-2]
sys.setprofile(f)
if profile:
from cProfile import Profile
import atexit
prof = Profile()
def f(prof, filename):
prof.disable()
from gpaw.mpi import rank
if filename == '-':
prof.print_stats('time')
else:
prof.dump_stats(filename + '.%04d' % rank)
atexit.register(f, prof, profile)
prof.enable()
command = os.environ.get('GPAWSTARTUP')
if command is not None:
exec(command)
def is_parallel_environment():
"""Check if we are running in a parallel environment.
This function can be redefined in ~/.gpaw/rc.py. Example::
def is_parallel_environment():
import os
return 'PBS_NODEFILE' in os.environ
"""
return False
home = os.environ.get('HOME')
if home is not None:
rc = os.path.join(home, '.gpaw', 'rc.py')
if os.path.isfile(rc):
# Read file in ~/.gpaw/rc.py
execfile(rc)<|fim▁end|> | |
<|file_name|>base.py<|end_file_name|><|fim▁begin|># Unix SMB/CIFS implementation.
# Copyright (C) Sean Dague <[email protected]> 2011
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# This provides a wrapper around the cmd interface so that tests can
# easily be built on top of it and have minimal code to run basic tests
# of the commands. A list of the environmental variables can be found in
# ~/selftest/selftest.pl
#
# These can all be accesses via os.environ["VARIBLENAME"] when needed
import random
import string
from samba.auth import system_session
from samba.samdb import SamDB
from cStringIO import StringIO
from samba.netcmd.main import cmd_sambatool
import samba.tests
class SambaToolCmdTest(samba.tests.TestCaseInTempDir):
def getSamDB(self, *argv):
"""a convenience function to get a samdb instance so that we can query it"""
# We build a fake command to get the options created the same
# way the command classes do it. It would be better if the command
# classes had a way to more cleanly do this, but this lets us write
# tests for now
cmd = cmd_sambatool.subcommands["user"].subcommands["setexpiry"]
parser, optiongroups = cmd._create_parser("user")
opts, args = parser.parse_args(list(argv))
# Filter out options from option groups
args = args[1:]
kwargs = dict(opts.__dict__)
for option_group in parser.option_groups:
for option in option_group.option_list:
if option.dest is not None:
del kwargs[option.dest]
kwargs.update(optiongroups)
H = kwargs.get("H", None)
sambaopts = kwargs.get("sambaopts", None)
credopts = kwargs.get("credopts", None)
lp = sambaopts.get_loadparm()
creds = credopts.get_credentials(lp, fallback_machine=True)
samdb = SamDB(url=H, session_info=system_session(),
credentials=creds, lp=lp)
return samdb
def runcmd(self, name, *args):
"""run a single level command"""
cmd = cmd_sambatool.subcommands[name]
cmd.outf = StringIO()
cmd.errf = StringIO()
result = cmd._run(name, *args)
return (result, cmd.outf.getvalue(), cmd.errf.getvalue())
def runsubcmd(self, name, sub, *args):
"""run a command with sub commands"""
# The reason we need this function separate from runcmd is
        # that the .outf StringIO assignment is overridden if we use
# runcmd, so we can't capture stdout and stderr
cmd = cmd_sambatool.subcommands[name].subcommands[sub]
cmd.outf = StringIO()
cmd.errf = StringIO()
result = cmd._run(name, *args)
return (result, cmd.outf.getvalue(), cmd.errf.getvalue())
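    # Usage sketch (arguments are illustrative):
    #   (result, out, err) = self.runsubcmd("user", "setexpiry", username, "--days=2")
    #   self.assertCmdSuccess(result, "Ensure setexpiry succeeded")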
def assertCmdSuccess(self, val, msg=""):
self.assertIsNone(val, msg)
<|fim▁hole|> def assertMatch(self, base, string, msg=""):
self.assertTrue(string in base, msg)
def randomName(self, count=8):
"""Create a random name, cap letters and numbers, and always starting with a letter"""
name = random.choice(string.ascii_uppercase)
name += ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase+ string.digits) for x in range(count - 1))
return name
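    # Example (illustrative): randomName() -> "Q7xKpLmA"; the first character
    # is always an upper-case letter.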
def randomPass(self, count=16):
name = random.choice(string.ascii_uppercase)
name += random.choice(string.digits)
name += random.choice(string.ascii_lowercase)
name += ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase+ string.digits) for x in range(count - 3))
return name
def randomXid(self):
# pick some hopefully unused, high UID/GID range to avoid interference
# from the system the test runs on
xid = random.randint(4711000, 4799000)
return xid
def assertWithin(self, val1, val2, delta, msg=""):
"""Assert that val1 is within delta of val2, useful for time computations"""
self.assertTrue(((val1 + delta) > val2) and ((val1 - delta) < val2), msg)<|fim▁end|> | def assertCmdFail(self, val, msg=""):
self.assertIsNotNone(val, msg)
|
<|file_name|>CardCollectionDao.java<|end_file_name|><|fim▁begin|>/*
* $Id: CardCollectionDao.java 475 2005-12-08 23:44:08 -0800 (Thu, 08 Dec 2005) ivaynberg $
* $Revision: 475 $
* $Date: 2005-12-08 23:44:08 -0800 (Thu, 08 Dec 2005) $
*
* ==============================================================================
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.alienlabs.hatchetharry.persistence.dao;
import java.io.Serializable;
import java.util.List;
import org.alienlabs.hatchetharry.model.CardCollection;
import org.hibernate.Session;
/**
* The implementation-independent DAO interface. Defines the operations required
* to be supported by an implementation.
*
* @author igor
*/
public interface CardCollectionDao extends Serializable
{
Session getSession();
/**
	 * Load a {@link CardCollection} from the DB, given its <tt>id</tt>.
*
* @param id
	 *            The id of the CardCollection to load.
* @return CardCollection
*/
CardCollection load(long id);
/**
* Save the CardCollection to the DB
*
	 * @param contact
	 *            the CardCollection to save
	 * @return persistent instance of the saved CardCollection
*/
CardCollection save(CardCollection contact);
/**
	 * Delete a {@link CardCollection} from the DB, given its <tt>id</tt>.
*
* @param id
* The id of the CardCollection to delete.
*/
void delete(long id);
/**
* Return the number of CardCollections in the DB.
*
* @return count
*/
int count();
/**
* Returns the list of all unique last names in the database
*
* @return the list of all unique last names in the database
*/
<|fim▁hole|>}<|fim▁end|> | List<String> getUniqueLastNames();
|
<|file_name|>padStart.d.ts<|end_file_name|><|fim▁begin|>import { padStart } from "../fp";
<|fim▁hole|><|fim▁end|> | export = padStart; |
<|file_name|>basic.js<|end_file_name|><|fim▁begin|>'use strict';
var BigNumber = require('bignumber.js');<|fim▁hole|>var quotaMb = function(invites) { return min(BigNumber(2048).plus(BigNumber(512).times(invites)), upperLimitMb) };
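// Quota model: 2048 MB base plus 512 MB per invite, clamped to upperLimitMb.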
module.exports = {
cycles: {
$subscription: {
storageUsedMb: {},
invites: {}
}
},
values: {
storageQuotaMb: function(invites) { return quotaMb(invites) }
},
notifications: {
storage10pcntLeft: function(storageUsedMb, storageQuotaMb) {
return BigNumber(storageUsedMb).gt(BigNumber(storageQuotaMb).times(0.9))
},
storageOverUsed: function(storageUsedMb, storageQuotaMb) { return BigNumber(storageUsedMb).gt(storageQuotaMb) }
}
};<|fim▁end|> |
var upperLimitMb = BigNumber(1024).times(16);
var min = function(x, y) { return x.comparedTo(y) < 0 ? x : y }; |
<|file_name|>render.rs<|end_file_name|><|fim▁begin|>use std::collections::{HashMap, BTreeMap};
use std::error;
use std::fmt;
use std::io::Write;
use std::io::Error as IOError;
use serialize::json::Json;
use template::{Template, TemplateElement, Parameter, HelperTemplate};
use template::TemplateElement::{RawString, Expression, Comment, HelperBlock, HTMLExpression, HelperExpression};
use registry::Registry;
use context::{Context, JsonRender};
use support::str::StringWriter;
#[derive(Debug, Clone, Copy)]
pub struct RenderError {
pub desc: &'static str
}
impl fmt::Display for RenderError {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f, "{}", self.desc)
}
}
impl error::Error for RenderError {
fn description(&self) -> &str {
self.desc
}
}
impl From<IOError> for RenderError {
fn from(_: IOError) -> RenderError {
render_error("IO Error")
}
}
/// The context of a render call
///
/// this context stores information of a render and a writer where generated
/// content is written to.
///
pub struct RenderContext<'a> {
partials: HashMap<String, Template>,
path: String,
local_variables: HashMap<String, Json>,
default_var: Json,
/// the `Write` where page is generated
pub writer: &'a mut Write
}
impl<'a> RenderContext<'a> {
/// Create a render context from a `Write`
pub fn new(w: &'a mut Write) -> RenderContext<'a> {
RenderContext {
partials: HashMap::new(),
path: ".".to_string(),
local_variables: HashMap::new(),
default_var: Json::Null,
writer: w
}
}
/// Create a new `RenderContext` with a different `Write`
pub fn with_writer<'b>(&self, w: &'b mut Write) -> RenderContext<'b> {
RenderContext {
partials: self.partials.clone(),
path: self.path.clone(),
local_variables: self.local_variables.clone(),
default_var: self.default_var.clone(),
writer: w
}
}
pub fn get_partial(&self, name: &String) -> Option<Template> {
match self.partials.get(name) {
Some(t) => Some(t.clone()),
None => None
}
}
pub fn set_partial(&mut self, name: String, result: Template) {
self.partials.insert(name, result);
}
pub fn get_path(&self) -> &String {
&self.path
}
pub fn set_path(&mut self, path: String) {
self.path = path
}
pub fn set_local_var(&mut self, name: String, value: Json) {
self.local_variables.insert(name, value);
}
pub fn clear_local_vars(&mut self){
self.local_variables.clear();
}
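    // Promotion rewrites each "@name" local to "@../name" when entering a
    // nested scope; demotion strips one "@../" level on the way back out.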
pub fn promote_local_vars(&mut self) {
let mut new_map: HashMap<String, Json> = HashMap::new();
for key in self.local_variables.keys() {
let mut new_key = String::new();
new_key.push_str("@../");
new_key.push_str(&key[1..]);
let v = self.local_variables.get(key).unwrap().clone();
new_map.insert(new_key, v);
}
self.local_variables = new_map;
}
pub fn demote_local_vars(&mut self) {
let mut new_map: HashMap<String, Json> = HashMap::new();
for key in self.local_variables.keys() {
if key.starts_with("@../") {
let mut new_key = String::new();
new_key.push('@');
new_key.push_str(&key[4..]);
let v = self.local_variables.get(key).unwrap().clone();
new_map.insert(new_key, v);
}
}
self.local_variables = new_map;
}
pub fn get_local_var(&self, name: &String) -> &Json {
match self.local_variables.get(name) {
Some(j) => j,
None => &self.default_var
}
}
pub fn writer(&mut self) -> &mut Write {
self.writer
}
}
pub struct Helper<'a> {
name: &'a String,
params: Vec<String>,
hash: BTreeMap<String, Json>,
template: &'a Option<Template>,
inverse: &'a Option<Template>,
block: bool
}
impl<'a, 'b> Helper<'a> {
fn from_template(ht: &'a HelperTemplate, ctx: &Context, registry: &Registry, rc: &'b mut RenderContext) -> Result<Helper<'a>, RenderError> {
let mut evaluated_params = Vec::new();
for p in ht.params.iter() {
let r = try!(p.renders(ctx, registry, rc));
evaluated_params.push(r);
}
let mut evaluated_hash = BTreeMap::new();
for (k, p) in ht.hash.iter() {
let r = try!(p.renders(ctx, registry, rc));
// subexpression in hash values are all treated as json string for now
// FIXME: allow different types evaluated as hash value
evaluated_hash.insert(k.clone(), Json::String(r));
}
Ok(Helper {
name: &ht.name,
params: evaluated_params,
hash: evaluated_hash,
template: &ht.template,
inverse: &ht.inverse,
block: ht.block
})
}
pub fn name(&self) -> &String {
&self.name
}
pub fn params(&self) -> &Vec<String> {
&self.params
}
pub fn param(&self, idx: usize) -> Option<&String> {
self.params.get(idx)
}
pub fn hash(&self) -> &BTreeMap<String, Json> {
&self.hash
}
pub fn hash_get(&self, key: &str) -> Option<&Json> {
self.hash.get(key)
}
pub fn template(&self) -> Option<&Template> {
match *self.template {
Some(ref t) => {
Some(t)
},
None => None
}
}
pub fn inverse(&self) -> Option<&Template> {
match *self.inverse {
Some(ref t) => {
Some(t)
},
None => None
}
}
pub fn is_block(&self) -> bool {
self.block
}
}
pub trait Renderable {
fn render(&self, ctx: &Context, registry: &Registry, rc: &mut RenderContext) -> Result<(), RenderError>;
}
impl Parameter {
fn renders(&self, ctx: &Context, registry: &Registry, rc: &mut RenderContext) -> Result<String, RenderError> {
match self {
&Parameter::Name(ref n) => {
Ok(n.clone())
},
&Parameter::Subexpression(ref t) => {
let mut local_writer = StringWriter::new();
let result = {
let mut local_rc = rc.with_writer(&mut local_writer);
t.render(ctx, registry, &mut local_rc)
};
match result {
Ok(_) => {
Ok(local_writer.to_string())
},
Err(e) => {
Err(e)
}
}
}
}
}
}
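// A subexpression renders into a scratch StringWriter first, so its output
// can be passed on as an ordinary string parameter or hash value.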
impl Renderable for Template {
fn render(&self, ctx: &Context, registry: &Registry, rc: &mut RenderContext) -> Result<(), RenderError> {
let iter = self.elements.iter();
for t in iter {
let c = ctx;
try!(t.render(c, registry, rc))
}
Ok(())
}
}
pub fn render_error(desc: &'static str) -> RenderError {
RenderError {
desc: desc
}
}
impl Renderable for TemplateElement {
fn render(&self, ctx: &Context, registry: &Registry, rc: &mut RenderContext) -> Result<(), RenderError> {
match *self {
RawString(ref v) => {
try!(rc.writer.write(v.clone().into_bytes().as_ref()));
Ok(())
},
Expression(ref v) => {
let name = try!(v.renders(ctx, registry, rc));
let rendered = {
let value = if name.starts_with("@") {
rc.get_local_var(&name)
} else {
ctx.navigate(rc.get_path(), &name)
};
value.render()
};
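                // Minimal HTML escaping for text output; the HTMLExpression
                // arm below emits the rendered value unescaped.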
let output = rendered.replace("&", "&")
.replace("\"", """)
.replace("<", "<")
.replace(">", ">");
try!(rc.writer.write(output.into_bytes().as_ref()));
Ok(())
},
HTMLExpression(ref v) => {
let name = try!(v.renders(ctx, registry, rc));
let rendered = {
let value = if name.starts_with("@") {
rc.get_local_var(&name)
} else {
ctx.navigate(rc.get_path(), &name)
};
value.render()
};
try!(rc.writer.write(rendered.into_bytes().as_ref()));
Ok(())
},
HelperExpression(ref ht) | HelperBlock(ref ht) => {
let helper = try!(Helper::from_template(ht, ctx, registry, rc));
match registry.get_helper(&ht.name) {
Some(d) => {
(**d).call(ctx, &helper, registry, rc)
},
None => {
let meta_helper_name = if ht.block {
"blockHelperMissing"
} else {
"helperMissing"
}.to_string();
match registry.get_helper(&meta_helper_name) {
Some (md) => {
(**md).call(ctx, &helper, registry, rc)
}
None => {
Err(RenderError{
desc: "Helper not defined."
})
}
}
}
}
},
Comment(_) => {
Ok(())
}
}
}
}
#[test]
fn test_raw_string() {
let r = Registry::new();
let mut sw = StringWriter::new();
{
let mut rc = RenderContext::new(&mut sw);
let raw_string = RawString("<h1>hello world</h1>".to_string());
raw_string.render(&Context::null(), &r, &mut rc).ok().unwrap();
}
assert_eq!(sw.to_string(), "<h1>hello world</h1>".to_string());
}
#[test]
fn test_expression() {
let r = Registry::new();
let mut sw = StringWriter::new();
{
let mut rc = RenderContext::new(&mut sw);
let element = Expression(Parameter::Name("hello".into()));
let mut m: HashMap<String, String> = HashMap::new();
let value = "<p></p>".to_string();
m.insert("hello".to_string(), value);
let ctx = Context::wraps(&m);
element.render(&ctx, &r, &mut rc).ok().unwrap();
}
assert_eq!(sw.to_string(), "<p></p>".to_string());
}
#[test]
fn test_html_expression() {
let r = Registry::new();
let mut sw = StringWriter::new();
let value = "world";
{
let mut rc = RenderContext::new(&mut sw);
let element = HTMLExpression(Parameter::Name("hello".into()));<|fim▁hole|> let mut m: HashMap<String, String> = HashMap::new();
m.insert("hello".to_string(), value.to_string());
let ctx = Context::wraps(&m);
element.render(&ctx, &r, &mut rc).ok().unwrap();
}
assert_eq!(sw.to_string(), value.to_string());
}
#[test]
fn test_template() {
let r = Registry::new();
let mut sw = StringWriter::new();
{
let mut rc = RenderContext::new(&mut sw);
let mut elements: Vec<TemplateElement> = Vec::new();
let e1 = RawString("<h1>".to_string());
elements.push(e1);
let e2 = Expression(Parameter::Name("hello".into()));
elements.push(e2);
let e3 = RawString("</h1>".to_string());
elements.push(e3);
let e4 = Comment("".to_string());
elements.push(e4);
let template = Template {
elements: elements
};
let mut m: HashMap<String, String> = HashMap::new();
let value = "world".to_string();
m.insert("hello".to_string(), value);
let ctx = Context::wraps(&m);
template.render(&ctx, &r, &mut rc).ok().unwrap();
}
assert_eq!(sw.to_string(), "<h1>world</h1>".to_string());
}
#[test]
fn test_render_context_promotion_and_demotion() {
use serialize::json::ToJson;
let mut sw = StringWriter::new();
let mut render_context = RenderContext::new(&mut sw);
render_context.set_local_var("@index".to_string(), 0usize.to_json());
render_context.promote_local_vars();
assert_eq!(render_context.get_local_var(&"@../index".to_string()),
&0usize.to_json());
render_context.demote_local_vars();
assert_eq!(render_context.get_local_var(&"@index".to_string()),
&0usize.to_json());
}
#[test]
fn test_render_subexpression() {
let r = Registry::new();
let mut sw =StringWriter::new();
{
let mut rc = RenderContext::new(&mut sw);
let mut elements: Vec<TemplateElement> = Vec::new();
let e1 = RawString("<h1>".to_string());
elements.push(e1);
let e2 = Expression(Parameter::parse("(hello)".into()).ok().unwrap());
elements.push(e2);
let e3 = RawString("</h1>".to_string());
elements.push(e3);
let template = Template {
elements: elements
};
let mut m: HashMap<String, String> = HashMap::new();
m.insert("hello".to_string(), "world".to_string());
m.insert("world".to_string(), "nice".to_string());
let ctx = Context::wraps(&m);
template.render(&ctx, &r, &mut rc).ok().unwrap();
}
assert_eq!(sw.to_string(), "<h1>nice</h1>".to_string());
}<|fim▁end|> | |
<|file_name|>gen-accessors.go<|end_file_name|><|fim▁begin|>// Copyright 2017 The go-github AUTHORS. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build ignore
// gen-accessors generates accessor methods for structs with pointer fields.
//
// It is meant to be used by the go-github authors in conjunction with the
// go generate tool before sending a commit to GitHub.
package main
import (
"bytes"
"flag"
"fmt"
"go/ast"
"go/format"
"go/parser"
"go/token"
"io/ioutil"
"log"
"os"
"sort"
"strings"
"text/template"
"time"
)
const (
fileSuffix = "-accessors.go"
)
var (
verbose = flag.Bool("v", false, "Print verbose log messages")
sourceTmpl = template.Must(template.New("source").Parse(source))
// blacklistStructMethod lists "struct.method" combos to skip.
blacklistStructMethod = map[string]bool{
"RepositoryContent.GetContent": true,
"Client.GetBaseURL": true,
"Client.GetUploadURL": true,
"ErrorResponse.GetResponse": true,
"RateLimitError.GetResponse": true,
"AbuseRateLimitError.GetResponse": true,
}
// blacklistStruct lists structs to skip.
blacklistStruct = map[string]bool{
"Client": true,
}
)
func logf(fmt string, args ...interface{}) {
if *verbose {
log.Printf(fmt, args...)
}
}
func main() {
flag.Parse()
fset := token.NewFileSet()
pkgs, err := parser.ParseDir(fset, ".", sourceFilter, 0)
if err != nil {
log.Fatal(err)
return
}
for pkgName, pkg := range pkgs {
t := &templateData{
filename: pkgName + fileSuffix,
Year: time.Now().Year(),
Package: pkgName,
Imports: map[string]string{},
}
for filename, f := range pkg.Files {
logf("Processing %v...", filename)
if err := t.processAST(f); err != nil {
log.Fatal(err)
}
}
if err := t.dump(); err != nil {
log.Fatal(err)
}
}
logf("Done.")
}
func (t *templateData) processAST(f *ast.File) error {
for _, decl := range f.Decls {
gd, ok := decl.(*ast.GenDecl)
if !ok {
continue
}
for _, spec := range gd.Specs {
ts, ok := spec.(*ast.TypeSpec)
if !ok {
continue
}
// Skip unexported identifiers.
if !ts.Name.IsExported() {
logf("Struct %v is unexported; skipping.", ts.Name)
continue
}
// Check if the struct is blacklisted.
if blacklistStruct[ts.Name.Name] {
logf("Struct %v is blacklisted; skipping.", ts.Name)
continue
}
st, ok := ts.Type.(*ast.StructType)
if !ok {
continue
}
for _, field := range st.Fields.List {
se, ok := field.Type.(*ast.StarExpr)
if len(field.Names) == 0 || !ok {
continue
}
fieldName := field.Names[0]
// Skip unexported identifiers.
if !fieldName.IsExported() {
logf("Field %v is unexported; skipping.", fieldName)
continue
}
// Check if "struct.method" is blacklisted.
if key := fmt.Sprintf("%v.Get%v", ts.Name, fieldName); blacklistStructMethod[key] {
logf("Method %v is blacklisted; skipping.", key)
continue
}
switch x := se.X.(type) {
case *ast.ArrayType:
t.addArrayType(x, ts.Name.String(), fieldName.String())
case *ast.Ident:
t.addIdent(x, ts.Name.String(), fieldName.String())
case *ast.MapType:
t.addMapType(x, ts.Name.String(), fieldName.String())
case *ast.SelectorExpr:
t.addSelectorExpr(x, ts.Name.String(), fieldName.String())
default:
logf("processAST: type %q, field %q, unknown %T: %+v", ts.Name, fieldName, x, x)
}
}
}
}
return nil
}
func sourceFilter(fi os.FileInfo) bool {
return !strings.HasSuffix(fi.Name(), "_test.go") && !strings.HasSuffix(fi.Name(), fileSuffix)
}
func (t *templateData) dump() error {
if len(t.Getters) == 0 {
logf("No getters for %v; skipping.", t.filename)
return nil
}
// Sort getters by ReceiverType.FieldName.
sort.Sort(byName(t.Getters))
var buf bytes.Buffer
if err := sourceTmpl.Execute(&buf, t); err != nil {
return err
}
clean, err := format.Source(buf.Bytes())
if err != nil {
return err
}
logf("Writing %v...", t.filename)
return ioutil.WriteFile(t.filename, clean, 0644)
}
func newGetter(receiverType, fieldName, fieldType, zeroValue string, namedStruct bool) *getter {
return &getter{
sortVal: strings.ToLower(receiverType) + "." + strings.ToLower(fieldName),
ReceiverVar: strings.ToLower(receiverType[:1]),
ReceiverType: receiverType,<|fim▁hole|> ZeroValue: zeroValue,
NamedStruct: namedStruct,
}
}
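// Illustrative output for a *string field "Name" on type "User" (the real
// text comes from the source template at the bottom of this file):
//
//	func (u *User) GetName() string {
//		if u == nil || u.Name == nil {
//			return ""
//		}
//		return *u.Name
//	}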
func (t *templateData) addArrayType(x *ast.ArrayType, receiverType, fieldName string) {
var eltType string
switch elt := x.Elt.(type) {
case *ast.Ident:
eltType = elt.String()
default:
logf("addArrayType: type %q, field %q: unknown elt type: %T %+v; skipping.", receiverType, fieldName, elt, elt)
return
}
t.Getters = append(t.Getters, newGetter(receiverType, fieldName, "[]"+eltType, "nil", false))
}
func (t *templateData) addIdent(x *ast.Ident, receiverType, fieldName string) {
var zeroValue string
var namedStruct = false
switch x.String() {
case "int":
zeroValue = "0"
case "string":
zeroValue = `""`
case "bool":
zeroValue = "false"
case "Timestamp":
zeroValue = "Timestamp{}"
default:
zeroValue = "nil"
namedStruct = true
}
t.Getters = append(t.Getters, newGetter(receiverType, fieldName, x.String(), zeroValue, namedStruct))
}
func (t *templateData) addMapType(x *ast.MapType, receiverType, fieldName string) {
var keyType string
switch key := x.Key.(type) {
case *ast.Ident:
keyType = key.String()
default:
logf("addMapType: type %q, field %q: unknown key type: %T %+v; skipping.", receiverType, fieldName, key, key)
return
}
var valueType string
switch value := x.Value.(type) {
case *ast.Ident:
valueType = value.String()
default:
logf("addMapType: type %q, field %q: unknown value type: %T %+v; skipping.", receiverType, fieldName, value, value)
return
}
fieldType := fmt.Sprintf("map[%v]%v", keyType, valueType)
zeroValue := fmt.Sprintf("map[%v]%v{}", keyType, valueType)
t.Getters = append(t.Getters, newGetter(receiverType, fieldName, fieldType, zeroValue, false))
}
func (t *templateData) addSelectorExpr(x *ast.SelectorExpr, receiverType, fieldName string) {
if strings.ToLower(fieldName[:1]) == fieldName[:1] { // Non-exported field.
return
}
var xX string
if xx, ok := x.X.(*ast.Ident); ok {
xX = xx.String()
}
switch xX {
case "time", "json":
if xX == "json" {
t.Imports["encoding/json"] = "encoding/json"
} else {
t.Imports[xX] = xX
}
fieldType := fmt.Sprintf("%v.%v", xX, x.Sel.Name)
zeroValue := fmt.Sprintf("%v.%v{}", xX, x.Sel.Name)
if xX == "time" && x.Sel.Name == "Duration" {
zeroValue = "0"
}
t.Getters = append(t.Getters, newGetter(receiverType, fieldName, fieldType, zeroValue, false))
default:
logf("addSelectorExpr: xX %q, type %q, field %q: unknown x=%+v; skipping.", xX, receiverType, fieldName, x)
}
}
type templateData struct {
filename string
Year int
Package string
Imports map[string]string
Getters []*getter
}
type getter struct {
sortVal string // Lower-case version of "ReceiverType.FieldName".
ReceiverVar string // The one-letter variable name to match the ReceiverType.
ReceiverType string
FieldName string
FieldType string
ZeroValue string
NamedStruct bool // Getter for named struct.
}
type byName []*getter
func (b byName) Len() int { return len(b) }
func (b byName) Less(i, j int) bool { return b[i].sortVal < b[j].sortVal }
func (b byName) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
const source = `// Copyright {{.Year}} The go-github AUTHORS. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Code generated by gen-accessors; DO NOT EDIT.
package {{.Package}}
{{with .Imports}}
import (
{{- range . -}}
"{{.}}"
{{end -}}
)
{{end}}
{{range .Getters}}
{{if .NamedStruct}}
// Get{{.FieldName}} returns the {{.FieldName}} field.
func ({{.ReceiverVar}} *{{.ReceiverType}}) Get{{.FieldName}}() *{{.FieldType}} {
if {{.ReceiverVar}} == nil {
return {{.ZeroValue}}
}
return {{.ReceiverVar}}.{{.FieldName}}
}
{{else}}
// Get{{.FieldName}} returns the {{.FieldName}} field if it's non-nil, zero value otherwise.
func ({{.ReceiverVar}} *{{.ReceiverType}}) Get{{.FieldName}}() {{.FieldType}} {
if {{.ReceiverVar}} == nil || {{.ReceiverVar}}.{{.FieldName}} == nil {
return {{.ZeroValue}}
}
return *{{.ReceiverVar}}.{{.FieldName}}
}
{{end}}
{{end}}
`<|fim▁end|> | FieldName: fieldName,
FieldType: fieldType, |
<|file_name|>reducer_test.ts<|end_file_name|><|fim▁begin|>import { fakeState } from "../../__test_support__/fake_state";
import { overwrite, refreshStart, refreshOK, refreshNO } from "../../api/crud";
import {
SpecialStatus,
TaggedSequence,
TaggedDevice,
ResourceName,
TaggedResource,
TaggedTool,
} from "farmbot";
import { buildResourceIndex } from "../../__test_support__/resource_index_builder";
import { GeneralizedError } from "../actions";
import { Actions } from "../../constants";
import { fakeResource } from "../../__test_support__/fake_resource";
import { resourceReducer } from "../reducer";
import { findByUuid } from "../reducer_support";
import { EditResourceParams } from "../../api/interfaces";
import { fakeFolder } from "../../__test_support__/fake_state/resources";
describe("resource reducer", () => {
it("marks resources as DIRTY when reducing OVERWRITE_RESOURCE", () => {
const state = fakeState().resources;
const uuid = Object.keys(state.index.byKind.Sequence)[0];
const sequence = state.index.references[uuid] as TaggedSequence;
expect(sequence).toBeTruthy();
expect(sequence.kind).toBe("Sequence");
const next = resourceReducer(state, overwrite(sequence, {
kind: "sequence",
name: "wow",
folder_id: undefined,
args: { version: -0, locals: { kind: "scope_declaration", args: {} } },
body: [],
color: "red"
}));
const seq2 = next.index.references[uuid] as TaggedSequence;
expect(seq2.specialStatus).toBe(SpecialStatus.DIRTY);
});
it("marks resources as SAVING when reducing REFRESH_RESOURCE_START", () => {
const state = fakeState().resources;
const uuid = Object.keys(state.index.byKind.Device)[0];
const device = state.index.references[uuid] as TaggedDevice;
expect(device).toBeTruthy();
expect(device.kind).toBe("Device");
const afterStart = resourceReducer(state, refreshStart(device.uuid));
const dev2 = afterStart.index.references[uuid] as TaggedDevice;
expect(dev2.specialStatus).toBe(SpecialStatus.SAVING);
// SCENARIO: REFRESH_START ===> REFRESH_OK
const afterOk = resourceReducer(afterStart, refreshOK(device));
const dev3 = afterOk.index.references[uuid] as TaggedDevice;
expect(dev3.specialStatus).toBe(SpecialStatus.SAVED);
const payl: GeneralizedError = {
err: "X",
uuid: dev3.uuid,
statusBeforeError: SpecialStatus.DIRTY
};
// SCENARIO: REFRESH_START ===> REFRESH_NO
const afterNo =
resourceReducer(afterStart, refreshNO(payl));
const dev4 = afterNo.index.references[uuid] as TaggedDevice;
expect(dev4.specialStatus).toBe(SpecialStatus.SAVED);
});
const TEST_RESOURCE_NAMES: TaggedResource["kind"][] = ["Crop", "Device",
"FarmEvent", "FarmwareInstallation", "FbosConfig",
"FirmwareConfig", "Log", "Peripheral", "PinBinding", "PlantTemplate",
"Point", "Regimen", "SavedGarden", "Sensor"];
it("EDITs a _RESOURCE", () => {
const startingState = fakeState().resources;
const { index } = startingState;
const uuid = Object.keys(index.byKind.Tool)[0];
const update: Partial<TaggedTool["body"]> = { name: "after" };
const payload: EditResourceParams = {
uuid,
update,
specialStatus: SpecialStatus.SAVED<|fim▁hole|> const action = { type: Actions.EDIT_RESOURCE, payload };
const newState = resourceReducer(startingState, action);
const oldTool = index.references[uuid] as TaggedTool;
const newTool = newState.index.references[uuid] as TaggedTool;
expect(oldTool.body.name).not.toEqual("after");
expect(newTool.body.name).toEqual("after");
});
it("handles resource failures", () => {
const startingState = fakeState().resources;
const uuid = Object.keys(startingState.index.byKind.Tool)[0];
const action = {
type: Actions._RESOURCE_NO,
payload: { uuid, err: "Whatever", statusBeforeError: SpecialStatus.DIRTY }
};
const newState = resourceReducer(startingState, action);
const tool = newState.index.references[uuid] as TaggedTool;
expect(tool.specialStatus).toBe(SpecialStatus.DIRTY);
});
it("covers destroy resource branches", () => {
const testResourceDestroy = (kind: ResourceName) => {
const state = fakeState().resources;
const resource = fakeResource(kind as TaggedResource["kind"], {});
const action = {
type: Actions.DESTROY_RESOURCE_OK,
payload: resource
};
const newState = resourceReducer(state, action);
expect(newState.index.references[resource.uuid]).toEqual(undefined);
};
TEST_RESOURCE_NAMES
.concat(["Image", "SensorReading"])
.map((kind: ResourceName) => testResourceDestroy(kind));
});
it("toggles folder open state", () => {
const folder = fakeFolder();
folder.body.id = 1;
const startingState = buildResourceIndex([folder]);
(startingState.index.sequenceFolders.localMetaAttributes[1].open as unknown)
= undefined;
const action = { type: Actions.FOLDER_TOGGLE, payload: { id: 1 } };
const newState = resourceReducer(startingState, action);
expect(newState.index.sequenceFolders.localMetaAttributes[1].open)
.toEqual(false);
});
});
describe("findByUuid", () => {
it("crashes on bad UUIDs", () => {
expect(() => findByUuid(buildResourceIndex().index, "Nope!")).toThrow();
});
});<|fim▁end|> | }; |
<|file_name|>CachedIoAdapter.java<|end_file_name|><|fim▁begin|>/* This file is part of the db4o object database http://www.db4o.com
Copyright (C) 2004 - 2011 Versant Corporation http://www.versant.com
db4o is free software; you can redistribute it and/or modify it under
the terms of version 3 of the GNU General Public License as published
by the Free Software Foundation.
db4o is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License along
with this program. If not, see http://www.gnu.org/licenses/. */
package com.db4o.io;
import com.db4o.ext.*;
import com.db4o.internal.fileheader.*;
/**
* CachedIoAdapter is an IOAdapter for random access files, which caches data
* for IO access. Its functionality is similar to OS cache.<br>
* Example:<br>
* <code>delegateAdapter = new RandomAccessFileAdapter();</code><br>
* <code>Db4o.configure().io(new CachedIoAdapter(delegateAdapter));</code><br>
* @deprecated Use {@link CachingStorage} instead.
*/
public class CachedIoAdapter extends IoAdapter {
private Page _head;
private Page _tail;
private long _position;
private int _pageSize;
private int _pageCount;
private long _fileLength;
private long _filePointer;
private IoAdapter _io;
private boolean _readOnly;
private static int DEFAULT_PAGE_SIZE = 1024;
private static int DEFAULT_PAGE_COUNT = 64;
// private Hashtable4 _posPageMap = new Hashtable4(PAGE_COUNT);
/**
* Creates an instance of CachedIoAdapter with the default page size and
* page count.
*
* @param ioAdapter
* delegate IO adapter (RandomAccessFileAdapter by default)
*/
public CachedIoAdapter(IoAdapter ioAdapter) {
this(ioAdapter, DEFAULT_PAGE_SIZE, DEFAULT_PAGE_COUNT);
}
/**
* Creates an instance of CachedIoAdapter with a custom page size and page
* count.<br>
*
* @param ioAdapter
* delegate IO adapter (RandomAccessFileAdapter by default)
* @param pageSize
* cache page size
* @param pageCount
* allocated amount of pages
*/
public CachedIoAdapter(IoAdapter ioAdapter, int pageSize, int pageCount) {
_io = ioAdapter;
_pageSize = pageSize;
_pageCount = pageCount;
}
/**
* Creates an instance of CachedIoAdapter with extended parameters.<br>
*
* @param path
* database file path
* @param lockFile
* determines if the file should be locked
* @param initialLength
* initial file length, new writes will start from this point
* @param readOnly
	 *            if the file should be used in read-only mode.
* @param io
* delegate IO adapter (RandomAccessFileAdapter by default)
* @param pageSize
* cache page size
* @param pageCount
* allocated amount of pages
*/
public CachedIoAdapter(String path, boolean lockFile, long initialLength,
boolean readOnly, IoAdapter io, int pageSize, int pageCount)
throws Db4oIOException {
_readOnly = readOnly;
_pageSize = pageSize;
_pageCount = pageCount;
initCache();
initIOAdaptor(path, lockFile, initialLength, readOnly, io);
_position = initialLength;
_filePointer = initialLength;
_fileLength = _io.getLength();
}
/**
* Creates and returns a new CachedIoAdapter <br>
*
* @param path
* database file path
* @param lockFile
* determines if the file should be locked
* @param initialLength
* initial file length, new writes will start from this point
*/
public IoAdapter open(String path, boolean lockFile, long initialLength, boolean readOnly)
throws Db4oIOException {
return new CachedIoAdapter(path, lockFile, initialLength, readOnly, _io,
_pageSize, _pageCount);
}
/**
* Deletes the database file
*
* @param path
* file path
*/
public void delete(String path) {
_io.delete(path);
}
/**
* Checks if the file exists
*
* @param path
* file path
*/
public boolean exists(String path) {
return _io.exists(path);
}
private void initIOAdaptor(String path, boolean lockFile, long initialLength, boolean readOnly, IoAdapter io)
throws Db4oIOException {
_io = io.open(path, lockFile, initialLength, readOnly);
}
private void initCache() {
_head = new Page(_pageSize);
_head._prev = null;
Page page = _head;
Page next = _head;
for (int i = 0; i < _pageCount - 1; ++i) {
next = new Page(_pageSize);
page._next = next;
next._prev = page;
page = next;
}
_tail = next;
}
/**
* Reads the file into the buffer using pages from cache. If the next page
* is not cached it will be read from the file.
*
* @param buffer
* destination buffer
* @param length
* how many bytes to read
*/
public int read(byte[] buffer, int length) throws Db4oIOException {
long startAddress = _position;
int bytesToRead = length;
int totalRead = 0;
while (bytesToRead > 0) {
final Page page = getPage(startAddress, true);
final int readBytes = page.read(buffer, totalRead, startAddress, bytesToRead);
movePageToHead(page);
if (readBytes <= 0) {
break;
}
bytesToRead -= readBytes;
startAddress += readBytes;
totalRead += readBytes;
}
_position = startAddress;
return totalRead == 0 ? -1 : totalRead;
}
/**
* Writes the buffer to cache using pages
*
* @param buffer
* source buffer
* @param length
* how many bytes to write
*/
public void write(byte[] buffer, int length) throws Db4oIOException {
validateReadOnly();
long startAddress = _position;
int bytesToWrite = length;
int bufferOffset = 0;
while (bytesToWrite > 0) {
// page doesn't need to loadFromDisk if the whole page is dirty
boolean loadFromDisk = (bytesToWrite < _pageSize)
|| (startAddress % _pageSize != 0);
final Page page = getPage(startAddress, loadFromDisk);
page.ensureEndAddress(getLength());
final int writtenBytes = page.write(buffer, bufferOffset, startAddress, bytesToWrite);
flushIfHeaderBlockPage(page);
movePageToHead(page);
bytesToWrite -= writtenBytes;
startAddress += writtenBytes;
bufferOffset += writtenBytes;
}
long endAddress = startAddress;
_position = endAddress;
_fileLength = Math.max(endAddress, _fileLength);
}
private void flushIfHeaderBlockPage(final Page page) {
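		// Pages overlapping the file header are written through on every write;
		// getPage() likewise re-reads them, so the header is never served stale from cache.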
if(containsHeaderBlock(page)) {
flushPage(page);
}
}
private void validateReadOnly() {
if(_readOnly) {
throw new Db4oIOException();
}
}
/**
* Flushes cache to a physical storage
*/
public void sync() throws Db4oIOException {
validateReadOnly();
flushAllPages();
_io.sync();
}
/**
* Returns the file length
*/
public long getLength() throws Db4oIOException {
return _fileLength;
}
/**
* Flushes and closes the file
*/
public void close() throws Db4oIOException {
try {
<|fim▁hole|> finally {
_io.close();
}
}
public IoAdapter delegatedIoAdapter() {
return _io.delegatedIoAdapter();
}
private Page getPage(long startAddress, boolean loadFromDisk)
throws Db4oIOException {
Page page = getPageFromCache(startAddress);
if (page != null) {
if (containsHeaderBlock(page)) {
getPageFromDisk(page, startAddress);
}
page.ensureEndAddress(_fileLength);
return page;
}
// in case that page is not found in the cache
page = getFreePageFromCache();
if (loadFromDisk) {
getPageFromDisk(page, startAddress);
} else {
resetPageAddress(page, startAddress);
}
return page;
}
private boolean containsHeaderBlock(Page page) {
return page.startAddress() <= FileHeader1.HEADER_LENGTH;
}
private void resetPageAddress(Page page, long startAddress) {
page.startAddress(startAddress);
page.endAddress(startAddress + _pageSize);
}
private Page getFreePageFromCache() throws Db4oIOException {
if (!_tail.isFree()) {
flushPage(_tail);
// _posPageMap.remove(new Long(tail.startPosition / PAGE_SIZE));
}
return _tail;
}
private Page getPageFromCache(long pos) throws Db4oIOException {
Page page = _head;
while (page != null) {
if (page.contains(pos)) {
return page;
}
page = page._next;
}
return null;
// Page page = (Page) _posPageMap.get(new Long(pos/PAGE_SIZE));
// return page;
}
private void flushAllPages() throws Db4oIOException {
Page node = _head;
while (node != null) {
flushPage(node);
node = node._next;
}
}
private void flushPage(Page page) throws Db4oIOException {
if (!page._dirty) {
return;
}
ioSeek(page.startAddress());
writePageToDisk(page);
return;
}
private void getPageFromDisk(Page page, long pos) throws Db4oIOException {
long startAddress = pos - pos % _pageSize;
page.startAddress(startAddress);
ioSeek(page._startAddress);
int count = ioRead(page);
if (count > 0) {
page.endAddress(startAddress + count);
} else {
page.endAddress(startAddress);
}
// _posPageMap.put(new Long(page.startPosition / PAGE_SIZE), page);
}
private int ioRead(Page page) throws Db4oIOException {
int count = _io.read(page._buffer);
if (count > 0) {
_filePointer = page._startAddress + count;
}
return count;
}
private void movePageToHead(Page page) {
if (page == _head) {
return;
}
if (page == _tail) {
Page tempTail = _tail._prev;
tempTail._next = null;
_tail._next = _head;
_tail._prev = null;
_head._prev = page;
_head = _tail;
_tail = tempTail;
} else {
page._prev._next = page._next;
page._next._prev = page._prev;
page._next = _head;
_head._prev = page;
page._prev = null;
_head = page;
}
}
private void writePageToDisk(Page page) throws Db4oIOException {
validateReadOnly();
try{
_io.write(page._buffer, page.size());
_filePointer = page.endAddress();
page._dirty = false;
}catch (Db4oIOException e){
_readOnly = true;
throw e;
}
}
/**
* Moves the pointer to the specified file position
*
* @param pos
* position within the file
*/
public void seek(long pos) throws Db4oIOException {
_position = pos;
}
private void ioSeek(long pos) throws Db4oIOException {
if (_filePointer != pos) {
_io.seek(pos);
_filePointer = pos;
}
}
private static class Page {
byte[] _buffer;
long _startAddress = -1;
long _endAddress;
final int _bufferSize;
boolean _dirty;
Page _prev;
Page _next;
private byte[] zeroBytes;
public Page(int size) {
_bufferSize = size;
_buffer = new byte[_bufferSize];
}
/*
* This method must be invoked before page.write/read, because seek and
* write may write ahead the end of file.
*/
void ensureEndAddress(long fileLength) {
long bufferEndAddress = _startAddress + _bufferSize;
if (_endAddress < bufferEndAddress && fileLength > _endAddress) {
long newEndAddress = Math.min(fileLength, bufferEndAddress);
if (zeroBytes == null) {
zeroBytes = new byte[_bufferSize];
}
System.arraycopy(zeroBytes, 0, _buffer,
(int) (_endAddress - _startAddress),
(int) (newEndAddress - _endAddress));
_endAddress = newEndAddress;
}
}
long endAddress() {
return _endAddress;
}
void startAddress(long address) {
_startAddress = address;
}
long startAddress() {
return _startAddress;
}
void endAddress(long address) {
_endAddress = address;
}
int size() {
return (int) (_endAddress - _startAddress);
}
int read(byte[] out, int outOffset, long startAddress, int length) {
int bufferOffset = (int) (startAddress - _startAddress);
			int pageAvailableDataSize = (int) (_endAddress - startAddress);
			int readBytes = Math.min(pageAvailableDataSize, length);
if (readBytes <= 0) { // meaning reach EOF
return -1;
}
System.arraycopy(_buffer, bufferOffset, out, outOffset, readBytes);
return readBytes;
}
int write(byte[] data, int dataOffset, long startAddress, int length) {
int bufferOffset = (int) (startAddress - _startAddress);
			int pageAvailableBufferSize = _bufferSize - bufferOffset;
			int writtenBytes = Math.min(pageAvailableBufferSize, length);
System.arraycopy(data, dataOffset, _buffer, bufferOffset,
writtenBytes);
long endAddress = startAddress + writtenBytes;
if (endAddress > _endAddress) {
_endAddress = endAddress;
}
_dirty = true;
return writtenBytes;
}
boolean contains(long address) {
return (_startAddress != -1 && address >= _startAddress && address < _startAddress
+ _bufferSize);
}
boolean isFree() {
return _startAddress == -1;
}
}
}<|fim▁end|> | flushAllPages();
}
|
<|file_name|>hrdoc.py<|end_file_name|><|fim▁begin|>import os
import sys
import shutil
from glob import glob
# --------------------------------------------------------------------------
DOC_DIR = 'hr-html'
PYWRAPS_FN = 'idaapi.py'
# --------------------------------------------------------------------------
def add_footer(lines):
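    """Splice the footer SSI include right after Epydoc's generator table; return None if the markers are missing."""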
S1 = 'Generated by Epydoc'
S2 = '</table>'
p = lines.find(S1)
if p == -1:
return None
p = lines.find(S2, p)
if p == -1:
return None
p += len(S2)
return lines[0:p] + '\n<!--#include virtual="/footer.shtml" -->' + lines[p:]
# --------------------------------------------------------------------------
def define_idaapi_resolver():
"""
Whenever a module named \"idaapi_<something>\" is
spotted, turn it into \"idaapi\".
"""
import epydoc.apidoc
dn = epydoc.apidoc.DottedName.__init__
def resolver(piece):
if piece is not None and isinstance(piece, basestring) and piece.startswith("idaapi_"):
return "idaapi"
else:
return piece
def wrapper(self, *pieces, **options):
return dn(self, *map(resolver, pieces), **options);
epydoc.apidoc.DottedName.__init__ = wrapper
<|fim▁hole|>def gen_docs():
import epydoc.cli
import swigdocs
define_idaapi_resolver()
swigdocs.gen_docs(outfn = 'pywraps.py')
# append obj/x86_win_vc_32/idaapi.py to it
# os.system(r'copy /b idaapi.py+..\obj\x86_win_vc_32\idaapi.py idaapi.py')
# delete all output files
for fn in glob('hr-html/*'):
os.unlink(fn)
epydoc.cli.optparse.sys.argv = [ 'epydoc',
'--config', '../hrdoc.cfg',
'--simple-term'
]
# Generate the documentation
epydoc.cli.cli()
# --------------------------------------------------------------------------
def patch_docs():
shutil.copy('../../hrdoc.css', 'epydoc.css')
os.system('chmod +w epydoc.css')
for fn in glob('*.html'):
f = open(fn, 'r')
lines = f.read()
f.close()
r = add_footer(lines)
if not r:
print "-",
continue
f = open(fn, 'w')
f.write(r)
f.close()
print "+",
print "\nDocumentation patched!"
# --------------------------------------------------------------------------
def main():
# Save old directory and adjust import path
curdir = os.getcwd() + os.sep
sys.path.append(curdir + 'python')
sys.path.append(curdir + 'tools')
sys.path.append(curdir + 'docs')
old_dir = os.getcwd()
try:
print "Generating documentation....."
os.chdir('docs')
gen_docs()
os.chdir(DOC_DIR)
patch_docs()
print "Documentation generated!"
finally:
os.chdir(old_dir)
# --------------------------------------------------------------------------
if __name__ == '__main__':
main()
Exit(0)<|fim▁end|> | # -------------------------------------------------------------------------- |
<|file_name|>util.py<|end_file_name|><|fim▁begin|>def clean_dict_repr(mw):
"""Produce a repr()-like output of dict mw with ordered keys"""
return '{' + \
', '.join('{k!r}: {v!r}'.format(k=k, v=v) for k, v in<|fim▁hole|> sorted(mw.items())) +\
'}'<|fim▁end|> | |
<|file_name|>OgreOverlay.cpp<|end_file_name|><|fim▁begin|>/*
-----------------------------------------------------------------------------
This source file is part of OGRE
(Object-oriented Graphics Rendering Engine)
For the latest info, see http://www.ogre3d.org/
Copyright (c) 2000-2014 Torus Knot Software Ltd
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
-----------------------------------------------------------------------------
*/
#include "OgreOverlay.h"
#include "OgreRoot.h"
#include "OgreSceneManager.h"
#include "OgreOverlayContainer.h"
#include "OgreCamera.h"
#include "OgreOverlayManager.h"
#include "OgreQuaternion.h"
#include "OgreVector3.h"
namespace Ogre {
//---------------------------------------------------------------------
Overlay::Overlay(const String& name) :<|fim▁hole|> mTransformOutOfDate(true), mTransformUpdated(true),
mZOrder(100), mVisible(false), mInitialised(false)
{
mRootNode = OGRE_NEW SceneNode(NULL);
}
//---------------------------------------------------------------------
Overlay::~Overlay()
{
// remove children
OGRE_DELETE mRootNode;
for (OverlayContainerList::iterator i = m2DElements.begin();
i != m2DElements.end(); ++i)
{
(*i)->_notifyParent(0, 0);
}
}
//---------------------------------------------------------------------
const String& Overlay::getName(void) const
{
return mName;
}
//---------------------------------------------------------------------
void Overlay::assignZOrders()
{
ushort zorder = static_cast<ushort>(mZOrder * 100.0f);
// Notify attached 2D elements
OverlayContainerList::iterator i, iend;
iend = m2DElements.end();
for (i = m2DElements.begin(); i != iend; ++i)
{
zorder = (*i)->_notifyZOrder(zorder);
}
}
//---------------------------------------------------------------------
void Overlay::setZOrder(ushort zorder)
{
// Limit to 650 since this is multiplied by 100 to pad out for containers
assert (zorder <= 650 && "Overlay Z-order cannot be greater than 650!");
mZOrder = zorder;
assignZOrders();
}
//---------------------------------------------------------------------
ushort Overlay::getZOrder(void) const
{
return (ushort)mZOrder;
}
//---------------------------------------------------------------------
bool Overlay::isVisible(void) const
{
return mVisible;
}
//---------------------------------------------------------------------
void Overlay::show(void)
{
mVisible = true;
if (!mInitialised)
{
initialise();
}
}
//---------------------------------------------------------------------
void Overlay::hide(void)
{
mVisible = false;
}
//---------------------------------------------------------------------
void Overlay::initialise(void)
{
OverlayContainerList::iterator i, iend;
iend = m2DElements.end();
for (i = m2DElements.begin(); i != m2DElements.end(); ++i)
{
(*i)->initialise();
}
mInitialised = true;
}
//---------------------------------------------------------------------
void Overlay::add2D(OverlayContainer* cont)
{
m2DElements.push_back(cont);
// Notify parent
cont->_notifyParent(0, this);
assignZOrders();
Matrix4 xform;
_getWorldTransforms(&xform);
cont->_notifyWorldTransforms(xform);
cont->_notifyViewport();
}
//---------------------------------------------------------------------
void Overlay::remove2D(OverlayContainer* cont)
{
m2DElements.remove(cont);
cont->_notifyParent(0, 0);
assignZOrders();
}
//---------------------------------------------------------------------
void Overlay::add3D(SceneNode* node)
{
mRootNode->addChild(node);
}
//---------------------------------------------------------------------
void Overlay::remove3D(SceneNode* node)
{
mRootNode->removeChild(node->getName());
}
//---------------------------------------------------------------------
void Overlay::clear(void)
{
mRootNode->removeAllChildren();
m2DElements.clear();
// Note no deallocation, memory handled by OverlayManager & SceneManager
}
//---------------------------------------------------------------------
void Overlay::setScroll(Real x, Real y)
{
mScrollX = x;
mScrollY = y;
mTransformOutOfDate = true;
mTransformUpdated = true;
}
//---------------------------------------------------------------------
Real Overlay::getScrollX(void) const
{
return mScrollX;
}
//---------------------------------------------------------------------
Real Overlay::getScrollY(void) const
{
return mScrollY;
}
//---------------------------------------------------------------------
OverlayContainer* Overlay::getChild(const String& name)
{
OverlayContainerList::iterator i, iend;
iend = m2DElements.end();
for (i = m2DElements.begin(); i != iend; ++i)
{
if ((*i)->getName() == name)
{
return *i;
}
}
return NULL;
}
//---------------------------------------------------------------------
void Overlay::scroll(Real xoff, Real yoff)
{
mScrollX += xoff;
mScrollY += yoff;
mTransformOutOfDate = true;
mTransformUpdated = true;
}
//---------------------------------------------------------------------
void Overlay::setRotate(const Radian& angle)
{
mRotate = angle;
mTransformOutOfDate = true;
mTransformUpdated = true;
}
//---------------------------------------------------------------------
void Overlay::rotate(const Radian& angle)
{
setRotate(mRotate + angle);
}
//---------------------------------------------------------------------
void Overlay::setScale(Real x, Real y)
{
mScaleX = x;
mScaleY = y;
mTransformOutOfDate = true;
mTransformUpdated = true;
}
//---------------------------------------------------------------------
Real Overlay::getScaleX(void) const
{
return mScaleX;
}
//---------------------------------------------------------------------
Real Overlay::getScaleY(void) const
{
return mScaleY;
}
//---------------------------------------------------------------------
void Overlay::_getWorldTransforms(Matrix4* xform) const
{
if (mTransformOutOfDate)
{
updateTransform();
}
*xform = mTransform;
}
//---------------------------------------------------------------------
void Overlay::_findVisibleObjects(Camera* cam, RenderQueue* queue)
{
OverlayContainerList::iterator i, iend;
if (OverlayManager::getSingleton().hasViewportChanged())
{
iend = m2DElements.end();
for (i = m2DElements.begin(); i != iend; ++i)
{
(*i)->_notifyViewport();
}
}
// update elements
if (mTransformUpdated)
{
Matrix4 xform;
_getWorldTransforms(&xform);
iend = m2DElements.end();
for (i = m2DElements.begin(); i != iend; ++i)
{
(*i)->_notifyWorldTransforms(xform);
}
mTransformUpdated = false;
}
if (mVisible)
{
// Add 3D elements
mRootNode->setPosition(cam->getDerivedPosition());
mRootNode->setOrientation(cam->getDerivedOrientation());
mRootNode->_update(true, false);
// Set up the default queue group for the objects about to be added
uint8 oldgrp = queue->getDefaultQueueGroup();
ushort oldPriority = queue-> getDefaultRenderablePriority();
queue->setDefaultQueueGroup(RENDER_QUEUE_OVERLAY);
queue->setDefaultRenderablePriority(static_cast<ushort>((mZOrder*100)-1));
mRootNode->_findVisibleObjects(cam, queue, NULL, true, false);
// Reset the group
queue->setDefaultQueueGroup(oldgrp);
queue->setDefaultRenderablePriority(oldPriority);
// Add 2D elements
iend = m2DElements.end();
for (i = m2DElements.begin(); i != iend; ++i)
{
(*i)->_update();
(*i)->_updateRenderQueue(queue);
}
}
}
//---------------------------------------------------------------------
void Overlay::updateTransform(void) const
{
// Ordering:
// 1. Scale
// 2. Rotate
// 3. Translate
Radian orientationRotation = Radian(0);
#if OGRE_NO_VIEWPORT_ORIENTATIONMODE == 0
orientationRotation = Radian(OverlayManager::getSingleton().getViewportOrientationMode() * Math::HALF_PI);
#endif
Matrix3 rot3x3, scale3x3;
rot3x3.FromEulerAnglesXYZ(Radian(0), Radian(0), mRotate + orientationRotation);
scale3x3 = Matrix3::ZERO;
scale3x3[0][0] = mScaleX;
scale3x3[1][1] = mScaleY;
scale3x3[2][2] = 1.0f;
mTransform = Matrix4::IDENTITY;
mTransform = rot3x3 * scale3x3;
mTransform.setTrans(Vector3(mScrollX, mScrollY, 0));
mTransformOutOfDate = false;
}
//---------------------------------------------------------------------
OverlayElement* Overlay::findElementAt(Real x, Real y)
{
OverlayElement* ret = NULL;
int currZ = -1;
OverlayContainerList::iterator i, iend;
iend = m2DElements.end();
for (i = m2DElements.begin(); i != iend; ++i)
{
int z = (*i)->getZOrder();
if (z > currZ)
{
OverlayElement* elementFound = (*i)->findElementAt(x,y);
if(elementFound)
{
currZ = elementFound->getZOrder();
ret = elementFound;
}
}
}
return ret;
}
}<|fim▁end|> | mName(name),
mRotate(0.0f),
mScrollX(0.0f), mScrollY(0.0f),
mScaleX(1.0f), mScaleY(1.0f), |
<|file_name|>fractions.py<|end_file_name|><|fim▁begin|># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Handles backports of the standard library's `fractions.py`.
The fractions module in 2.6 does not handle being instantiated using a
float and then calculating an approximate fraction based on that.
This functionality is required by the FITS unit format generator,
since the FITS unit format handles only rational, not decimal point,
powers.<|fim▁hole|>import sys
if sys.version_info[:2] == (2, 6):
from ._fractions_py2 import *
else:
from fractions import *<|fim▁end|> | """
from __future__ import absolute_import
|
<|file_name|>unused-async.rs<|end_file_name|><|fim▁begin|>// edition:2018
// run-pass
#![allow(dead_code)]
#[must_use]
//~^ WARNING `must_use`
async fn test() -> i32 {
1
}
struct Wowee {}
impl Wowee {
#[must_use]
//~^ WARNING `must_use`
async fn test_method() -> i32 {
1
}
}
/* FIXME(guswynn) update this test when async-fn-in-traits works
trait Doer {
#[must_use]
async fn test_trait_method() -> i32;
WARNING must_use
async fn test_other_trait() -> i32;
}
impl Doer for Wowee {
async fn test_trait_method() -> i32 {
1<|fim▁hole|> async fn test_other_trait() -> i32 {
WARNING must_use
1
}
}
*/
fn main() {
}<|fim▁end|> | }
#[must_use] |
<|file_name|>actions.js<|end_file_name|><|fim▁begin|>/*
* App Actions
*
* Actions change things in your application
* Since this boilerplate uses a uni-directional data flow, specifically redux,
* we have these actions which are the only way your application interacts with
* your application state. This guarantees that your state is up to date and nobody
* messes it up weirdly somewhere.
*
* To add a new Action:
* 1) Import your constant
* 2) Add a function like this:
* export function yourAction(var) {
* return { type: YOUR_ACTION_CONSTANT, var: var }
* }
*/
import {
LOAD_SONGS,
LOAD_SONGS_SUCCESS,
LOAD_SONGS_ERROR,
} from './constants';
// /**
// * Load the repositories, this action starts the request saga
// *
// * @return {object} An action object with a type of LOAD_REPOS
// */
export function loadSongs() {
return {
type: LOAD_SONGS,
};
}
// /**
// * Dispatched when the repositories are loaded by the request saga
// *
// * @param {array} repos The repository data
// * @param {string} username The current username
// *
// * @return {object} An action object with a type of LOAD_REPOS_SUCCESS passing the repos
// */
export function songsLoaded(repos, username) {
return {
type: LOAD_SONGS_SUCCESS,
repos,
username,
};
}
// /**
// * Dispatched when loading the repositories fails
// *<|fim▁hole|>export function songsLoadingError(error) {
return {
type: LOAD_SONGS_ERROR,
error,
};
}<|fim▁end|> | // * @param {object} error The error
// *
// * @return {object} An action object with a type of LOAD_REPOS_ERROR passing the error
// */ |
<|file_name|>Intro.js<|end_file_name|><|fim▁begin|>// ==ClosureCompiler==
// @compilation_level SIMPLE_OPTIMIZATIONS
/**
 * @license @product.name@ JS v@product.version@ (@product.date@)
*
* (c) 2009-2014 Torstein Honsi
*
* License: www.highcharts.com/license
*/
<|fim▁hole|> module.exports = root.document ?
factory(root) :
function (w) {
return factory(w);
};
} else {
root.Highcharts = factory();
}
}(typeof window !== 'undefined' ? window : this, function (w) {<|fim▁end|> | (function (root, factory) {
if (typeof module === 'object' && module.exports) { |
<|file_name|>constellation_msg.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! The high-level interface from script to constellation. Using this abstract interface helps
//! reduce coupling between these two components.
use std::cell::Cell;
use std::fmt;
use webrender_traits;
#[derive(PartialEq, Eq, Copy, Clone, Debug, Deserialize, Serialize)]
pub enum KeyState {
Pressed,
Released,
Repeated,
}
//N.B. Based on the glutin key enum
#[derive(Debug, PartialEq, Eq, Copy, Clone, Deserialize, Serialize, HeapSizeOf)]
pub enum Key {
Space,
Apostrophe,
Comma,
Minus,
Period,
Slash,
Num0,
Num1,
Num2,
Num3,
Num4,
Num5,
Num6,
Num7,
Num8,
Num9,
Semicolon,
Equal,
A,
B,
C,
D,
E,
F,
G,
H,
I,
J,
K,
L,
M,
N,
O,
P,
Q,
R,
S,
T,
U,
V,
W,
X,
Y,
Z,
LeftBracket,
Backslash,
RightBracket,
GraveAccent,
World1,
World2,
Escape,
Enter,
Tab,
Backspace,
Insert,
Delete,
Right,
Left,
Down,
Up,
PageUp,
PageDown,
Home,
End,
CapsLock,
ScrollLock,
NumLock,
PrintScreen,
Pause,
F1,
F2,
F3,
F4,
F5,
F6,
F7,
F8,
F9,
F10,
F11,
F12,
F13,
F14,
F15,
F16,
F17,
F18,
F19,
F20,
F21,
F22,
F23,
F24,
F25,
Kp0,
Kp1,
Kp2,
Kp3,
Kp4,
Kp5,
Kp6,
Kp7,
Kp8,
Kp9,
KpDecimal,
KpDivide,
KpMultiply,
KpSubtract,
KpAdd,
KpEnter,
KpEqual,
LeftShift,
LeftControl,
LeftAlt,
LeftSuper,
RightShift,
RightControl,
RightAlt,
RightSuper,
Menu,
NavigateBackward,
NavigateForward,
}
bitflags! {
#[derive(Deserialize, Serialize)]
pub flags KeyModifiers: u8 {
const NONE = 0x00,
const SHIFT = 0x01,
const CONTROL = 0x02,
const ALT = 0x04,
const SUPER = 0x08,
}
}
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize)]
pub enum TraversalDirection {
Forward(usize),
Back(usize),
}
/// Each pipeline ID needs to be unique. However, it also needs to be possible to
/// generate the pipeline ID from an iframe element (this simplifies a lot of other
/// code that makes use of pipeline IDs).
///
/// To achieve this, each pipeline index belongs to a particular namespace. There is
/// a namespace for the constellation thread, and also one for every script thread.
/// This allows pipeline IDs to be generated by any of those threads without conflicting
/// with pipeline IDs created by other script threads or the constellation. The
/// constellation is the only code that is responsible for creating new *namespaces*.
/// This ensures that namespaces are always unique, even when using multi-process mode.
///
/// It may help conceptually to think of the namespace ID as an identifier for the
/// thread that created this pipeline ID - however this is really an implementation
/// detail so shouldn't be relied upon in code logic. It's best to think of the
/// pipeline ID as a simple unique identifier that doesn't convey any more information.
#[derive(Clone, Copy)]
pub struct PipelineNamespace {
id: PipelineNamespaceId,
index: u32,
}
impl PipelineNamespace {
pub fn install(namespace_id: PipelineNamespaceId) {
PIPELINE_NAMESPACE.with(|tls| {
assert!(tls.get().is_none());
tls.set(Some(PipelineNamespace {
id: namespace_id,
index: 0,
}));
});
}
fn next_index(&mut self) -> u32 {
let result = self.index;
self.index = result + 1;
result
}
fn next_pipeline_id(&mut self) -> PipelineId {
PipelineId {
namespace_id: self.id,
index: PipelineIndex(self.next_index()),
}
}
fn next_frame_id(&mut self) -> FrameId {
FrameId {
namespace_id: self.id,
index: FrameIndex(self.next_index()),
}
}
}
thread_local!(pub static PIPELINE_NAMESPACE: Cell<Option<PipelineNamespace>> = Cell::new(None));
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineNamespaceId(pub u32);
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineIndex(pub u32);
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct PipelineId {
pub namespace_id: PipelineNamespaceId,
pub index: PipelineIndex
}
impl PipelineId {
pub fn new() -> PipelineId {
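        // Copy this thread's namespace out of the TLS cell, mint the next id,
        // and store the bumped counter back.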
PIPELINE_NAMESPACE.with(|tls| {
let mut namespace = tls.get().expect("No namespace set for this thread!");
let new_pipeline_id = namespace.next_pipeline_id();
tls.set(Some(namespace));<|fim▁hole|> new_pipeline_id
})
}
pub fn to_webrender(&self) -> webrender_traits::PipelineId {
let PipelineNamespaceId(namespace_id) = self.namespace_id;
let PipelineIndex(index) = self.index;
webrender_traits::PipelineId(namespace_id, index)
}
pub fn root_scroll_node(&self) -> webrender_traits::ClipId {
webrender_traits::ClipId::root_scroll_node(self.to_webrender())
}
}
impl fmt::Display for PipelineId {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let PipelineNamespaceId(namespace_id) = self.namespace_id;
let PipelineIndex(index) = self.index;
write!(fmt, "({},{})", namespace_id, index)
}
}
thread_local!(pub static TOP_LEVEL_FRAME_ID: Cell<Option<FrameId>> = Cell::new(None));
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct FrameIndex(pub u32);
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub struct FrameId {
pub namespace_id: PipelineNamespaceId,
pub index: FrameIndex
}
impl FrameId {
pub fn new() -> FrameId {
PIPELINE_NAMESPACE.with(|tls| {
let mut namespace = tls.get().expect("No namespace set for this thread!");
let new_frame_id = namespace.next_frame_id();
tls.set(Some(namespace));
new_frame_id
})
}
/// Each script and layout thread should have the top-level frame id installed,
/// since it is used by crash reporting.
pub fn install(id: FrameId) {
TOP_LEVEL_FRAME_ID.with(|tls| tls.set(Some(id)))
}
pub fn installed() -> Option<FrameId> {
TOP_LEVEL_FRAME_ID.with(|tls| tls.get())
}
}
impl fmt::Display for FrameId {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
let PipelineNamespaceId(namespace_id) = self.namespace_id;
let FrameIndex(index) = self.index;
write!(fmt, "({},{})", namespace_id, index)
}
}
// We provide ids just for unit testing.
pub const TEST_NAMESPACE: PipelineNamespaceId = PipelineNamespaceId(1234);
pub const TEST_PIPELINE_INDEX: PipelineIndex = PipelineIndex(5678);
pub const TEST_PIPELINE_ID: PipelineId = PipelineId { namespace_id: TEST_NAMESPACE, index: TEST_PIPELINE_INDEX };
pub const TEST_FRAME_INDEX: FrameIndex = FrameIndex(8765);
pub const TEST_FRAME_ID: FrameId = FrameId { namespace_id: TEST_NAMESPACE, index: TEST_FRAME_INDEX };
#[derive(Clone, PartialEq, Eq, Copy, Hash, Debug, Deserialize, Serialize, HeapSizeOf)]
pub enum FrameType {
IFrame,
MozBrowserIFrame,
}<|fim▁end|> | |
<|file_name|>dma.rs<|end_file_name|><|fim▁begin|>use core::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use libd7::{syscall, PhysAddr, VirtAddr};
static MAPPED: AtomicBool = AtomicBool::new(false);
static VIRTUAL_ADDR: VirtAddr = unsafe { VirtAddr::new_unsafe(0x10_0000_0000) }; // Should be free
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct DMARegion {
pub phys: PhysAddr,
pub virt: VirtAddr,
}
impl DMARegion {
pub fn allocate(size_bytes: usize) -> Self {
let phys = syscall::dma_allocate(size_bytes as u64).unwrap();
// Assumes that DMA block is on the first page.
// Keep in sync with plan.md<|fim▁hole|> VIRTUAL_ADDR,
size_bytes as u64,
syscall::MemoryProtectionFlags::READ | syscall::MemoryProtectionFlags::WRITE,
)
.unwrap();
}
}
Self {
phys,
virt: VIRTUAL_ADDR + phys.as_u64(),
}
}
}<|fim▁end|> | if !MAPPED.compare_and_swap(false, true, Ordering::SeqCst) {
unsafe {
syscall::mmap_physical(
PhysAddr::new(0), |
<|file_name|>43.cpp<|end_file_name|><|fim▁begin|>class Solution {
public:
string multiply(string num1, string num2) {
string a, b;
a = num1;
b = num2;
string A, B;
// cout << a;
// cout << b;
int fa = 1;
int fb = 1;
int f = 1;
int lena = 0;
int lenb = 0;
if (a[0] == '-') {
A = a.substr(1, a.size() - 1);
fa = -1;
lena = a.size() - 1;
<|fim▁hole|> lena = a.size();
}
if (b[0] == '-') {
B = b.substr(1, b.size() - 1);
fb = -1;
lenb = b.size() - 1;
} else {
B = b;
lenb = b.size();
}
// cout << A << endl;
// cout << B;
f = fa * fb;
int lenmax = max(lena, lenb);
int na[lenmax];
int nb[lenmax];
int i;
int j;
// cout << lenmax<< endl;
for (i = lenmax - 1, j = lena - 1; j >= 0; --i, --j) {
na[i] = A[j] - '0';
// cout << na[i] << endl;
}
while (i >= 0) {
na[i] = 0;
--i;
}
for (i = lenmax - 1, j = lenb - 1; j >= 0; --i, --j) {
nb[i] = B[j] - '0';
// cout << nb[i] << endl;
}
while (i >= 0) {
nb[i] = 0;
--i;
}
int nc[2 * lenmax];
for (i = 0; i < 2 * lenmax; ++i) {
nc[i] = 0;
}
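        // Schoolbook multiplication: nc stores digits least-significant first,
        // and nc[i + j] accumulates the products of the individual digits.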
for (i = 0; i < lenmax; ++i) {
for (j = 0; j < lenmax; ++j) {
nc[i + j] += na[lenmax - 1 - i] * nb[lenmax - 1 - j];
}
}
for (i = 0; i < 2 * lenmax - 1; ++i) {
nc[i + 1] += nc[i] / 10;
nc[i] = nc[i] % 10;
}
j = 2 * lenmax - 1;
// cout << j << endl;
        while (j >= 0 && nc[j] == 0) {
--j;
if (j == -1) {
break;
}
}
string res;
if (f == -1) {
// printf("-");
res += "-";
}
if (j == -1) {
res += "0";
return res;
} else {
for (i = j; i >= 0; --i) {
// printf("%d",nc[i]);
res += to_string(nc[i]);
}
return res;
}
}
};<|fim▁end|> | } else {
A = a;
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![feature(proc_macro)]
#![recursion_limit = "128"]
#[macro_use]
extern crate lazy_static;
extern crate proc_macro;
#[macro_use]
extern crate quote;
extern crate regex;
extern crate remacs_util;
extern crate syn;
use proc_macro::TokenStream;
use regex::Regex;
mod function;
#[proc_macro_attribute]
pub fn lisp_fn(attr_ts: TokenStream, fn_ts: TokenStream) -> TokenStream {
let fn_item = syn::parse_item(&fn_ts.to_string()).unwrap();
let function = function::parse(&fn_item).unwrap();
let lisp_fn_args = match remacs_util::parse_lisp_fn(
&attr_ts.to_string(),
&function.name,
function.fntype.def_min_args(),
) {
Ok(v) => v,
Err(e) => panic!("Invalid lisp_fn attribute: {}", e),
};
let mut cargs = quote::Tokens::new();
let mut rargs = quote::Tokens::new();
let mut body = quote::Tokens::new();
let max_args = function.args.len() as i16;
let intspec = if let Some(intspec) = lisp_fn_args.intspec {
let cbyte_intspec = CByteLiteral(intspec.as_str());
quote!{ (#cbyte_intspec).as_ptr() as *const ::libc::c_char }
} else {
quote!{ ::std::ptr::null() }
};
match function.fntype {
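        // Normal lisp_fns expose each Lisp_Object as its own C parameter;
        // MANY-style fns are called with (nargs, *args) instead.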
function::LispFnType::Normal(_) => for ident in function.args {
let arg = quote! { #ident: ::remacs_sys::Lisp_Object, };
cargs.append(arg);
let arg = quote! { ::lisp::LispObject::from_raw(#ident).into(), };
rargs.append(arg);
},
function::LispFnType::Many => {
let args = quote! {
nargs: ::libc::ptrdiff_t,
args: *mut ::remacs_sys::Lisp_Object,
};
cargs.append(args);
let b = quote! {
let args = unsafe {
::std::slice::from_raw_parts_mut::<::remacs_sys::Lisp_Object>(
args, nargs as usize)
};
};
body.append(b);
let arg = quote! { unsafe { ::std::mem::transmute(args) } };<|fim▁hole|> }
}
let cname = lisp_fn_args.c_name;
let sname = concat_idents("S", &cname);
let fname = concat_idents("F", &cname);
let rname = function.name;
let min_args = lisp_fn_args.min;
let mut windows_header = quote!{};
let max_args = if lisp_fn_args.unevalled {
quote! { -1 }
} else {
match function.fntype {
function::LispFnType::Normal(_) => quote! { #max_args },
function::LispFnType::Many => quote! { ::lisp::MANY },
}
};
let symbol_name = CByteLiteral(&lisp_fn_args.name);
if cfg!(windows) {
windows_header = quote!{
| (::std::mem::size_of::<::remacs_sys::Lisp_Subr>()
/ ::std::mem::size_of::<::remacs_sys::EmacsInt>()) as ::libc::ptrdiff_t
};
}
let tokens = quote! {
#[no_mangle]
pub extern "C" fn #fname(#cargs) -> ::remacs_sys::Lisp_Object {
#body
let ret = #rname(#rargs);
::lisp::LispObject::from(ret).to_raw()
}
lazy_static! {
pub static ref #sname: ::lisp::LispSubrRef = {
let subr = ::remacs_sys::Lisp_Subr {
header: ::remacs_sys::Lisp_Vectorlike_Header {
size: ((::remacs_sys::PseudovecType::PVEC_SUBR as ::libc::ptrdiff_t)
<< ::remacs_sys::PSEUDOVECTOR_AREA_BITS) #windows_header,
},
function: self::#fname as *const ::libc::c_void,
min_args: #min_args,
max_args: #max_args,
symbol_name: (#symbol_name).as_ptr() as *const ::libc::c_char,
intspec: #intspec,
doc: ::std::ptr::null(),
lang: ::remacs_sys::Lisp_Subr_Lang_Rust,
};
unsafe {
let ptr =
::remacs_sys::xmalloc(::std::mem::size_of::<::remacs_sys::Lisp_Subr>())
as *mut ::remacs_sys::Lisp_Subr;
::std::ptr::copy_nonoverlapping(&subr, ptr, 1);
::std::mem::forget(subr);
::lisp::ExternalPtr::new(ptr)
}
};
}
};
// we could put #fn_item into the quoted code above, but doing so
// drops all of the line numbers on the floor and causes the
// compiler to attribute any errors in the function to the macro
// invocation instead.
// Note: TokenStream has a FromIterator trait impl that converts
// an iterator over Token{Stream,Tree,Node}s into a single
// TokenStream; collect() calls that impl for us.
vec![tokens.parse().unwrap(), fn_ts].into_iter().collect()
}
struct CByteLiteral<'a>(&'a str);
impl<'a> quote::ToTokens for CByteLiteral<'a> {
fn to_tokens(&self, tokens: &mut quote::Tokens) {
lazy_static! {
static ref RE: Regex = Regex::new(r#"["\\]"#).unwrap();
}
let s = RE.replace_all(self.0, |caps: ®ex::Captures| {
format!("\\x{:x}", u32::from(caps[0].chars().next().unwrap()))
});
tokens.append(&format!(r#"b"{}\0""#, s));
}
}
fn concat_idents(lhs: &str, rhs: &str) -> syn::Ident {
syn::Ident::new(format!("{}{}", lhs, rhs))
}<|fim▁end|> | rargs.append(arg); |
<|file_name|>ScheduleIterator.java<|end_file_name|><|fim▁begin|>/**
* Copyright 2011 Pedro Ribeiro
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.<|fim▁hole|> * You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jalphanode.scheduler;
import java.util.Date;
/**
* Iterator which builds the next execution time.
*
* @author ribeirux
* @version $Revision$
*/
public interface ScheduleIterator {
/**
* Builds the next execution time according the specified {@code date}.
*
* @param date the date to begin the search for the next valid date
*
* @return the next execution date
*/
Date next(Date date);
}<|fim▁end|> | |
<|file_name|>Log.cpp<|end_file_name|><|fim▁begin|>/*
This file is part of Ingen.
Copyright 2007-2016 David Robillard <http://drobilla.net/>
Ingen is free software: you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License as published by the Free
Software Foundation, either version 3 of the License, or any later version.
Ingen is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
A PARTICULAR PURPOSE. See the GNU Affero General Public License for details.
You should have received a copy of the GNU Affero General Public License
along with Ingen. If not, see <http://www.gnu.org/licenses/>.
*/
#include "ingen/Log.hpp"
#include "ingen/ColorContext.hpp"
#include "ingen/Node.hpp"
#include "ingen/URIs.hpp"
#include "ingen/World.hpp"
#include "lv2/core/lv2.h"
#include "lv2/log/log.h"
#include "lv2/urid/urid.h"
#include "raul/Path.hpp"
#include <cstdio>
#include <cstdlib>
namespace ingen {
Log::Log(LV2_Log_Log* log, URIs& uris)
: _log(log)
, _uris(uris)
, _flush(false)
, _trace(false)
{}
void
Log::rt_error(const char* msg)
{
#ifndef NDEBUG
va_list args;
vtprintf(_uris.log_Error, msg, args);<|fim▁hole|>}
void
Log::error(const std::string& msg)
{
va_list args;
vtprintf(_uris.log_Error, msg.c_str(), args);
}
void
Log::warn(const std::string& msg)
{
va_list args;
vtprintf(_uris.log_Warning, msg.c_str(), args);
}
void
Log::info(const std::string& msg)
{
va_list args;
vtprintf(_uris.log_Note, msg.c_str(), args);
}
void
Log::trace(const std::string& msg)
{
va_list args;
vtprintf(_uris.log_Trace, msg.c_str(), args);
}
void
Log::print(FILE* stream, const std::string& msg) const
{
fprintf(stream, "%s", msg.c_str());
if (_flush) {
fflush(stdout);
}
}
int
Log::vtprintf(LV2_URID type, const char* fmt, va_list args)
{
int ret = 0;
if (type == _uris.log_Trace && !_trace) {
return 0;
} else if (_sink) {
_sink(type, fmt, args);
}
if (_log) {
ret = _log->vprintf(_log->handle, type, fmt, args);
} else if (type == _uris.log_Error) {
ColorContext ctx(stderr, ColorContext::Color::RED);
ret = vfprintf(stderr, fmt, args);
} else if (type == _uris.log_Warning) {
ColorContext ctx(stderr, ColorContext::Color::YELLOW);
ret = vfprintf(stderr, fmt, args);
} else if (type == _uris.log_Note) {
ColorContext ctx(stderr, ColorContext::Color::GREEN);
ret = vfprintf(stdout, fmt, args);
} else if (_trace && type == _uris.log_Trace) {
ColorContext ctx(stderr, ColorContext::Color::GREEN);
ret = vfprintf(stderr, fmt, args);
} else {
fprintf(stderr, "Unknown log type %u\n", type);
return 0;
}
if (_flush) {
fflush(stdout);
}
return ret;
}
static int
log_vprintf(LV2_Log_Handle handle, LV2_URID type, const char* fmt, va_list args)
{
auto* f = static_cast<Log::Feature::Handle*>(handle);
va_list noargs = {};
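	// The prefix strings below contain no format specifiers, so the empty va_list is never actually read.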
int ret = f->log->vtprintf(type, f->node->path().c_str(), noargs);
ret += f->log->vtprintf(type, ": ", noargs);
ret += f->log->vtprintf(type, fmt, args);
return ret;
}
static int
log_printf(LV2_Log_Handle handle, LV2_URID type, const char* fmt, ...)
{
va_list args;
va_start(args, fmt);
const int ret = log_vprintf(handle, type, fmt, args);
va_end(args);
return ret;
}
static void
free_log_feature(LV2_Feature* feature) {
auto* lv2_log = static_cast<LV2_Log_Log*>(feature->data);
free(lv2_log->handle);
free(feature);
}
std::shared_ptr<LV2_Feature>
Log::Feature::feature(World& world, Node* block)
{
auto* handle = static_cast<Handle*>(calloc(1, sizeof(Handle)));
handle->lv2_log.handle = handle;
handle->lv2_log.printf = log_printf;
handle->lv2_log.vprintf = log_vprintf;
handle->log = &world.log();
handle->node = block;
auto* f = static_cast<LV2_Feature*>(malloc(sizeof(LV2_Feature)));
f->URI = LV2_LOG__log;
f->data = &handle->lv2_log;
return std::shared_ptr<LV2_Feature>(f, &free_log_feature);
}
} // namespace ingen<|fim▁end|> | #endif |
<|file_name|>28 Implement strStr.py<|end_file_name|><|fim▁begin|>class Solution(object):
def strStr(self, haystack, needle):
"""
:type haystack: str
:type needle: str
:rtype: int
"""
if not needle:
return 0
if len(haystack) < len(needle):
return -1
for i in xrange(len(haystack)):
if i + len(needle) > len(haystack):
return -1
if haystack[i] != needle[0] or haystack[i+len(needle)-1] != needle[-1]:
continue
else:<|fim▁hole|> j += 1
if j == len(needle):
return i
return -1
if __name__ == '__main__':
s1 = ""
s2 = ""
s3 = ""
print Solution().strStr(s1,s2)<|fim▁end|> | j=0
while j < len(needle) and i+j < len(haystack):
if haystack[i+j] != needle[j]:
break |
<|file_name|>encoding.py<|end_file_name|><|fim▁begin|>import re
import chardet
import sys
RE_CHARSET = re.compile(br'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
RE_PRAGMA = re.compile(br'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
RE_XML = re.compile(br'^<\?xml.*?encoding=["\']*(.+?)["\'>]')
CHARSETS = {
'big5': 'big5hkscs',
'gb2312': 'gb18030',
'ascii': 'utf-8',
'maccyrillic': 'cp1251',
'win1251': 'cp1251',
'win-1251': 'cp1251',
'windows-1251': 'cp1251',
}<|fim▁hole|>
def fix_charset(encoding):
"""Overrides encoding when charset declaration
or charset determination is a subset of a larger
charset. Created because of issues with Chinese websites"""
encoding = encoding.lower()
return CHARSETS.get(encoding, encoding)
def get_encoding(page):
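    """Detect a page's encoding: try declared charsets first, then fall back to chardet on the tag-stripped text."""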
# Regex for XML and HTML Meta charset declaration
declared_encodings = (RE_CHARSET.findall(page) +
RE_PRAGMA.findall(page) +
RE_XML.findall(page))
# Try any declared encodings
for declared_encoding in declared_encodings:
try:
if sys.version_info[0] == 3:
# declared_encoding will actually be bytes but .decode() only
# accepts `str` type. Decode blindly with ascii because no one should
# ever use non-ascii characters in the name of an encoding.
declared_encoding = declared_encoding.decode('ascii', 'replace')
encoding = fix_charset(declared_encoding)
# Now let's decode the page
page.decode()
# It worked!
return encoding
except UnicodeDecodeError:
pass
# Fallback to chardet if declared encodings fail
# Remove all HTML tags, and leave only text for chardet
text = re.sub(b'(\s*</?[^>]*>)+\s*', b' ', page).strip()
enc = 'utf-8'
if len(text) < 10:
return enc # can't guess
res = chardet.detect(text)
enc = res['encoding'] or 'utf-8'
#print '->', enc, "%.2f" % res['confidence']
enc = fix_charset(enc)
return enc<|fim▁end|> | |
<|file_name|>kis_transparency_mask.cc<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2006 Boudewijn Rempt <[email protected]>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
#include "kis_transparency_mask.h"
#include "kis_debug.h"
#include <KoIcon.h>
#include <KoColor.h>
#include <KoColorSpace.h>
#include <KoCompositeOpRegistry.h>
#include "kis_paint_device.h"
#include "kis_painter.h"
#include "kis_node_visitor.h"
#include "kis_processing_visitor.h"
KisTransparencyMask::KisTransparencyMask()
: KisEffectMask()
{
}
KisTransparencyMask::KisTransparencyMask(const KisTransparencyMask& rhs)
: KisEffectMask(rhs)
{
}
KisTransparencyMask::~KisTransparencyMask()
{
}
bool KisTransparencyMask::allowAsChild(KisNodeSP node) const
{
Q_UNUSED(node);
return false;
}
QRect KisTransparencyMask::decorateRect(KisPaintDeviceSP &src,
KisPaintDeviceSP &dst,
const QRect & rc) const
{
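    // Copy the source rect into dst with COPY, then wipe src so the masked
    // layer's own content renders as fully transparent.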
if (src != dst) {
KisPainter gc(dst);
gc.setCompositeOp(src->colorSpace()->compositeOp(COMPOSITE_COPY));
gc.bitBlt(rc.topLeft(), src, rc);
src->fill(rc, KoColor(Qt::transparent, src->colorSpace()));
}
return rc;
}
QRect KisTransparencyMask::extent() const
{
return parent() ? parent()->extent() : QRect();
}
QRect KisTransparencyMask::exactBounds() const
{
return parent() ? parent()->exactBounds() : QRect();
}
QRect KisTransparencyMask::changeRect(const QRect &rect, PositionToFilthy pos) const
{
/**
* Selection on transparency masks have no special meaning:
* They do crop both: change and need area
*/
return KisMask::changeRect(rect, pos);
}
QRect KisTransparencyMask::needRect(const QRect &rect, PositionToFilthy pos) const
{
/**
* Selection on transparency masks have no special meaning:
* They do crop both: change and need area
*/
return KisMask::needRect(rect, pos);
}
QIcon KisTransparencyMask::icon() const<|fim▁hole|>}
bool KisTransparencyMask::accept(KisNodeVisitor &v)
{
return v.visit(this);
}
void KisTransparencyMask::accept(KisProcessingVisitor &visitor, KisUndoAdapter *undoAdapter)
{
return visitor.visit(this, undoAdapter);
}
#include "kis_transparency_mask.moc"<|fim▁end|> | {
return koIcon("view-filter"); |
<|file_name|>Guides through All Selected Nodes.py<|end_file_name|><|fim▁begin|>#MenuTitle: Guides through All Selected Nodes
# -*- coding: utf-8 -*-
from __future__ import division, print_function, unicode_literals
__doc__="""
Creates guides through all selected nodes.
"""
from Foundation import NSPoint
import math
thisFont = Glyphs.font # frontmost font
selectedLayers = thisFont.selectedLayers # active layers of selected glyphs
def angle( firstPoint, secondPoint ):
"""
Returns the angle (in degrees) of the straight line between firstPoint and secondPoint,
0 degrees being the second point to the right of first point.
firstPoint, secondPoint: must be NSPoint or GSNode
"""
xDiff = secondPoint.x - firstPoint.x
yDiff = secondPoint.y - firstPoint.y
return math.degrees(math.atan2(yDiff,xDiff))
def newGuide( position, angle=0 ):
try:
# GLYPHS 3
newGuide = GSGuide()
except:
# GLYPHS 2
newGuide = GSGuideLine()
newGuide.position = position
newGuide.angle = angle
return newGuide<|fim▁hole|>def isThereAlreadyAGuideWithTheseProperties(thisLayer,guideposition,guideangle):
if guideangle < 0:
guideangle += 180
if guideangle > 180:
guideangle -= 180
for thisGuide in thisLayer.guides:
thisAngle = thisGuide.angle
if thisAngle < 0:
thisAngle += 180
if thisAngle > 180:
thisAngle -= 180
if abs(thisAngle - guideangle) < 0.01 and abs(thisGuide.position.x - guideposition.x) < 0.01 and abs(thisGuide.position.y - guideposition.y) < 0.01:
return True
return False
if len(selectedLayers) == 1:
thisLayer = selectedLayers[0]
thisGlyph = thisLayer.parent
currentPointSelection = [point.position for point in thisLayer.selection if type(point) in (GSNode,GSAnchor)]
# thisGlyph.beginUndo() # undo grouping causes crashes
try:
if len(currentPointSelection) > 1:
# clear selection:
thisLayer.clearSelection()
currentPointSelection.append(currentPointSelection[0])
for i,j in enumerate(range(1,len(currentPointSelection))):
point1 = currentPointSelection[i]
point2 = currentPointSelection[j]
angleBetweenPoints = angle(point1,point2)
middlePoint = addPoints(point1,point2)
middlePoint.x *= 0.5
middlePoint.y *= 0.5
# create guide and add it to layer:
if not isThereAlreadyAGuideWithTheseProperties(thisLayer, middlePoint, angleBetweenPoints):
guideBetweenPoints = newGuide(middlePoint, angleBetweenPoints)
thisLayer.guides.append( guideBetweenPoints )
# select it:
thisLayer.selection.append(guideBetweenPoints)
elif len(currentPointSelection) == 1:
point = currentPointSelection[0]
guide = newGuide(point)
thisLayer.guides.append(guide)
# select only guide:
thisLayer.clearSelection()
thisLayer.selection.append(guide)
except Exception as e:
raise e
# finally:
# thisGlyph.endUndo() # undo grouping causes crashes<|fim▁end|> | |
<|file_name|>animation.rs<|end_file_name|><|fim▁begin|>use crate::ipc_protocol::{ServerOneshotSender, ServerResponse, RotationDirection};
use crate::radians::Radians;
use crate::{Distance, Point};
use super::HandlerError;
use super::super::{
event_loop_notifier::EventLoopNotifier,
state::TurtleState,
app::{TurtleId, App},
animation::{MoveAnimation, RotateAnimation, AnimationRunner},
renderer::display_list::DisplayList,
};
pub(crate) fn move_forward(
conn: ServerOneshotSender,
app: &mut App,
display_list: &mut DisplayList,
event_loop: &EventLoopNotifier,
anim_runner: &AnimationRunner,
id: TurtleId,
distance: Distance,
) -> Result<(), HandlerError> {
let turtle = app.turtle_mut(id);
let TurtleState {position, heading, ..} = turtle.state;
// The total amount we'll move in the x and y directions
let movement = Point {
x: distance * heading.cos(),
y: distance * heading.sin(),
};
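    // Illustrative values: with heading = pi/2 rad (facing straight up) and
    // distance = 10.0, movement works out to approximately (0.0, 10.0).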
let target_pos = position + movement;
let anim = MoveAnimation::new(turtle, display_list, target_pos);
if anim.is_running() {
anim_runner.play(id, anim, conn.client_id());
} else {
// Instant animations complete right away and don't need to be queued
// Signal the main thread that the image has changed
event_loop.request_redraw()?;
conn.send(ServerResponse::AnimationComplete(id))?;
}
Ok(())
}
pub(crate) fn move_to(
conn: ServerOneshotSender,
app: &mut App,
display_list: &mut DisplayList,
event_loop: &EventLoopNotifier,
anim_runner: &AnimationRunner,
id: TurtleId,
target_pos: Point,
) -> Result<(), HandlerError> {
let turtle = app.turtle_mut(id);
let anim = MoveAnimation::new(turtle, display_list, target_pos);
if anim.is_running() {
anim_runner.play(id, anim, conn.client_id());
} else {
// Instant animations complete right away and don't need to be queued
// Signal the main thread that the image has changed
event_loop.request_redraw()?;
conn.send(ServerResponse::AnimationComplete(id))?;
}
Ok(())
}
pub(crate) fn rotate_in_place(
conn: ServerOneshotSender,
app: &mut App,
event_loop: &EventLoopNotifier,
anim_runner: &AnimationRunner,
id: TurtleId,
angle: Radians,
direction: RotationDirection,
) -> Result<(), HandlerError> {
let turtle = app.turtle_mut(id);
let anim = RotateAnimation::new(turtle, angle, direction);
if anim.is_running() {
anim_runner.play(id, anim, conn.client_id());
} else {
// Instant animations complete right away and don't need to be queued<|fim▁hole|> // Signal the main thread that the image has changed
event_loop.request_redraw()?;
conn.send(ServerResponse::AnimationComplete(id))?;
}
Ok(())
}<|fim▁end|> | |
<|file_name|>chain_call_after.rs<|end_file_name|><|fim▁begin|>fn main() {<|fim▁hole|> <caret>
}<|fim▁end|> | let moo = foo().bar().baz() |
<|file_name|>mmu.rs<|end_file_name|><|fim▁begin|>//! Generic definitions related to the MMU
use core::ops::{Add, AddAssign, Deref};
use hal::arch::mmu::PageEntry;
use hal::arch::mmu::{
PAGE_SHIFT,
OFFSET_MASK,
L1_PAGE_SHIFT,
L2_PAGE_SHIFT,
L3_PAGE_SHIFT,
L4_PAGE_SHIFT
};
pub trait Address {
type Repr;
}
#[repr(C)]
#[derive(Clone, Copy, PartialEq, PartialOrd)]
#[cfg(feature = "xen")]
/// A physical address
pub struct Paddr(u64);
#[cfg(feature = "xen")]
impl Address for Paddr { type Repr = u64; }
#[repr(C)]
#[derive(Clone, Copy, PartialEq, PartialOrd)]
#[cfg(feature = "xen")]
/// A machine address
pub struct Maddr(u64);
#[cfg(feature = "xen")]
impl Address for Maddr { type Repr = u64; }
#[repr(C)]
#[derive(Clone, Copy, PartialEq, PartialOrd)]
#[cfg(not(feature = "xen"))]
/// A physical address
pub struct Paddr(usize);
#[cfg(not(feature = "xen"))]
impl Address for Paddr { type Repr = usize; }
#[repr(C)]
#[derive(Clone, Copy, PartialEq, PartialOrd)]
#[cfg(not(feature = "xen"))]
/// A machine address
pub struct Maddr(usize);
#[cfg(not(feature = "xen"))]
impl Address for Maddr { type Repr = usize; }
#[repr(C)]
#[derive(Clone, Copy, PartialEq, PartialOrd)]
/// A virtual address
pub struct Vaddr(usize);
impl Address for Vaddr { type Repr = usize; }
#[repr(C)]
#[derive(Clone, Copy)]
/// Physical frame number
pub struct Pfn(usize);
#[repr(C)]
#[derive(Clone, Copy)]
/// Machine frame number
pub struct Mfn(usize);
impl Vaddr {
/// Create a new virtual address
pub fn new(value: <Vaddr as Address>::Repr) -> Self {
Vaddr(value)
}
/// Create a new virtual address from a constant pointer
pub fn from_ptr<T>(ptr: *const T) -> Self {
Vaddr(ptr as usize)
}
/// Create a new virtual address from a mutable pointer
pub fn from_mut_ptr<T>(ptr: *mut T) -> Self {
Vaddr(ptr as usize)
}
/// Get the offset in the 4th layer of MMU configuration tables
///
/// Note: This value will be garbage if the table does not exist
pub fn l4_offset(&self) -> usize {
((self.0 >> L4_PAGE_SHIFT) & OFFSET_MASK) as usize
}
/// Get the offset in the 3rd layer of MMU configuration tables
///
/// Note: This value will be garbage if the table does not exist
pub fn l3_offset(&self) -> usize {
((self.0 >> L3_PAGE_SHIFT) & OFFSET_MASK) as usize
}
/// Get the offset in the 2nd layer of MMU configuration tables
pub fn l2_offset(&self) -> usize {
((self.0 >> L2_PAGE_SHIFT) & OFFSET_MASK) as usize
}
/// Get the offset in the 1st layer of MMU configuration tables
pub fn l1_offset(&self) -> usize {
((self.0 >> L1_PAGE_SHIFT) & OFFSET_MASK) as usize
}
/// Get the internal value as a constant rust pointer
///
/// This is unsafe as the internal value might not be a valid pointer
pub unsafe fn as_ptr<T>(&self) -> *const T {
self.0 as *const u8 as *const T
}
/// Get the internal value as a mutable rust pointer
///
/// This is unsafe as the internal value might not be a valid pointer
pub unsafe fn as_mut_ptr<T>(&mut self) -> *mut T {
self.0 as *mut u8 as *mut T
}
/// Increment the virtual address
///
/// `count` is the number of bytes you want to increment the pointed value
pub unsafe fn incr(mut self, count: usize) -> Self {
self.0 += count as <Self as Address>::Repr;
self
}
}
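// Illustrative example, assuming the usual 4 KiB pages (PAGE_SHIFT == 12):
// Vaddr::from(Pfn::new(3)) is 0x3000, and Pfn::from(Vaddr::new(0x3fff)) is
// Pfn::new(3), since the low PAGE_SHIFT bits are just the in-page offset.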
impl Paddr {
/// Create a new physical address
pub fn new(value: <Paddr as Address>::Repr) -> Self {
Paddr(value)
}
/// Increment the virtual address
///
/// `count` is the number of bytes you want to increment the pointed value
pub unsafe fn incr(mut self, count: usize) -> Self {
self.0 += count as <Self as Address>::Repr;
self
}
}
impl Maddr {
/// Create a new machine address
pub fn new(value: <Maddr as Address>::Repr) -> Self {
Maddr(value)
}
/// Increment the virtual address
///
/// `count` is the number of bytes you want to increment the pointed value
pub unsafe fn incr(mut self, count: usize) -> Self {
self.0 += count as <Self as Address>::Repr;
self
}
}
impl Pfn {
/// Create a new physical frame number
pub fn new(value: usize) -> Self {
Pfn(value)
}
}
impl Mfn {
/// Create a new machine frame number
pub fn new(value: usize) -> Self {
Mfn(value)
}
}
impl Add<usize> for Pfn {
type Output = Pfn;
fn add(mut self, rhs: usize) -> Self::Output {
self.0 += rhs;
self
}
}
impl AddAssign<usize> for Pfn {
fn add_assign(&mut self, rhs: usize) {
self.0 += rhs;
}
}
impl Add<usize> for Mfn {
type Output = Mfn;
fn add(mut self, rhs: usize) -> Self::Output {
self.0 += rhs;
self
}
}
impl AddAssign<usize> for Mfn {
fn add_assign(&mut self, rhs: usize) {
self.0 += rhs;
}
}
impl Deref for Vaddr {
type Target = <Self as Address>::Repr;
#[inline]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl Deref for Paddr {
type Target = <Self as Address>::Repr;
#[inline]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl Deref for Maddr {
type Target = <Self as Address>::Repr;
#[inline]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl Deref for Pfn {
type Target = usize;
#[inline]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl Deref for Mfn {
type Target = usize;
#[inline]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<Pfn> for Vaddr {
fn from(pfn: Pfn) -> Vaddr {
Vaddr(pfn.0 << PAGE_SHIFT)
}
}
impl From<Vaddr> for Pfn {
fn from(vaddr: Vaddr) -> Pfn {
Pfn(vaddr.0 >> PAGE_SHIFT)
}
}
impl From<Mfn> for Vaddr {
fn from(mfn: Mfn) -> Vaddr {
Vaddr::from(Pfn::from(mfn))
}
}
impl From<Mfn> for Maddr {
fn from(mfn: Mfn) -> Maddr {
Maddr((mfn.0 as <Self as Address>::Repr) << PAGE_SHIFT)
}
}
impl From<Vaddr> for Mfn {
fn from(vaddr: Vaddr) -> Mfn {
Mfn::from(Pfn::from(vaddr))
}
}
impl From<PageEntry> for Vaddr {
fn from(entry: PageEntry) -> Vaddr {
Vaddr::from(Mfn::from(entry))
}
}
#[cfg(feature = "xen")]
impl From<PageEntry> for Mfn {
fn from(entry: PageEntry) -> Mfn {
use hal::arch::mmu::PTE_MASK;
let page_entry_value = entry.mask(PTE_MASK).value() >> PAGE_SHIFT;
Mfn(PageEntry::new(page_entry_value).value() as usize)
}
}
#[cfg(feature = "xen")]
impl From<Maddr> for PageEntry {
fn from(maddr: Maddr) -> PageEntry {
use hal::arch::mmu::PTE_FLAGS_MASK;
use hal::arch::mmu::PageFlags;
let value = (maddr.0 as u64) & !PTE_FLAGS_MASK;
PageEntry::new(value).set(PageFlags::Present).set(PageFlags::Writable)
}
}
#[cfg(feature = "xen")]
impl From<Mfn> for PageEntry {
fn from(mfn: Mfn) -> PageEntry {
PageEntry::from(Maddr::from(mfn))
}
}
#[cfg(feature = "xen")]
impl From<Pfn> for PageEntry {
fn from(pfn: Pfn) -> PageEntry {
PageEntry::from(Mfn::from(pfn))
}
}
#[cfg(feature = "xen")]
impl From<Mfn> for Pfn {
fn from(mfn: Mfn) -> Pfn {
use hal::xen::defs::MACH2PHYS_VIRT_START;
let mtp_mapping: *const Pfn = MACH2PHYS_VIRT_START as *const Pfn;
unsafe {
*mtp_mapping.offset(mfn.0 as isize)
}
}
}
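// Sketch of the idea above (illustrative description, not new API): under
// Xen the hypervisor exposes a machine-to-physical array mapped at
// MACH2PHYS_VIRT_START, so Pfn::from(Mfn::new(n)) is an indexed read of slot n.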
#[cfg(feature = "xen")]
impl From<Pfn> for Mfn {<|fim▁hole|> // XXX: Won't work for ARM
use hal::xen::boot::start_info;
unsafe {
let ptm_mapping: *const Mfn = (*start_info).mfn_list as *const Mfn;
*ptm_mapping.offset(pfn.0 as isize)
}
}
}<|fim▁end|> | fn from(pfn: Pfn) -> Mfn { |
<|file_name|>utilities.js<|end_file_name|><|fim▁begin|>/**
* Escapes a string for SQL insertion
*/
exports.escapeString = function(_string) {
if(typeof _string !== "string") {
return "\"" + _string + "\"";
}
return "\"" + _string.replace(/"/g, "'") + "\"";
};
/**
* Removes HTML entities, replaces breaks/paragraphs with newline, strips HTML, trims
*/
exports.cleanString = function(_string) {
if(typeof _string !== "string") {
return _string;
}
    _string = _string.replace(/&amp;*/ig, "&");
_string = exports.htmlDecode(_string);
_string = _string.replace(/\s*<br[^>]*>\s*/ig, "\n");
_string = _string.replace(/\s*<\/p>*\s*/ig, "\n\n");
_string = _string.replace(/<[^>]*>/g, "");
_string = _string.replace(/\s*\n{3,}\s*/g, "\n\n");
_string = _string.replace(/[^\S\n]{2,}/g, " ");
_string = _string.replace(/\n[^\S\n]*/g, "\n");
_string = _string.replace(/^\s+|\s+$/g, "");
return _string;
};
/**
* Combination of clean and escape string
*/
exports.cleanEscapeString = function(_string) {
_string = exports.cleanString(_string);
return exports.escapeString(_string);
};
/**
* Cleans up nasty XML
*/
exports.xmlNormalize = function(_string) {
    _string = _string.replace(/&nbsp;*/ig, " ");
    _string = _string.replace(/&(?!amp;)\s*/g, "&amp;");
_string = _string.replace(/^\s+|\s+$/g, "");
_string = _string.replace(/<title>(?!<!\[CDATA\[)/ig, "<title><![CDATA[");
_string = _string.replace(/<description>(?!<!\[CDATA\[)/ig, "<description><![CDATA[");
_string = _string.replace(/(\]\]>)?<\/title>/ig, "]]></title>");
_string = _string.replace(/(\]\]>)?<\/description>/ig, "]]></description>");
return _string;
};
/**
* Decodes HTML entities
*/
exports.htmlDecode = function(_string) {
var tmp_str = _string.toString();
var hash_map = exports.htmlTranslationTable();
var results = tmp_str.match(/&#\d*;/ig);
if(results) {
for(var i = 0, x = results.length; i < x; i++) {
var code = parseInt(results[i].replace("&#", "").replace(";", ""), 10);
hash_map[results[i]] = code;
}
}
for(var entity in hash_map) {
var symbol = String.fromCharCode(hash_map[entity]);
tmp_str = tmp_str.split(entity).join(symbol);
}
return tmp_str;
};
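// Illustrative usage (example strings only):
// exports.htmlDecode("Fish &amp; Chips &#8211; daily") -> "Fish & Chips – daily"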
exports.htmlTranslationTable = function() {
var entities = {
"–": "8211",
"—": "8212",
"‘": "8216",
"’": "8217",
"&": "38",
"„": "8222",
"•": "8226",
"ˆ": "710",
"†": "8224",
"‡": "8225",
"ƒ": "402",
"…": "8230",
"“": "8220",
"‹": "8249",
"‘": "8216",
"—": "8212",
"–": "8211",
"Œ": "338",
"œ": "339",
"‰": "8240",
"”": "8221",
"›": "8250",
"’": "8217",
"‚": "8218",
"š": "353",
"Š": "352",
"˜": "152",
"™": "8482",
"Ÿ": "376",
"Ì": "204",
"ì": "236",
"Ι": "921",
"ι": "953",
"Ï": "207",
"ï": "239",
"←": "8592",
"⇐": "8656",
"Á": "193",
"á": "225",
"Â": "194",
"â": "226",
"´": "180",
"Æ": "198",
"æ": "230",
"À": "192",
"à": "224",
"ℵ": "8501",
"Α": "913",
"α": "945",
"∧": "8743",
"∠": "8736",
"Å": "197",
"å": "229",
"≈": "8776",
"Ã": "195",
"ã": "227",
"Ä": "196",
"ä": "228",
"Β": "914",
"β": "946",
"¦": "166",
"∩": "8745",
"Ç": "199",
"ç": "231",
"¸": "184",
"¢": "162",
"Χ": "935",
"χ": "967",
"♣": "9827",
"≅": "8773",
"©": "169",
"↵": "8629",
"∪": "8746",
"¤": "164",
"↓": "8595",
"⇓": "8659",
"°": "176",
"Δ": "916",
"δ": "948",
"♦": "9830",
"÷": "247",
"É": "201",
"é": "233",
"Ê": "202",
"ê": "234",
"È": "200",
"è": "232",
"∅": "8709",
" ": "8195",
" ": "8194",
"Ε": "917",
"ε": "949",
"≡": "8801",
"Η": "919",
"η": "951",
"Ð": "208",
"ð": "240",
"Ë": "203",
"ë": "235",
"€": "8364",
"∃": "8707",
"∀": "8704",
"½": "189",
"¼": "188",
"¾": "190",
"⁄": "8260",
"Γ": "915",
"γ": "947",
"≥": "8805",
"↔": "8596",
"⇔": "8660",
"♥": "9829",
"Í": "205",
"í": "237",
"Î": "206",
"î": "238",
"¡": "161",
"ℑ": "8465",
"∞": "8734",
"∫": "8747",
"¿": "191",
"∈": "8712",
"Κ": "922",
"κ": "954",
"Λ": "923",
"λ": "955",
"⟨": "9001",
"«": "171",
"⌈": "8968",
"≤": "8804",
"⌊": "8970",
"∗": "8727",
"◊": "9674",
"‎": "8206",
"¯": "175",
"µ": "181",
"·": "183",
"−": "8722",
"Μ": "924",
"μ": "956",
"∇": "8711",
" ": "160",
"≠": "8800",
"∋": "8715",
"¬": "172",
"∉": "8713",
"⊄": "8836",
"Ñ": "209",
"ñ": "241",
"Ν": "925",
"ν": "957",
"Ó": "211",
"ó": "243",
"Ô": "212",
"ô": "244",
"Ò": "210",
"ò": "242",
"‾": "8254",
"Ω": "937",
"ω": "969",
"Ο": "927",
"ο": "959",
"⊕": "8853",
"∨": "8744",
"ª": "170",
"º": "186",
"Ø": "216",
"ø": "248",
"Õ": "213",
"õ": "245",
"⊗": "8855",
"Ö": "214",
"ö": "246",
"¶": "182",
"∂": "8706",
"⊥": "8869",
"Φ": "934",
"φ": "966",
"Π": "928",
"π": "960",
"ϖ": "982",
"±": "177",
"£": "163",
"′": "8242",
"″": "8243",
"∏": "8719",
"∝": "8733",
"Ψ": "936",
"ψ": "968",
"√": "8730",
"⟩": "9002",
"»": "187",
"→": "8594",
"⇒": "8658",
"⌉": "8969",
"ℜ": "8476",
"®": "174",
"⌋": "8971",
"Ρ": "929",
"ρ": "961",
"‏": "8207",
"⋅": "8901",
"§": "167",
"­": "173",
"Σ": "931",
"σ": "963",
"ς": "962",
"∼": "8764",
"♠": "9824",
"⊂": "8834",
"⊆": "8838",
"∑": "8721",
"⊃": "8835",
"¹": "185",
"²": "178",
"³": "179",
"⊇": "8839",
"ß": "223",
"Τ": "932",
"τ": "964",
"∴": "8756",
"Θ": "920",
"θ": "952",
"ϑ": "977",
" ": "8201",
"Þ": "222",
"þ": "254",
"˜": "732",
"×": "215",
"Ú": "218",
"ú": "250",
"↑": "8593",
"⇑": "8657",
"Û": "219",
"û": "251",
"Ù": "217",
"ù": "249",
"¨": "168",
"ϒ": "978",
"Υ": "933",
"υ": "965",
"Ü": "220",
"ü": "252",
"℘": "8472",
"
": "10",
"
": "13",
"Ξ": "926",
"ξ": "958",
"Ý": "221",
"ý": "253",
"¥": "165",
"ÿ": "255",
"Ζ": "918",
"ζ": "950",
"‍": "8205",
"‌": "8204",
""": "34",
"<": "60",
">": "62"
};
return entities;
};
/**
* Adds thousands separators to a number
*/
exports.formatNumber = function(_number) {
_number = _number + "";
    var x = _number.split(".");
    var x1 = x[0];
    var x2 = x.length > 1 ? "." + x[1] : "";
var expression = /(\d+)(\d{3})/;
while (expression.test(x1)) {
x1 = x1.replace(expression, "$1" + "," + "$2");
}
return x1 + x2;
};
/**
* Converts a date to absolute time (e.g. "May 2, 2011 12:00PM")
*/
exports.toDateAbsolute = function(_date) {
var date = new Date();
date.setTime(_date);
var dateHour = date.getHours() > 11 ? date.getHours() - 12 : date.getHours();
dateHour = dateHour == 0 ? 12 : dateHour;
var dateMinutes = date.getMinutes() < 10 ? "0" + date.getMinutes() : date.getMinutes();
var datePeriod = date.getHours() > 12 ? "PM" : "AM";
var months = [ "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December" ];
return months[date.getMonth()] + " " + date.getDate() + ", " + date.getFullYear() + " " + dateHour + ":" + dateMinutes + datePeriod;
};
/**
* Converts a date to relative time (e.g. "Yesterday 12:01PM")
*/
exports.toDateRelative = function(_date) {
var date = new Date();
date.setTime(_date);
var now = new Date();
var days = [ "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday" ];
var dateMonth = date.getMonth();
var dateDate = date.getDate();
var dateDay = days[date.getDay()];
var dateYear = date.getFullYear();
var dateHour = date.getHours() > 11 ? date.getHours() - 12 : date.getHours();
dateHour = dateHour == 0 ? 12 : dateHour;
var dateMinutes = date.getMinutes() < 10 ? "0" + date.getMinutes() : date.getMinutes();
var datePeriod = date.getHours() > 12 ? "PM" : "AM";
var nowMonth = now.getMonth();
var nowDate = now.getDate();
var nowYear = now.getFullYear();
if(dateYear == nowYear && dateMonth == nowMonth) {
if(dateDate == nowDate) {
return "Today " + dateHour + ":" + dateMinutes + datePeriod;
} else if(dateDate >= nowDate - 1) {
return "Yesterday " + dateHour + ":" + dateMinutes + datePeriod;
} else if(dateDate >= nowDate - 6) {
return dateDay + " " + dateHour + ":" + dateMinutes + datePeriod;
} else {
return exports.toDateAbsolute(_date);
}
} else {
return exports.toDateAbsolute(_date);
}
};
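// Illustrative behaviour: a timestamp from earlier today formats as e.g.
// "Today 3:05PM", yesterday's as "Yesterday 3:05PM", one from the last six
// days as the weekday name, and anything older falls back to toDateAbsolute().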
/**
* Universal error callback function
*
*/
exports.onErrorCallback = function(e) {
Ti.API.info('error: '+e.data.error);
Ti.UI.createAlertDialog({
message: "Can't load data. Check network connection? Error message: "+e.error,
ok: 'OK',
title: 'Error'
}).show();
}
/*
* Attempt to extract 1st image URL, height, width properties from an HTML string
*
*/
exports.parseImageProperties = function(html){
// extract imageURL
var imageURL = exports.htmlDecode(html).match(/src=(.+?")/)[1] || '';
if (imageURL) {
imageURL = imageURL.replace(/\"/g,"");
var width = exports.htmlDecode(html).match(/width=(.+?")/)[1] || '';
width = width.replace(/\"/g, "") || '';
var height = exports.htmlDecode(html).match(/height=(.+?")/)[1] || '';
height = height.replace(/\"/g, "") || '';
//Ti.API.info('image url:' + imageURL);
//Ti.API.info('thumb: '+"http://events.jftc.nato.int/sites/default/files/imagecache/thumbnail/"+parseImageName(imageURL))
return {
"url": imageURL,
"width": width,
"height": height,<|fim▁hole|> // "thumbnail":"http://events.jftc.nato.int/sites/default/files/imagecache/thumbnail/"+parseImageName(imageURL),
};
}
else {
return false;
}
}
/**
* Strip HTML tags
* Note: regex is not prefered method, I know.
*/
exports.stripTags = function(str, allowed_tags) {
// http://kevin.vanzonneveld.net
// + original by: Kevin van Zonneveld (http://kevin.vanzonneveld.net)
// + improved by: Luke Godfrey
// + input by: Pul
// + bugfixed by: Kevin van Zonneveld (http://kevin.vanzonneveld.net)
// + bugfixed by: Onno Marsman
// + input by: Alex
// + bugfixed by: Kevin van Zonneveld (http://kevin.vanzonneveld.net)
// + input by: Marc Palau
// + improved by: Kevin van Zonneveld (http://kevin.vanzonneveld.net)
// + input by: Brett Zamir (http://brett-zamir.me)
// + bugfixed by: Kevin van Zonneveld (http://kevin.vanzonneveld.net)
// + bugfixed by: Eric Nagel
// + input by: Bobby Drake
// + bugfixed by: Kevin van Zonneveld (http://kevin.vanzonneveld.net)
// + bugfixed by: Tomasz Wesolowski
// fixed Titanium warning by: Kosso
// * example 1: strip_tags('<p>Kevin</p> <br /><b>van</b> <i>Zonneveld</i>', '<i><b>');
// * returns 1: 'Kevin <b>van</b> <i>Zonneveld</i>'
// * example 2: strip_tags('<p>Kevin <img src="someimage.png" onmouseover="someFunction()">van <i>Zonneveld</i></p>', '<p>');
// * returns 2: '<p>Kevin van Zonneveld</p>'
// * example 3: strip_tags("<a href='http://kevin.vanzonneveld.net'>Kevin van Zonneveld</a>", "<a>");
// * returns 3: '<a href='http://kevin.vanzonneveld.net'>Kevin van Zonneveld</a>'
    // *     example 4: strip_tags('1 &lt; 5 5 &gt; 1');
    // *     returns 4: '1 &lt; 5 5 &gt; 1'
var key = '', allowed = false;
var matches = [];
var allowed_array = [];
var allowed_tag = '';
var i = 0;
var k = '';
var html = '';
var replacer = function (search, replace, str) {
return str.split(search).join(replace);
};
// Build allowes tags associative array
if (allowed_tags) {
allowed_array = allowed_tags.match(/([a-zA-Z0-9]+)/gi);
}
str += '';
// Match tags
matches = str.match(/(<\/?[\S][^>]*>)/gi);
// Go through all HTML tags
for (key in matches) {
if(key){
// Save HTML tag
html = matches[key].toString();
// Is tag not in allowed list? Remove from str!
allowed = false;
// Go through all allowed tags
for (k in allowed_array) {
if(k){
// Init
allowed_tag = allowed_array[k];
i = -1;
if (i != 0) { i = html.toLowerCase().indexOf('<'+allowed_tag+'>');}
if (i != 0) { i = html.toLowerCase().indexOf('<'+allowed_tag+' ');}
if (i != 0) { i = html.toLowerCase().indexOf('</'+allowed_tag) ;}
// Determine
if (i == 0) {
allowed = true;
break;
}
}
}
if (!allowed) {
str = replacer(html, "", str); // Custom replace. No regexing
}
}
}
return str;
}
/**
* Just a helper app to fire up /lib/MiniBrowser...
*/
exports.miniBrowser = function(e){
//console.info(JSON.stringify(e));
var TiMiniBrowser = require("MiniBrowser/TiMiniBrowser");
var browser = new TiMiniBrowser({
url: e.link,
barColor: "#178cce"
});
browser.open();
}<|fim▁end|> | |
<|file_name|>bdist_egg.py<|end_file_name|><|fim▁begin|>"""setuptools.command.bdist_egg
Build .egg distributions"""
# This module should be kept compatible with Python 2.3
import sys, os, marshal
from setuptools import Command
from distutils.dir_util import remove_tree, mkpath
try:
from distutils.sysconfig import get_python_version, get_python_lib
except ImportError:
from sysconfig import get_python_version
from distutils.sysconfig import get_python_lib
from distutils import log
from distutils.errors import DistutilsSetupError
from pkg_resources import get_build_platform, Distribution, ensure_directory
from pkg_resources import EntryPoint
from types import CodeType
from setuptools.extension import Library
def strip_module(filename):
if '.' in filename:
filename = os.path.splitext(filename)[0]
if filename.endswith('module'):
filename = filename[:-6]
return filename
def write_stub(resource, pyfile):
f = open(pyfile,'w')
f.write('\n'.join([
"def __bootstrap__():",
" global __bootstrap__, __loader__, __file__",
" import sys, pkg_resources, imp",
" __file__ = pkg_resources.resource_filename(__name__,%r)"
% resource,
" __loader__ = None; del __bootstrap__, __loader__",
" imp.load_dynamic(__name__,__file__)",
"__bootstrap__()",
"" # terminal \n
]))
f.close()
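# For illustration, write_stub('foo.so', 'foo.py') emits a foo.py whose
# __bootstrap__() resolves the real foo.so via pkg_resources at import time
# and loads it with imp.load_dynamic ('foo' here is a hypothetical name).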
# stub __init__.py for packages distributed without one
NS_PKG_STUB = '__import__("pkg_resources").declare_namespace(__name__)'
class bdist_egg(Command):
description = "create an \"egg\" distribution"
user_options = [
('bdist-dir=', 'b',
"temporary directory for creating the distribution"),
('plat-name=', 'p',
"platform name to embed in generated filenames "
"(default: %s)" % get_build_platform()),
('exclude-source-files', None,
"remove all .py files from the generated egg"),
('keep-temp', 'k',
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
('dist-dir=', 'd',
"directory to put final built distributions in"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
]
boolean_options = [
'keep-temp', 'skip-build', 'exclude-source-files'
]
def initialize_options (self):
self.bdist_dir = None
self.plat_name = None
self.keep_temp = 0
self.dist_dir = None
self.skip_build = 0
self.egg_output = None
self.exclude_source_files = None
def finalize_options(self):
ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info")
self.egg_info = ei_cmd.egg_info
if self.bdist_dir is None:
bdist_base = self.get_finalized_command('bdist').bdist_base
self.bdist_dir = os.path.join(bdist_base, 'egg')
if self.plat_name is None:
self.plat_name = get_build_platform()
self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
if self.egg_output is None:
# Compute filename of the output egg
basename = Distribution(
None, None, ei_cmd.egg_name, ei_cmd.egg_version,
get_python_version(),
self.distribution.has_ext_modules() and self.plat_name
).egg_name()
self.egg_output = os.path.join(self.dist_dir, basename+'.egg')
def do_install_data(self):
# Hack for packages that install data to install's --install-lib
self.get_finalized_command('install').install_lib = self.bdist_dir
site_packages = os.path.normcase(os.path.realpath(get_python_lib()))
old, self.distribution.data_files = self.distribution.data_files,[]
for item in old:
if isinstance(item,tuple) and len(item)==2:
if os.path.isabs(item[0]):
realpath = os.path.realpath(item[0])
normalized = os.path.normcase(realpath)
if normalized==site_packages or normalized.startswith(
site_packages+os.sep
):
item = realpath[len(site_packages)+1:], item[1]
# XXX else: raise ???
self.distribution.data_files.append(item)
try:
log.info("installing package data to %s" % self.bdist_dir)
self.call_command('install_data', force=0, root=None)
finally:
self.distribution.data_files = old
def get_outputs(self):
return [self.egg_output]
def call_command(self,cmdname,**kw):
"""Invoke reinitialized command `cmdname` with keyword args"""
for dirname in INSTALL_DIRECTORY_ATTRS:
kw.setdefault(dirname,self.bdist_dir)
kw.setdefault('skip_build',self.skip_build)
kw.setdefault('dry_run', self.dry_run)
cmd = self.reinitialize_command(cmdname, **kw)
self.run_command(cmdname)
return cmd
def run(self):
# Generate metadata first
self.run_command("egg_info")
# We run install_lib before install_data, because some data hacks
# pull their data path from the install_lib command.
log.info("installing library code to %s" % self.bdist_dir)
instcmd = self.get_finalized_command('install')
old_root = instcmd.root; instcmd.root = None
cmd = self.call_command('install_lib', warn_dir=0)
instcmd.root = old_root
all_outputs, ext_outputs = self.get_ext_outputs()
self.stubs = []
to_compile = []
for (p,ext_name) in enumerate(ext_outputs):
filename,ext = os.path.splitext(ext_name)
pyfile = os.path.join(self.bdist_dir, strip_module(filename)+'.py')
self.stubs.append(pyfile)
log.info("creating stub loader for %s" % ext_name)
if not self.dry_run:
write_stub(os.path.basename(ext_name), pyfile)
to_compile.append(pyfile)
ext_outputs[p] = ext_name.replace(os.sep,'/')
to_compile.extend(self.make_init_files())
if to_compile:
cmd.byte_compile(to_compile)
if self.distribution.data_files:
self.do_install_data()
# Make the EGG-INFO directory
archive_root = self.bdist_dir
egg_info = os.path.join(archive_root,'EGG-INFO')
self.mkpath(egg_info)
if self.distribution.scripts:
script_dir = os.path.join(egg_info, 'scripts')
log.info("installing scripts to %s" % script_dir)
self.call_command('install_scripts',install_dir=script_dir,no_ep=1)
self.copy_metadata_to(egg_info)
native_libs = os.path.join(egg_info, "native_libs.txt")
if all_outputs:
log.info("writing %s" % native_libs)
if not self.dry_run:
ensure_directory(native_libs)
libs_file = open(native_libs, 'wt')
libs_file.write('\n'.join(all_outputs))
libs_file.write('\n')
libs_file.close()
elif os.path.isfile(native_libs):
log.info("removing %s" % native_libs)
if not self.dry_run:
os.unlink(native_libs)
write_safety_flag(
os.path.join(archive_root,'EGG-INFO'), self.zip_safe()
)
if os.path.exists(os.path.join(self.egg_info,'depends.txt')):
log.warn(
"WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
"Use the install_requires/extras_require setup() args instead."
)
if self.exclude_source_files:
self.zap_pyfiles()
# Make the archive
make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
dry_run=self.dry_run, mode=self.gen_header())
if not self.keep_temp:
remove_tree(self.bdist_dir, dry_run=self.dry_run)
# Add to 'Distribution.dist_files' so that the "upload" command works
getattr(self.distribution,'dist_files',[]).append(
('bdist_egg',get_python_version(),self.egg_output))
def zap_pyfiles(self):
log.info("Removing .py files from temporary directory")
for base,dirs,files in walk_egg(self.bdist_dir):
for name in files:
if name.endswith('.py'):
path = os.path.join(base,name)
log.debug("Deleting %s", path)
os.unlink(path)
def zip_safe(self):
safe = getattr(self.distribution,'zip_safe',None)
if safe is not None:
return safe
log.warn("zip_safe flag not set; analyzing archive contents...")
return analyze_egg(self.bdist_dir, self.stubs)
def make_init_files(self):
"""Create missing package __init__ files"""
init_files = []
for base,dirs,files in walk_egg(self.bdist_dir):
if base==self.bdist_dir:
# don't put an __init__ in the root
continue
for name in files:
if name.endswith('.py'):
if '__init__.py' not in files:
pkg = base[len(self.bdist_dir)+1:].replace(os.sep,'.')
if self.distribution.has_contents_for(pkg):
log.warn("Creating missing __init__.py for %s",pkg)
filename = os.path.join(base,'__init__.py')
if not self.dry_run:
f = open(filename,'w'); f.write(NS_PKG_STUB)
f.close()
init_files.append(filename)
break
else:
# not a package, don't traverse to subdirectories
dirs[:] = []
return init_files
def gen_header(self):
epm = EntryPoint.parse_map(self.distribution.entry_points or '')
ep = epm.get('setuptools.installation',{}).get('eggsecutable')
if ep is None:
return 'w' # not an eggsecutable, do it the usual way.
if not ep.attrs or ep.extras:
raise DistutilsSetupError(
"eggsecutable entry point (%r) cannot have 'extras' "
"or refer to a module" % (ep,)
)
pyver = sys.version[:3]
pkg = ep.module_name
full = '.'.join(ep.attrs)
base = ep.attrs[0]
basename = os.path.basename(self.egg_output)
header = (
"#!/bin/sh\n"
'if [ `basename $0` = "%(basename)s" ]\n'
'then exec python%(pyver)s -c "'
"import sys, os; sys.path.insert(0, os.path.abspath('$0')); "
"from %(pkg)s import %(base)s; sys.exit(%(full)s())"
'" "$@"\n'
'else\n'
' echo $0 is not the correct name for this egg file.\n'
' echo Please rename it back to %(basename)s and try again.\n'
' exec false\n'
'fi\n'
) % locals()
if not self.dry_run:
mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run)
f = open(self.egg_output, 'w')
f.write(header)
f.close()
return 'a'
def copy_metadata_to(self, target_dir):
"Copy metadata (egg info) to the target_dir"
# normalize the path (so that a forward-slash in egg_info will
# match using startswith below)
norm_egg_info = os.path.normpath(self.egg_info)
prefix = os.path.join(norm_egg_info,'')
for path in self.ei_cmd.filelist.files:
if path.startswith(prefix):
target = os.path.join(target_dir, path[len(prefix):])
ensure_directory(target)
self.copy_file(path, target)
def get_ext_outputs(self):
"""Get a list of relative paths to C extensions in the output distro"""
all_outputs = []
ext_outputs = []
paths = {self.bdist_dir:''}
for base, dirs, files in os.walk(self.bdist_dir):
for filename in files:
if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
all_outputs.append(paths[base]+filename)
for filename in dirs:
paths[os.path.join(base,filename)] = paths[base]+filename+'/'
if self.distribution.has_ext_modules():
build_cmd = self.get_finalized_command('build_ext')
for ext in build_cmd.extensions:
if isinstance(ext,Library):
continue
fullname = build_cmd.get_ext_fullname(ext.name)
filename = build_cmd.get_ext_filename(fullname)
if not os.path.basename(filename).startswith('dl-'):
if os.path.exists(os.path.join(self.bdist_dir,filename)):
ext_outputs.append(filename)
return all_outputs, ext_outputs
NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
def walk_egg(egg_dir):
"""Walk an unpacked egg's contents, skipping the metadata directory"""
walker = os.walk(egg_dir)
base,dirs,files = walker.next()
if 'EGG-INFO' in dirs:
dirs.remove('EGG-INFO')
yield base,dirs,files
for bdf in walker:
yield bdf
def analyze_egg(egg_dir, stubs):
# check for existing flag in EGG-INFO
for flag,fn in safety_flags.items():
if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)):
return flag
if not can_scan(): return False
safe = True
for base, dirs, files in walk_egg(egg_dir):
for name in files:
if name.endswith('.py') or name.endswith('.pyw'):
continue
elif name.endswith('.pyc') or name.endswith('.pyo'):
# always scan, even if we already know we're not safe
safe = scan_module(egg_dir, base, name, stubs) and safe
return safe
def write_safety_flag(egg_dir, safe):
# Write or remove zip safety flag file(s)
for flag,fn in safety_flags.items():
fn = os.path.join(egg_dir, fn)
if os.path.exists(fn):
            if safe is None or bool(safe) != flag:
os.unlink(fn)
elif safe is not None and bool(safe)==flag:
f=open(fn,'wt'); f.write('\n'); f.close()
safety_flags = {
True: 'zip-safe',
False: 'not-zip-safe',
}
def scan_module(egg_dir, base, name, stubs):
"""Check whether module possibly uses unsafe-for-zipfile stuff"""
filename = os.path.join(base,name)
if filename[:-1] in stubs:
return True # Extension module
pkg = base[len(egg_dir)+1:].replace(os.sep,'.')
module = pkg+(pkg and '.' or '')+os.path.splitext(name)[0]
f = open(filename,'rb'); f.read(8) # skip magic & date
try:
code = marshal.load(f); f.close()
except ValueError:
f.seek(0); f.read(12) # skip magic & date & file size; file size added in Python 3.3
code = marshal.load(f); f.close()
safe = True
symbols = dict.fromkeys(iter_symbols(code))
for bad in ['__file__', '__path__']:
if bad in symbols:
log.warn("%s: module references %s", module, bad)
safe = False
if 'inspect' in symbols:
for bad in [
            'getsource', 'getabsfile', 'getsourcefile', 'getfile',
'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
'getinnerframes', 'getouterframes', 'stack', 'trace'
]:
if bad in symbols:
log.warn("%s: module MAY be using inspect.%s", module, bad)
safe = False
if '__name__' in symbols and '__main__' in symbols and '.' not in module:<|fim▁hole|> if sys.version[:3]=="2.4": # -m works w/zipfiles in 2.5
log.warn("%s: top-level module may be 'python -m' script", module)
safe = False
return safe
def iter_symbols(code):
"""Yield names and strings used by `code` and its nested code objects"""
for name in code.co_names: yield name
for const in code.co_consts:
if isinstance(const,basestring):
yield const
elif isinstance(const,CodeType):
for name in iter_symbols(const):
yield name
def can_scan():
if sys.version_info > (3, 3):
return False # Can't scan recent formats
if not sys.platform.startswith('java') and sys.platform != 'cli':
# CPython, PyPy, etc.
return True
log.warn("Unable to analyze compiled code on this platform.")
log.warn("Please ask the author to include a 'zip_safe'"
" setting (either True or False) in the package's setup.py")
# Attribute names of options for commands that might need to be convinced to
# install to the egg build directory
INSTALL_DIRECTORY_ATTRS = [
'install_lib', 'install_dir', 'install_data', 'install_base'
]
def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None,
mode='w'
):
"""Create a zip file from all the files under 'base_dir'. The output
zip file will be named 'base_dir' + ".zip". Uses either the "zipfile"
Python module (if available) or the InfoZIP "zip" utility (if installed
and found on the default search path). If neither tool is available,
raises DistutilsExecError. Returns the name of the output zip file.
"""
import zipfile
mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
def visit(z, dirname, names):
for name in names:
path = os.path.normpath(os.path.join(dirname, name))
if os.path.isfile(path):
p = path[len(base_dir)+1:]
if not dry_run:
z.write(path, p)
log.debug("adding '%s'" % p)
if compress is None:
compress = (sys.version>="2.4") # avoid 2.3 zipimport bug when 64 bits
compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)]
if not dry_run:
z = zipfile.ZipFile(zip_filename, mode, compression=compression)
for dirname, dirs, files in os.walk(base_dir):
visit(z, dirname, files)
z.close()
else:
for dirname, dirs, files in os.walk(base_dir):
visit(None, dirname, files)
return zip_filename
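# Illustrative usage (hypothetical paths): make_zipfile('dist/example.egg',
# 'build/bdist/egg') archives everything under build/bdist/egg, storing each
# entry under its path relative to that directory.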
#<|fim▁end|> | |
<|file_name|>forms.py<|end_file_name|><|fim▁begin|># Copyright (c) 2016-2017, NVIDIA CORPORATION. All rights reserved.
from __future__ import absolute_import
import os
from flask.ext.wtf import Form
from wtforms import validators
from digits import utils
from digits.utils import subclass
from digits.utils.forms import validate_required_iff
@subclass
class DatasetForm(Form):
"""
A form used to create an image processing dataset
"""
def validate_folder_path(form, field):
if not field.data:
pass
else:
# make sure the filesystem path exists
if not os.path.exists(field.data) or not os.path.isdir(field.data):
raise validators.ValidationError(
'Folder does not exist or is not reachable')
else:
return True
feature_folder = utils.forms.StringField(
u'Feature image folder',
validators=[
validators.DataRequired(),
validate_folder_path,
],
tooltip="Indicate a folder full of images."
)
label_folder = utils.forms.StringField(
u'Label image folder',
validators=[
validators.DataRequired(),
validate_folder_path,
],<|fim▁hole|> " for the extension, which may differ."
)
folder_pct_val = utils.forms.IntegerField(
u'% for validation',
default=10,
validators=[
validators.NumberRange(min=0, max=100)
],
tooltip="You can choose to set apart a certain percentage of images "
"from the training images for the validation set."
)
has_val_folder = utils.forms.BooleanField('Separate validation images',
default=False,
)
validation_feature_folder = utils.forms.StringField(
u'Validation feature image folder',
validators=[
validate_required_iff(has_val_folder=True),
validate_folder_path,
],
tooltip="Indicate a folder full of images."
)
validation_label_folder = utils.forms.StringField(
u'Validation label image folder',
validators=[
validate_required_iff(has_val_folder=True),
validate_folder_path,
],
tooltip="Indicate a folder full of images. For each image in the feature"
" image folder there must be one corresponding image in the label"
" image folder. The label image must have the same filename except"
" for the extension, which may differ."
)
channel_conversion = utils.forms.SelectField(
'Channel conversion',
choices=[
('RGB', 'RGB'),
('L', 'Grayscale'),
('none', 'None'),
],
default='none',
tooltip="Perform selected channel conversion."
)<|fim▁end|> | tooltip="Indicate a folder full of images. For each image in the feature"
" image folder there must be one corresponding image in the label"
" image folder. The label image must have the same filename except" |
<|file_name|>orf.py<|end_file_name|><|fim▁begin|># coding: utf-8
from __future__ import unicode_literals
import re
import calendar
import datetime
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
HEADRequest,
unified_strdate,
strip_jsonp,
int_or_none,
float_or_none,
determine_ext,
remove_end,
unescapeHTML,
)
class ORFTVthekIE(InfoExtractor):
IE_NAME = 'orf:tvthek'
IE_DESC = 'ORF TVthek'
_VALID_URL = r'https?://tvthek\.orf\.at/(?:[^/]+/)+(?P<id>\d+)'
_TESTS = [{
'url': 'http://tvthek.orf.at/program/Aufgetischt/2745173/Aufgetischt-Mit-der-Steirischen-Tafelrunde/8891389',
'playlist': [{
'md5': '2942210346ed779588f428a92db88712',
'info_dict': {
'id': '8896777',
'ext': 'mp4',
'title': 'Aufgetischt: Mit der Steirischen Tafelrunde',
'description': 'md5:c1272f0245537812d4e36419c207b67d',
'duration': 2668,
'upload_date': '20141208',
},
}],
'skip': 'Blocked outside of Austria / Germany',
}, {
'url': 'http://tvthek.orf.at/topic/Im-Wandel-der-Zeit/8002126/Best-of-Ingrid-Thurnher/7982256',
'info_dict': {
'id': '7982259',
'ext': 'mp4',
'title': 'Best of Ingrid Thurnher',
'upload_date': '20140527',
'description': 'Viele Jahre war Ingrid Thurnher das "Gesicht" der ZIB 2. Vor ihrem Wechsel zur ZIB 2 im Jahr 1995 moderierte sie unter anderem "Land und Leute", "Österreich-Bild" und "Niederösterreich heute".',
},
'params': {
'skip_download': True, # rtsp downloads
},
'_skip': 'Blocked outside of Austria / Germany',
}, {
'url': 'http://tvthek.orf.at/topic/Fluechtlingskrise/10463081/Heimat-Fremde-Heimat/13879132/Senioren-betreuen-Migrantenkinder/13879141',
'skip_download': True,
}, {
'url': 'http://tvthek.orf.at/profile/Universum/35429',
'skip_download': True,
}]
def _real_extract(self, url):
playlist_id = self._match_id(url)
webpage = self._download_webpage(url, playlist_id)
data_jsb = self._parse_json(
self._search_regex(
r'<div[^>]+class=(["\']).*?VideoPlaylist.*?\1[^>]+data-jsb=(["\'])(?P<json>.+?)\2',
webpage, 'playlist', group='json'),
playlist_id, transform_source=unescapeHTML)['playlist']['videos']
def quality_to_int(s):
m = re.search('([0-9]+)', s)
if m is None:
return -1
return int(m.group(1))
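        # Illustrative inputs: quality_to_int('Q6A') -> 6, and a string
        # without any digits maps to -1.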
entries = []
for sd in data_jsb:
video_id, title = sd.get('id'), sd.get('title')
if not video_id or not title:
continue
video_id = compat_str(video_id)
formats = [{
'preference': -10 if fd['delivery'] == 'hls' else None,
'format_id': '%s-%s-%s' % (
fd['delivery'], fd['quality'], fd['quality_string']),
'url': fd['src'],
'protocol': fd['protocol'],
'quality': quality_to_int(fd['quality']),
} for fd in sd['sources']]
# Check for geoblocking.
# There is a property is_geoprotection, but that's always false
geo_str = sd.get('geoprotection_string')
if geo_str:
try:
http_url = next(
f['url']
for f in formats
if re.match(r'^https?://.*\.mp4$', f['url']))
except StopIteration:
pass
else:
req = HEADRequest(http_url)
self._request_webpage(
req, video_id,
note='Testing for geoblocking',
errnote=((
'This video seems to be blocked outside of %s. '
'You may want to try the streaming-* formats.')
% geo_str),
fatal=False)
self._check_formats(formats, video_id)
self._sort_formats(formats)
<|fim▁hole|> for sub in sd.get('subtitles', []):
sub_src = sub.get('src')
if not sub_src:
continue
subtitles.setdefault(sub.get('lang', 'de-AT'), []).append({
'url': sub_src,
})
upload_date = unified_strdate(sd.get('created_date'))
entries.append({
'_type': 'video',
'id': video_id,
'title': title,
'formats': formats,
'subtitles': subtitles,
'description': sd.get('description'),
'duration': int_or_none(sd.get('duration_in_seconds')),
'upload_date': upload_date,
'thumbnail': sd.get('image_full_url'),
})
return {
'_type': 'playlist',
'entries': entries,
'id': playlist_id,
}
class ORFOE1IE(InfoExtractor):
IE_NAME = 'orf:oe1'
IE_DESC = 'Radio Österreich 1'
_VALID_URL = r'https?://oe1\.orf\.at/(?:programm/|konsole\?.*?\btrack_id=)(?P<id>[0-9]+)'
# Audios on ORF radio are only available for 7 days, so we can't add tests.
_TESTS = [{
'url': 'http://oe1.orf.at/konsole?show=on_demand#?track_id=394211',
'only_matching': True,
}, {
'url': 'http://oe1.orf.at/konsole?show=ondemand&track_id=443608&load_day=/programm/konsole/tag/20160726',
'only_matching': True,
}]
def _real_extract(self, url):
show_id = self._match_id(url)
data = self._download_json(
'http://oe1.orf.at/programm/%s/konsole' % show_id,
show_id
)
timestamp = datetime.datetime.strptime('%s %s' % (
data['item']['day_label'],
data['item']['time']
), '%d.%m.%Y %H:%M')
unix_timestamp = calendar.timegm(timestamp.utctimetuple())
return {
'id': show_id,
'title': data['item']['title'],
'url': data['item']['url_stream'],
'ext': 'mp3',
'description': data['item'].get('info'),
'timestamp': unix_timestamp
}
class ORFFM4IE(InfoExtractor):
IE_NAME = 'orf:fm4'
IE_DESC = 'radio FM4'
_VALID_URL = r'https?://fm4\.orf\.at/(?:7tage/?#|player/)(?P<date>[0-9]+)/(?P<show>\w+)'
_TEST = {
'url': 'http://fm4.orf.at/player/20160110/IS/',
'md5': '01e736e8f1cef7e13246e880a59ad298',
'info_dict': {
'id': '2016-01-10_2100_tl_54_7DaysSun13_11244',
'ext': 'mp3',
'title': 'Im Sumpf',
'description': 'md5:384c543f866c4e422a55f66a62d669cd',
'duration': 7173,
'timestamp': 1452456073,
'upload_date': '20160110',
},
'skip': 'Live streams on FM4 got deleted soon',
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
show_date = mobj.group('date')
show_id = mobj.group('show')
data = self._download_json(
'http://audioapi.orf.at/fm4/json/2.0/broadcasts/%s/4%s' % (show_date, show_id),
show_id
)
def extract_entry_dict(info, title, subtitle):
return {
'id': info['loopStreamId'].replace('.mp3', ''),
'url': 'http://loopstream01.apa.at/?channel=fm4&id=%s' % info['loopStreamId'],
'title': title,
'description': subtitle,
'duration': (info['end'] - info['start']) / 1000,
'timestamp': info['start'] / 1000,
'ext': 'mp3'
}
entries = [extract_entry_dict(t, data['title'], data['subtitle']) for t in data['streams']]
return {
'_type': 'playlist',
'id': show_id,
'title': data['title'],
'description': data['subtitle'],
'entries': entries
}
class ORFIPTVIE(InfoExtractor):
IE_NAME = 'orf:iptv'
IE_DESC = 'iptv.ORF.at'
_VALID_URL = r'https?://iptv\.orf\.at/(?:#/)?stories/(?P<id>\d+)'
_TEST = {
'url': 'http://iptv.orf.at/stories/2275236/',
'md5': 'c8b22af4718a4b4af58342529453e3e5',
'info_dict': {
'id': '350612',
'ext': 'flv',
'title': 'Weitere Evakuierungen um Vulkan Calbuco',
'description': 'md5:d689c959bdbcf04efeddedbf2299d633',
'duration': 68.197,
'thumbnail': 're:^https?://.*\.jpg$',
'upload_date': '20150425',
},
}
def _real_extract(self, url):
story_id = self._match_id(url)
webpage = self._download_webpage(
'http://iptv.orf.at/stories/%s' % story_id, story_id)
video_id = self._search_regex(
r'data-video(?:id)?="(\d+)"', webpage, 'video id')
data = self._download_json(
'http://bits.orf.at/filehandler/static-api/json/current/data.json?file=%s' % video_id,
video_id)[0]
duration = float_or_none(data['duration'], 1000)
video = data['sources']['default']
load_balancer_url = video['loadBalancerUrl']
abr = int_or_none(video.get('audioBitrate'))
vbr = int_or_none(video.get('bitrate'))
fps = int_or_none(video.get('videoFps'))
width = int_or_none(video.get('videoWidth'))
height = int_or_none(video.get('videoHeight'))
thumbnail = video.get('preview')
rendition = self._download_json(
load_balancer_url, video_id, transform_source=strip_jsonp)
f = {
'abr': abr,
'vbr': vbr,
'fps': fps,
'width': width,
'height': height,
}
formats = []
for format_id, format_url in rendition['redirect'].items():
if format_id == 'rtmp':
ff = f.copy()
ff.update({
'url': format_url,
'format_id': format_id,
})
formats.append(ff)
elif determine_ext(format_url) == 'f4m':
formats.extend(self._extract_f4m_formats(
format_url, video_id, f4m_id=format_id))
elif determine_ext(format_url) == 'm3u8':
formats.extend(self._extract_m3u8_formats(
format_url, video_id, 'mp4', m3u8_id=format_id))
else:
continue
self._sort_formats(formats)
title = remove_end(self._og_search_title(webpage), ' - iptv.ORF.at')
description = self._og_search_description(webpage)
upload_date = unified_strdate(self._html_search_meta(
'dc.date', webpage, 'upload date'))
return {
'id': video_id,
'title': title,
'description': description,
'duration': duration,
'thumbnail': thumbnail,
'upload_date': upload_date,
'formats': formats,
}<|fim▁end|> | subtitles = {} |
<|file_name|>proj3d.py<|end_file_name|><|fim▁begin|># 3dproj.py
#
"""
Various transforms used by the 3D code
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import zip
from matplotlib.collections import LineCollection
from matplotlib.patches import Circle
import numpy as np
import numpy.linalg as linalg
def line2d(p0, p1):
"""
Return 2D equation of line in the form ax+by+c = 0
"""
# x + x1 = 0
x0, y0 = p0[:2]
x1, y1 = p1[:2]
#
if x0 == x1:
a = -1
b = 0
c = x1
elif y0 == y1:
a = 0
b = 1
c = -y1
else:
        # general case; signs chosen so that a*x + b*y + c == 0 holds
        # at both endpoints
        a = (y0-y1)
        b = (x1-x0)
        c = (x0*y1 - x1*y0)
return a, b, c
def line2d_dist(l, p):
"""
Distance from line to point
line is a tuple of coefficients a,b,c
"""
a, b, c = l
x0, y0 = p
return abs((a*x0 + b*y0 + c)/np.sqrt(a**2+b**2))
def line2d_seg_dist(p1, p2, p0):
"""distance(s) from line defined by p1 - p2 to point(s) p0
p0[0] = x(s)
p0[1] = y(s)
intersection point p = p1 + u*(p2-p1)
and intersection point lies within segment if u is between 0 and 1
"""
x21 = p2[0] - p1[0]
y21 = p2[1] - p1[1]<|fim▁hole|> x01 = np.asarray(p0[0]) - p1[0]
y01 = np.asarray(p0[1]) - p1[1]
u = (x01*x21 + y01*y21)/float(abs(x21**2 + y21**2))
u = np.clip(u, 0, 1)
d = np.sqrt((x01 - u*x21)**2 + (y01 - u*y21)**2)
return d
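# Worked example (illustrative): for the segment p1=(0, 0), p2=(10, 0), the
# point (5, 3) projects inside the segment (u = 0.5) and d = 3.0, while
# (20, 0) clips to u = 1 and gives d = 10.0.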
def test_lines_dists():
import pylab
ax = pylab.gca()
xs, ys = (0,30), (20,150)
pylab.plot(xs, ys)
points = list(zip(xs, ys))
p0, p1 = points
xs, ys = (0,0,20,30), (100,150,30,200)
pylab.scatter(xs, ys)
dist = line2d_seg_dist(p0, p1, (xs[0], ys[0]))
dist = line2d_seg_dist(p0, p1, np.array((xs, ys)))
for x, y, d in zip(xs, ys, dist):
c = Circle((x, y), d, fill=0)
ax.add_patch(c)
pylab.xlim(-200, 200)
pylab.ylim(-200, 200)
pylab.show()
def mod(v):
"""3d vector length"""
return np.sqrt(v[0]**2+v[1]**2+v[2]**2)
def world_transformation(xmin, xmax,
ymin, ymax,
zmin, zmax):
dx, dy, dz = (xmax-xmin), (ymax-ymin), (zmax-zmin)
return np.array([
[1.0/dx,0,0,-xmin/dx],
[0,1.0/dy,0,-ymin/dy],
[0,0,1.0/dz,-zmin/dz],
[0,0,0,1.0]])
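# Quick worked example: with xmin=100, xmax=120 the first row maps x=110 to
# (110 - 100)/20 = 0.5, i.e. each world coordinate lands in [0, 1].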
def test_world():
xmin, xmax = 100, 120
ymin, ymax = -100, 100
zmin, zmax = 0.1, 0.2
M = world_transformation(xmin, xmax, ymin, ymax, zmin, zmax)
print(M)
def view_transformation(E, R, V):
n = (E - R)
## new
# n /= mod(n)
# u = np.cross(V,n)
# u /= mod(u)
# v = np.cross(n,u)
# Mr = np.diag([1.]*4)
# Mt = np.diag([1.]*4)
# Mr[:3,:3] = u,v,n
# Mt[:3,-1] = -E
## end new
## old
n = n / mod(n)
u = np.cross(V, n)
u = u / mod(u)
v = np.cross(n, u)
Mr = [[u[0],u[1],u[2],0],
[v[0],v[1],v[2],0],
[n[0],n[1],n[2],0],
[0, 0, 0, 1],
]
#
Mt = [[1, 0, 0, -E[0]],
[0, 1, 0, -E[1]],
[0, 0, 1, -E[2]],
[0, 0, 0, 1]]
## end old
return np.dot(Mr, Mt)
def persp_transformation(zfront, zback):
a = (zfront+zback)/(zfront-zback)
b = -2*(zfront*zback)/(zfront-zback)
return np.array([[1,0,0,0],
[0,1,0,0],
[0,0,a,b],
[0,0,-1,0]
])
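# Sanity check of a and b: applying this matrix to (x, y, z, 1) gives
# w = -z and depth (a*z + b)/(-z), which evaluates to -1 at z = zfront and
# +1 at z = zback, so the frustum maps onto the canonical depth range.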
def proj_transform_vec(vec, M):
vecw = np.dot(M, vec)
w = vecw[3]
# clip here..
txs, tys, tzs = vecw[0]/w, vecw[1]/w, vecw[2]/w
return txs, tys, tzs
def proj_transform_vec_clip(vec, M):
vecw = np.dot(M, vec)
w = vecw[3]
# clip here..
txs, tys, tzs = vecw[0]/w, vecw[1]/w, vecw[2]/w
tis = (vecw[0] >= 0) * (vecw[0] <= 1) * (vecw[1] >= 0) * (vecw[1] <= 1)
if np.sometrue(tis):
tis = vecw[1] < 1
return txs, tys, tzs, tis
def inv_transform(xs, ys, zs, M):
iM = linalg.inv(M)
vec = vec_pad_ones(xs, ys, zs)
vecr = np.dot(iM, vec)
try:
vecr = vecr/vecr[3]
except OverflowError:
pass
return vecr[0], vecr[1], vecr[2]
def vec_pad_ones(xs, ys, zs):
try:
try:
vec = np.array([xs,ys,zs,np.ones(xs.shape)])
except (AttributeError,TypeError):
vec = np.array([xs,ys,zs,np.ones((len(xs)))])
except TypeError:
vec = np.array([xs,ys,zs,1])
return vec
def proj_transform(xs, ys, zs, M):
"""
Transform the points by the projection matrix
"""
vec = vec_pad_ones(xs, ys, zs)
return proj_transform_vec(vec, M)
def proj_transform_clip(xs, ys, zs, M):
"""
Transform the points by the projection matrix
and return the clipping result
returns txs,tys,tzs,tis
"""
vec = vec_pad_ones(xs, ys, zs)
return proj_transform_vec_clip(vec, M)
transform = proj_transform
def proj_points(points, M):
return list(zip(*proj_trans_points(points, M)))
def proj_trans_points(points, M):
xs, ys, zs = list(zip(*points))
return proj_transform(xs, ys, zs, M)
def proj_trans_clip_points(points, M):
xs, ys, zs = list(zip(*points))
return proj_transform_clip(xs, ys, zs, M)
def test_proj_draw_axes(M, s=1):
import pylab
xs, ys, zs = [0, s, 0, 0], [0, 0, s, 0], [0, 0, 0, s]
txs, tys, tzs = proj_transform(xs, ys, zs, M)
o, ax, ay, az = (txs[0], tys[0]), (txs[1], tys[1]), \
(txs[2], tys[2]), (txs[3], tys[3])
lines = [(o, ax), (o, ay), (o, az)]
ax = pylab.gca()
linec = LineCollection(lines)
ax.add_collection(linec)
for x, y, t in zip(txs, tys, ['o', 'x', 'y', 'z']):
pylab.text(x, y, t)
def test_proj_make_M(E=None):
# eye point
E = E or np.array([1, -1, 2]) * 1000
#E = np.array([20,10,20])
R = np.array([1, 1, 1]) * 100
V = np.array([0, 0, 1])
viewM = view_transformation(E, R, V)
perspM = persp_transformation(100, -100)
M = np.dot(perspM, viewM)
return M
def test_proj():
import pylab
M = test_proj_make_M()
ts = ['%d' % i for i in [0,1,2,3,0,4,5,6,7,4]]
xs, ys, zs = [0,1,1,0,0, 0,1,1,0,0], [0,0,1,1,0, 0,0,1,1,0], \
[0,0,0,0,0, 1,1,1,1,1]
xs, ys, zs = [np.array(v)*300 for v in (xs, ys, zs)]
#
test_proj_draw_axes(M, s=400)
txs, tys, tzs = proj_transform(xs, ys, zs, M)
ixs, iys, izs = inv_transform(txs, tys, tzs, M)
pylab.scatter(txs, tys, c=tzs)
pylab.plot(txs, tys, c='r')
for x, y, t in zip(txs, tys, ts):
pylab.text(x, y, t)
pylab.xlim(-0.2, 0.2)
pylab.ylim(-0.2, 0.2)
pylab.show()
def rot_x(V, alpha):
cosa, sina = np.cos(alpha), np.sin(alpha)
M1 = np.array([[1,0,0,0],
[0,cosa,-sina,0],
[0,sina,cosa,0],
                   [0,0,0,1]])  # keep the homogeneous coordinate
return np.dot(M1, V)
def test_rot():
V = [1,0,0,1]
print(rot_x(V, np.pi/6))
V = [0,1,0,1]
print(rot_x(V, np.pi/6))
if __name__ == "__main__":
test_proj()<|fim▁end|> | |
<|file_name|>os.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
* Higher-level interfaces to libc::* functions and operating system services.
*
* In general these take and return rust types, use rust idioms (enums,
* closures, vectors) rather than C idioms, and do more extensive safety
* checks.
*
* This module is not meant to only contain 1:1 mappings to libc entries; any
* os-interface code that is reasonably useful and broadly applicable can go
* here. Including utility routines that merely build on other os code.
*
* We assume the general case is that users do not care, and do not want to
* be made to care, which operating system they are on. While they may want
* to special case various special cases -- and so we will not _hide_ the
* facts of which OS the user is on -- they should be given the opportunity
* to write OS-ignorant code by default.
*/
#![allow(missing_doc)]
#[cfg(target_os = "macos")]
#[cfg(windows)]
use iter::range;
use clone::Clone;
use container::Container;
use libc;
use libc::{c_char, c_void, c_int};
use option::{Some, None, Option};
use os;
use ops::Drop;
use result::{Err, Ok, Result};
use ptr;
use str;
use str::{Str, StrSlice};
use fmt;
use sync::atomics::{AtomicInt, INIT_ATOMIC_INT, SeqCst};
use path::{Path, GenericPath};
use iter::Iterator;
use slice::{Vector, CloneableVector, ImmutableVector, MutableVector, OwnedVector};
use ptr::RawPtr;
#[cfg(unix)]
use c_str::ToCStr;
#[cfg(windows)]
use str::OwnedStr;
/// Delegates to the libc close() function, returning the same return value.
pub fn close(fd: int) -> int {
unsafe {
libc::close(fd as c_int) as int
}
}
pub static TMPBUF_SZ : uint = 1000u;
static BUF_BYTES : uint = 2048u;
/// Returns the current working directory.
#[cfg(unix)]
pub fn getcwd() -> Path {
use c_str::CString;
let mut buf = [0 as c_char, ..BUF_BYTES];
unsafe {
if libc::getcwd(buf.as_mut_ptr(), buf.len() as libc::size_t).is_null() {
fail!()
}
Path::new(CString::new(buf.as_ptr(), false))
}
}
/// Returns the current working directory.
#[cfg(windows)]
pub fn getcwd() -> Path {
use libc::DWORD;
use libc::GetCurrentDirectoryW;
let mut buf = [0 as u16, ..BUF_BYTES];
unsafe {
if libc::GetCurrentDirectoryW(buf.len() as DWORD, buf.as_mut_ptr()) == 0 as DWORD {
fail!();
}
}
Path::new(str::from_utf16(str::truncate_utf16_at_nul(buf))
.expect("GetCurrentDirectoryW returned invalid UTF-16"))
}
#[cfg(windows)]
pub mod win32 {
use libc::types::os::arch::extra::DWORD;
use libc;
use option::{None, Option};
use option;
use os::TMPBUF_SZ;
use str::StrSlice;
use str;
use slice::{MutableVector, ImmutableVector, OwnedVector};
use slice;
pub fn fill_utf16_buf_and_decode(f: |*mut u16, DWORD| -> DWORD)
-> Option<~str> {
unsafe {
let mut n = TMPBUF_SZ as DWORD;
let mut res = None;
let mut done = false;
while !done {
let mut buf = slice::from_elem(n as uint, 0u16);
let k = f(buf.as_mut_ptr(), n);
if k == (0 as DWORD) {
done = true;
} else if k == n &&
libc::GetLastError() ==
libc::ERROR_INSUFFICIENT_BUFFER as DWORD {
n *= 2 as DWORD;
} else if k >= n {
n = k;
} else {
done = true;
}
if k != 0 && done {
let sub = buf.slice(0, k as uint);
// We want to explicitly catch the case when the
// closure returned invalid UTF-16, rather than
// set `res` to None and continue.
let s = str::from_utf16(sub)
.expect("fill_utf16_buf_and_decode: closure created invalid UTF-16");
res = option::Some(s)
}
}
return res;
}
}
pub fn as_utf16_p<T>(s: &str, f: |*u16| -> T) -> T {
let mut t = s.to_utf16();
// Null terminate before passing on.
t.push(0u16);
f(t.as_ptr())
}
}
/*
Accessing environment variables is not generally threadsafe.
Serialize access through a global lock.
*/
fn with_env_lock<T>(f: || -> T) -> T {
use unstable::mutex::{StaticNativeMutex, NATIVE_MUTEX_INIT};
static mut lock: StaticNativeMutex = NATIVE_MUTEX_INIT;
unsafe {
let _guard = lock.lock();
f()
}
}
/// Returns a vector of (variable, value) pairs for all the environment
/// variables of the current process.
///
/// Invalid UTF-8 bytes are replaced with \uFFFD. See `str::from_utf8_lossy()`
/// for details.
pub fn env() -> ~[(~str,~str)] {
env_as_bytes().move_iter().map(|(k,v)| {
let k = str::from_utf8_lossy(k).into_owned();
let v = str::from_utf8_lossy(v).into_owned();
(k,v)
}).collect()
}
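// Illustrative usage of the snapshot returned above (not part of the API
// docs):
//
//     for (key, value) in os::env().move_iter() {
//         println!("{}={}", key, value);
//     }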
/// Returns a vector of (variable, value) byte-vector pairs for all the
/// environment variables of the current process.
pub fn env_as_bytes() -> ~[(~[u8],~[u8])] {
unsafe {
#[cfg(windows)]
unsafe fn get_env_pairs() -> ~[~[u8]] {
use c_str;
use str::StrSlice;
use libc::funcs::extra::kernel32::{
GetEnvironmentStringsA,
FreeEnvironmentStringsA
};
let ch = GetEnvironmentStringsA();
if ch as uint == 0 {
fail!("os::env() failure getting env string from OS: {}",
os::last_os_error());
}
let mut result = ~[];
c_str::from_c_multistring(ch as *c_char, None, |cstr| {
result.push(cstr.as_bytes_no_nul().to_owned());
});
FreeEnvironmentStringsA(ch);
result
}
#[cfg(unix)]
unsafe fn get_env_pairs() -> ~[~[u8]] {
use c_str::CString;
extern {
fn rust_env_pairs() -> **c_char;
}
let environ = rust_env_pairs();
if environ as uint == 0 {
fail!("os::env() failure getting env string from OS: {}",
os::last_os_error());
}
let mut result = ~[];
ptr::array_each(environ, |e| {
let env_pair = CString::new(e, false).as_bytes_no_nul().to_owned();
result.push(env_pair);
});
result
}
fn env_convert(input: ~[~[u8]]) -> ~[(~[u8], ~[u8])] {
let mut pairs = ~[];
for p in input.iter() {
let vs: ~[&[u8]] = p.splitn(1, |b| *b == '=' as u8).collect();
let key = vs[0].to_owned();
let val = if vs.len() < 2 { ~[] } else { vs[1].to_owned() };
pairs.push((key, val));
}
pairs
}
with_env_lock(|| {
let unparsed_environ = get_env_pairs();
env_convert(unparsed_environ)
})
}
}
#[cfg(unix)]
/// Fetches the environment variable `n` from the current process, returning
/// None if the variable isn't set.
///
/// Any invalid UTF-8 bytes in the value are replaced by \uFFFD. See
/// `str::from_utf8_lossy()` for details.
///
/// # Failure
///
/// Fails if `n` has any interior NULs.
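///
/// # Example
///
/// A minimal sketch, not from the original docs; it assumes nothing
/// beyond the process environment:
///
/// ```rust
/// use std::os;
///
/// match os::getenv("HOME") {
///     Some(val) => println!("HOME is set to {}", val),
///     None => println!("HOME is not set")
/// }
/// ```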
pub fn getenv(n: &str) -> Option<~str> {
getenv_as_bytes(n).map(|v| str::from_utf8_lossy(v).into_owned())
}
#[cfg(unix)]
/// Fetches the environment variable `n` byte vector from the current process,
/// returning None if the variable isn't set.
///
/// # Failure
///
/// Fails if `n` has any interior NULs.
pub fn getenv_as_bytes(n: &str) -> Option<~[u8]> {
use c_str::CString;
unsafe {
with_env_lock(|| {
let s = n.with_c_str(|buf| libc::getenv(buf));
if s.is_null() {
None
} else {
Some(CString::new(s, false).as_bytes_no_nul().to_owned())
}
})
}
}
#[cfg(windows)]
/// Fetches the environment variable `n` from the current process, returning
/// None if the variable isn't set.
pub fn getenv(n: &str) -> Option<~str> {
unsafe {
with_env_lock(|| {
use os::win32::{as_utf16_p, fill_utf16_buf_and_decode};
as_utf16_p(n, |u| {
fill_utf16_buf_and_decode(|buf, sz| {
libc::GetEnvironmentVariableW(u, buf, sz)
})
})
})
}
}
#[cfg(windows)]
/// Fetches the environment variable `n` byte vector from the current process,
/// returning None if the variable isn't set.
pub fn getenv_as_bytes(n: &str) -> Option<~[u8]> {
getenv(n).map(|s| s.into_bytes())
}
#[cfg(unix)]
/// Sets the environment variable `n` to the value `v` for the currently running
/// process
///
/// # Failure
///
/// Fails if `n` or `v` have any interior NULs.
pub fn setenv(n: &str, v: &str) {
unsafe {
with_env_lock(|| {
n.with_c_str(|nbuf| {
v.with_c_str(|vbuf| {
libc::funcs::posix01::unistd::setenv(nbuf, vbuf, 1);
})
})
})
}
}
#[cfg(windows)]
/// Sets the environment variable `n` to the value `v` for the currently running
/// process
pub fn setenv(n: &str, v: &str) {
unsafe {
with_env_lock(|| {
use os::win32::as_utf16_p;
as_utf16_p(n, |nbuf| {
as_utf16_p(v, |vbuf| {
libc::SetEnvironmentVariableW(nbuf, vbuf);
})
})
})
}
}
/// Remove a variable from the environment entirely
///
/// # Failure
///
/// Fails (on unix) if `n` has any interior NULs.
pub fn unsetenv(n: &str) {
#[cfg(unix)]
fn _unsetenv(n: &str) {
unsafe {
with_env_lock(|| {
n.with_c_str(|nbuf| {
libc::funcs::posix01::unistd::unsetenv(nbuf);
})
})
}
}
#[cfg(windows)]
fn _unsetenv(n: &str) {
unsafe {
with_env_lock(|| {
use os::win32::as_utf16_p;
as_utf16_p(n, |nbuf| {
libc::SetEnvironmentVariableW(nbuf, ptr::null());
})
})
}
}
_unsetenv(n);
}
/// A low-level OS in-memory pipe.
pub struct Pipe {
/// A file descriptor representing the reading end of the pipe. Data written
/// on the `out` file descriptor can be read from this file descriptor.
input: c_int,
/// A file descriptor representing the write end of the pipe. Data written
/// to this file descriptor can be read from the `input` file descriptor.
out: c_int,
}
/// Creates a new low-level OS in-memory pipe.
#[cfg(unix)]
pub fn pipe() -> Pipe {
unsafe {
let mut fds = Pipe {input: 0,
out: 0};
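        // `Pipe` is laid out as two consecutive c_ints, so a pointer to
        // `input` doubles as the int[2] array that libc::pipe() fills in.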
assert_eq!(libc::pipe(&mut fds.input), 0);
return Pipe {input: fds.input, out: fds.out};
}
}
/// Creates a new low-level OS in-memory pipe.
#[cfg(windows)]
pub fn pipe() -> Pipe {
unsafe {
        // Windows pipes work subtly differently from unix pipes, and their
        // inheritance has to be handled in a different way that I do not
        // fully understand. Here we explicitly make the pipe non-inheritable,
        // which means that to pass it to a subprocess it must be duplicated
        // first, as in std::run.
let mut fds = Pipe {input: 0,
out: 0};
let res = libc::pipe(&mut fds.input, 1024 as ::libc::c_uint,
(libc::O_BINARY | libc::O_NOINHERIT) as c_int);
assert_eq!(res, 0);
assert!((fds.input != -1 && fds.input != 0 ));
        assert!((fds.out != -1 && fds.out != 0));
return Pipe {input: fds.input, out: fds.out};
}
}
/// Returns the proper dll filename for the given basename of a file.
pub fn dll_filename(base: &str) -> ~str {
format!("{}{}{}", consts::DLL_PREFIX, base, consts::DLL_SUFFIX)
}
/// Optionally returns the filesystem path of the current executable which is
/// running. If any failure occurs, None is returned.
pub fn self_exe_name() -> Option<Path> {
#[cfg(target_os = "freebsd")]
fn load_self() -> Option<~[u8]> {
unsafe {
use libc::funcs::bsd44::*;
use libc::consts::os::extra::*;
use slice;
let mib = ~[CTL_KERN as c_int,
KERN_PROC as c_int,
KERN_PROC_PATHNAME as c_int, -1 as c_int];
let mut sz: libc::size_t = 0;
let err = sysctl(mib.as_ptr(), mib.len() as ::libc::c_uint,
ptr::mut_null(), &mut sz, ptr::null(),
0u as libc::size_t);
if err != 0 { return None; }
if sz == 0 { return None; }
let mut v: ~[u8] = slice::with_capacity(sz as uint);
let err = sysctl(mib.as_ptr(), mib.len() as ::libc::c_uint,
v.as_mut_ptr() as *mut c_void, &mut sz, ptr::null(),
0u as libc::size_t);
if err != 0 { return None; }
if sz == 0 { return None; }
v.set_len(sz as uint - 1); // chop off trailing NUL
Some(v)
}
}
#[cfg(target_os = "linux")]
#[cfg(target_os = "android")]
fn load_self() -> Option<~[u8]> {
use std::io;
match io::fs::readlink(&Path::new("/proc/self/exe")) {
Ok(path) => Some(path.as_vec().to_owned()),
Err(..) => None
}
}
#[cfg(target_os = "macos")]
fn load_self() -> Option<~[u8]> {
unsafe {
use libc::funcs::extra::_NSGetExecutablePath;
use slice;
let mut sz: u32 = 0;
_NSGetExecutablePath(ptr::mut_null(), &mut sz);
if sz == 0 { return None; }
let mut v: ~[u8] = slice::with_capacity(sz as uint);
let err = _NSGetExecutablePath(v.as_mut_ptr() as *mut i8, &mut sz);
if err != 0 { return None; }
v.set_len(sz as uint - 1); // chop off trailing NUL
Some(v)
}
}
#[cfg(windows)]
fn load_self() -> Option<~[u8]> {
use str::OwnedStr;
unsafe {
use os::win32::fill_utf16_buf_and_decode;
fill_utf16_buf_and_decode(|buf, sz| {
libc::GetModuleFileNameW(0u as libc::DWORD, buf, sz)
}).map(|s| s.into_bytes())
}
}
load_self().and_then(Path::new_opt)
}
/// Optionally returns the filesystem path to the current executable which is
/// running. Like self_exe_name() but without the binary's name.
/// If any failure occurs, None is returned.
pub fn self_exe_path() -> Option<Path> {
self_exe_name().map(|mut p| { p.pop(); p })
}
/**
* Returns the path to the user's home directory, if known.
*
* On Unix, returns the value of the 'HOME' environment variable if it is set
* and not equal to the empty string.
*
* On Windows, returns the value of the 'HOME' environment variable if it is
* set and not equal to the empty string. Otherwise, returns the value of the
* 'USERPROFILE' environment variable if it is set and not equal to the empty
* string.
*
 * Otherwise, homedir returns `None`.
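 *
 * # Example
 *
 * An illustrative sketch, not part of the original docs:
 *
 * ```rust
 * use std::os;
 *
 * match os::homedir() {
 *     Some(p) => println!("home directory: {}", p.display()),
 *     None => println!("no home directory known")
 * }
 * ```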
*/
pub fn homedir() -> Option<Path> {
// FIXME (#7188): getenv needs a ~[u8] variant
return match getenv("HOME") {
Some(ref p) if !p.is_empty() => Path::new_opt(p.as_slice()),
_ => secondary()
};
#[cfg(unix)]
fn secondary() -> Option<Path> {
None
}
#[cfg(windows)]
fn secondary() -> Option<Path> {
getenv("USERPROFILE").and_then(|p| {
if !p.is_empty() {
Path::new_opt(p)
} else {
None
}
})
}
}
/**
* Returns the path to a temporary directory.
*
* On Unix, returns the value of the 'TMPDIR' environment variable if it is
* set and non-empty and '/tmp' otherwise.
* On Android, there is no global temporary folder (it is usually allocated
* per-app), hence returns '/data/tmp' which is commonly used.
*
* On Windows, returns the value of, in order, the 'TMP', 'TEMP',
* 'USERPROFILE' environment variable if any are set and not the empty
* string. Otherwise, tmpdir returns the path to the Windows directory.
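 *
 * # Example
 *
 * A minimal sketch, not part of the original docs:
 *
 * ```rust
 * use std::os;
 *
 * let tmp = os::tmpdir();
 * println!("temporary files go in {}", tmp.display());
 * ```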
*/
pub fn tmpdir() -> Path {
return lookup();
fn getenv_nonempty(v: &str) -> Option<Path> {
match getenv(v) {
Some(x) =>
if x.is_empty() {
None
} else {
Path::new_opt(x)
},
_ => None
}
}
#[cfg(unix)]
fn lookup() -> Path {
if cfg!(target_os = "android") {
Path::new("/data/tmp")
} else {
getenv_nonempty("TMPDIR").unwrap_or(Path::new("/tmp"))
}
}
#[cfg(windows)]
fn lookup() -> Path {
getenv_nonempty("TMP").or(
getenv_nonempty("TEMP").or(
getenv_nonempty("USERPROFILE").or(
getenv_nonempty("WINDIR")))).unwrap_or(Path::new("C:\\Windows"))
}
}
/**
* Convert a relative path to an absolute path
*
* If the given path is relative, return it prepended with the current working
* directory. If the given path is already an absolute path, return it
* as is.
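 *
 * # Example
 *
 * An illustrative sketch, not part of the original docs (`Path` is
 * assumed to come from the prelude, as in the tests below):
 *
 * ```rust
 * use std::os;
 *
 * let abs = os::make_absolute(&Path::new("test-path"));
 * println!("{}", abs.display());
 * ```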
*/
// NB: this is here rather than in path because it is a form of environment
// querying; what it does depends on the process working directory, not just
// the input paths.
pub fn make_absolute(p: &Path) -> Path {
if p.is_absolute() {
p.clone()
} else {
let mut ret = getcwd();
ret.push(p);
ret
}
}
/// Changes the current working directory to the specified path, returning
/// whether the change was completed successfully or not.
pub fn change_dir(p: &Path) -> bool {
return chdir(p);
#[cfg(windows)]
fn chdir(p: &Path) -> bool {
unsafe {
use os::win32::as_utf16_p;
return as_utf16_p(p.as_str().unwrap(), |buf| {
libc::SetCurrentDirectoryW(buf) != (0 as libc::BOOL)
});
}
}
#[cfg(unix)]
fn chdir(p: &Path) -> bool {
p.with_c_str(|buf| {
unsafe {
libc::chdir(buf) == (0 as c_int)
}
})
}
}
#[cfg(unix)]
/// Returns the platform-specific value of errno
pub fn errno() -> int {
#[cfg(target_os = "macos")]
#[cfg(target_os = "freebsd")]
fn errno_location() -> *c_int {
extern {
fn __error() -> *c_int;
}
unsafe {
__error()
}
}
#[cfg(target_os = "linux")]
#[cfg(target_os = "android")]
fn errno_location() -> *c_int {
extern {
fn __errno_location() -> *c_int;
}
unsafe {
__errno_location()
}
}
unsafe {
(*errno_location()) as int
}
}
#[cfg(windows)]
/// Returns the platform-specific value of errno
pub fn errno() -> uint {
use libc::types::os::arch::extra::DWORD;
#[link_name = "kernel32"]
extern "system" {
fn GetLastError() -> DWORD;
}
unsafe {
GetLastError() as uint
}
}
/// Get a string representing the platform-dependent last error
pub fn last_os_error() -> ~str {
#[cfg(unix)]
fn strerror() -> ~str {
#[cfg(target_os = "macos")]
#[cfg(target_os = "android")]
#[cfg(target_os = "freebsd")]
fn strerror_r(errnum: c_int, buf: *mut c_char, buflen: libc::size_t)
-> c_int {
extern {
fn strerror_r(errnum: c_int, buf: *mut c_char,
buflen: libc::size_t) -> c_int;
}
unsafe {
strerror_r(errnum, buf, buflen)
}
}
// GNU libc provides a non-compliant version of strerror_r by default
// and requires macros to instead use the POSIX compliant variant.
// So we just use __xpg_strerror_r which is always POSIX compliant
#[cfg(target_os = "linux")]
fn strerror_r(errnum: c_int, buf: *mut c_char,
                      buflen: libc::size_t) -> c_int {
            extern {
                fn __xpg_strerror_r(errnum: c_int,
buf: *mut c_char,
buflen: libc::size_t)
-> c_int;
}
unsafe {
__xpg_strerror_r(errnum, buf, buflen)
}
}
let mut buf = [0 as c_char, ..TMPBUF_SZ];
let p = buf.as_mut_ptr();
unsafe {
if strerror_r(errno() as c_int, p, buf.len() as libc::size_t) < 0 {
fail!("strerror_r failure");
}
str::raw::from_c_str(p as *c_char)
}
}
#[cfg(windows)]
fn strerror() -> ~str {
use libc::types::os::arch::extra::DWORD;
use libc::types::os::arch::extra::LPWSTR;
use libc::types::os::arch::extra::LPVOID;
use libc::types::os::arch::extra::WCHAR;
#[link_name = "kernel32"]
extern "system" {
fn FormatMessageW(flags: DWORD,
lpSrc: LPVOID,
msgId: DWORD,
langId: DWORD,
buf: LPWSTR,
nsize: DWORD,
args: *c_void)
-> DWORD;
}
static FORMAT_MESSAGE_FROM_SYSTEM: DWORD = 0x00001000;
static FORMAT_MESSAGE_IGNORE_INSERTS: DWORD = 0x00000200;
// This value is calculated from the macro
// MAKELANGID(LANG_SYSTEM_DEFAULT, SUBLANG_SYS_DEFAULT)
let langId = 0x0800 as DWORD;
let err = errno() as DWORD;
let mut buf = [0 as WCHAR, ..TMPBUF_SZ];
unsafe {
let res = FormatMessageW(FORMAT_MESSAGE_FROM_SYSTEM |
FORMAT_MESSAGE_IGNORE_INSERTS,
ptr::mut_null(),
err,
langId,
buf.as_mut_ptr(),
buf.len() as DWORD,
ptr::null());
if res == 0 {
                // FormatMessageW can sometimes fail, e.g. if the system
                // doesn't like the langId.
let fm_err = errno();
return format!("OS Error {} (FormatMessageW() returned error {})", err, fm_err);
}
let msg = str::from_utf16(str::truncate_utf16_at_nul(buf));
match msg {
Some(msg) => format!("OS Error {}: {}", err, msg),
None => format!("OS Error {} (FormatMessageW() returned invalid UTF-16)", err),
}
}
}
strerror()
}
static mut EXIT_STATUS: AtomicInt = INIT_ATOMIC_INT;
/**
* Sets the process exit code
*
* Sets the exit code returned by the process if all supervised tasks
* terminate successfully (without failing). If the current root task fails
* and is supervised by the scheduler then any user-specified exit status is
* ignored and the process exits with the default failure status.
*
* Note that this is not synchronized against modifications of other threads.
*/
pub fn set_exit_status(code: int) {
unsafe { EXIT_STATUS.store(code, SeqCst) }
}
/// Fetches the process's current exit code. This defaults to 0 and can change
/// by calling `set_exit_status`.
pub fn get_exit_status() -> int {
unsafe { EXIT_STATUS.load(SeqCst) }
}
#[cfg(target_os = "macos")]
unsafe fn load_argc_and_argv(argc: int, argv: **c_char) -> ~[~[u8]] {
use c_str::CString;
let mut args = ~[];
for i in range(0u, argc as uint) {
args.push(CString::new(*argv.offset(i as int), false).as_bytes_no_nul().to_owned())
}
args
}
/**
 * Returns the command line arguments passed to the current process as a
 * list of byte vectors.
 */
#[cfg(target_os = "macos")]
fn real_args_as_bytes() -> ~[~[u8]] {
unsafe {
let (argc, argv) = (*_NSGetArgc() as int,
*_NSGetArgv() as **c_char);
load_argc_and_argv(argc, argv)
}
}
#[cfg(target_os = "linux")]
#[cfg(target_os = "android")]
#[cfg(target_os = "freebsd")]
fn real_args_as_bytes() -> ~[~[u8]] {
use rt;
match rt::args::clone() {
Some(args) => args,
None => fail!("process arguments not initialized")
}
}
#[cfg(not(windows))]
fn real_args() -> ~[~str] {
real_args_as_bytes().move_iter().map(|v| str::from_utf8_lossy(v).into_owned()).collect()
}
#[cfg(windows)]
fn real_args() -> ~[~str] {
use slice;
let mut nArgs: c_int = 0;
let lpArgCount: *mut c_int = &mut nArgs;
let lpCmdLine = unsafe { GetCommandLineW() };
let szArgList = unsafe { CommandLineToArgvW(lpCmdLine, lpArgCount) };
let mut args = ~[];
for i in range(0u, nArgs as uint) {
unsafe {
// Determine the length of this argument.
let ptr = *szArgList.offset(i as int);
let mut len = 0;
while *ptr.offset(len as int) != 0 { len += 1; }
// Push it onto the list.
let opt_s = slice::raw::buf_as_slice(ptr, len, |buf| {
str::from_utf16(str::truncate_utf16_at_nul(buf))
});
args.push(opt_s.expect("CommandLineToArgvW returned invalid UTF-16"));
}
}
unsafe {
LocalFree(szArgList as *c_void);
}
return args;
}
#[cfg(windows)]
fn real_args_as_bytes() -> ~[~[u8]] {
real_args().move_iter().map(|s| s.into_bytes()).collect()
}
type LPCWSTR = *u16;
#[cfg(windows)]
#[link_name="kernel32"]
extern "system" {
fn GetCommandLineW() -> LPCWSTR;
fn LocalFree(ptr: *c_void);
}
#[cfg(windows)]
#[link_name="shell32"]
extern "system" {
fn CommandLineToArgvW(lpCmdLine: LPCWSTR, pNumArgs: *mut c_int) -> **u16;
}
/// Returns the arguments which this program was started with (normally passed
/// via the command line).
///
/// The arguments are interpreted as utf-8, with invalid bytes replaced with \uFFFD.
/// See `str::from_utf8_lossy` for details.
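///
/// # Example
///
/// A minimal sketch, not from the original docs:
///
/// ```rust
/// use std::os;
///
/// let args = os::args();
/// for a in args.iter() {
///     println!("argument: {}", *a);
/// }
/// ```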
pub fn args() -> ~[~str] {
real_args()
}
/// Returns the arguments which this program was started with (normally passed
/// via the command line) as byte vectors.
pub fn args_as_bytes() -> ~[~[u8]] {
real_args_as_bytes()
}
#[cfg(target_os = "macos")]
extern {
// These functions are in crt_externs.h.
pub fn _NSGetArgc() -> *c_int;
pub fn _NSGetArgv() -> ***c_char;
}
// Round `from` up to the nearest multiple of `to`. When `from` is zero the
// result is `to` (never zero), which suits the allocation sizes used below.
fn round_up(from: uint, to: uint) -> uint {
let r = if from % to == 0 {
from
} else {
from + to - (from % to)
};
if r == 0 {
to
} else {
r
}
}
/// Returns the page size of the current architecture in bytes.
#[cfg(unix)]
pub fn page_size() -> uint {
unsafe {
libc::sysconf(libc::_SC_PAGESIZE) as uint
}
}
/// Returns the page size of the current architecture in bytes.
#[cfg(windows)]
pub fn page_size() -> uint {
unsafe {
let mut info = libc::SYSTEM_INFO::new();
libc::GetSystemInfo(&mut info);
return info.dwPageSize as uint;
}
}
/// A memory mapped file or chunk of memory. This is a very system-specific
/// interface to the OS's memory mapping facilities (`mmap` on POSIX,
/// `VirtualAlloc`/`CreateFileMapping` on win32). It makes no attempt at
/// abstracting platform differences, besides in error values returned. Consider
/// yourself warned.
///
/// The memory map is released (unmapped) when the destructor is run, so don't
/// let it leave scope by accident if you want it to stick around.
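///
/// # Example
///
/// A sketch adapted from this module's own `memory_map_rw` test; error
/// handling is elided via `unwrap()` for brevity:
///
/// ```rust
/// use std::os;
///
/// let chunk = os::MemoryMap::new(16, [os::MapReadable,
///                                     os::MapWritable]).unwrap();
/// assert!(chunk.len >= 16);
/// ```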
pub struct MemoryMap {
/// Pointer to the memory created or modified by this map.
data: *mut u8,
/// Number of bytes this map applies to
len: uint,
/// Type of mapping
kind: MemoryMapKind
}
/// Type of memory map
pub enum MemoryMapKind {
    /// File-backed memory map. On Windows the inner pointer is the `HANDLE`
    /// returned by `CreateFileMapping` (null on POSIX); it is kept so the
    /// mapping can be closed when the map is dropped.
MapFile(*u8),
/// Virtual memory map. Usually used to change the permissions of a given
/// chunk of memory, or for allocation. Corresponds to `VirtualAlloc` on
/// Windows.
MapVirtual
}
/// Options the memory map is created with
pub enum MapOption {
/// The memory should be readable
MapReadable,
/// The memory should be writable
MapWritable,
/// The memory should be executable
MapExecutable,
/// Create a map for a specific address range. Corresponds to `MAP_FIXED` on
/// POSIX.
MapAddr(*u8),
/// Create a memory mapping for a file with a given fd.
MapFd(c_int),
/// When using `MapFd`, the start of the map is `uint` bytes from the start
/// of the file.
MapOffset(uint),
/// On POSIX, this can be used to specify the default flags passed to
/// `mmap`. By default it uses `MAP_PRIVATE` and, if not using `MapFd`,
/// `MAP_ANON`. This will override both of those. This is platform-specific
/// (the exact values used) and ignored on Windows.
MapNonStandardFlags(c_int),
}
/// Possible errors when creating a map.
pub enum MapError {
/// ## The following are POSIX-specific
///
/// fd was not open for reading or, if using `MapWritable`, was not open for
/// writing.
ErrFdNotAvail,
/// fd was not valid
ErrInvalidFd,
/// Either the address given by `MapAddr` or offset given by `MapOffset` was
/// not a multiple of `MemoryMap::granularity` (unaligned to page size).
ErrUnaligned,
/// With `MapFd`, the fd does not support mapping.
ErrNoMapSupport,
/// If using `MapAddr`, the address + `min_len` was outside of the process's
/// address space. If using `MapFd`, the target of the fd didn't have enough
/// resources to fulfill the request.
ErrNoMem,
/// A zero-length map was requested. This is invalid according to
/// [POSIX](http://pubs.opengroup.org/onlinepubs/9699919799/functions/mmap.html).
/// Not all platforms obey this, but this wrapper does.
ErrZeroLength,
/// Unrecognized error. The inner value is the unrecognized errno.
ErrUnknown(int),
/// ## The following are win32-specific
///
/// Unsupported combination of protection flags
/// (`MapReadable`/`MapWritable`/`MapExecutable`).
ErrUnsupProt,
/// When using `MapFd`, `MapOffset` was given (Windows does not support this
/// at all)
ErrUnsupOffset,
/// When using `MapFd`, there was already a mapping to the file.
ErrAlreadyExists,
/// Unrecognized error from `VirtualAlloc`. The inner value is the return
/// value of GetLastError.
ErrVirtualAlloc(uint),
/// Unrecognized error from `CreateFileMapping`. The inner value is the
/// return value of `GetLastError`.
ErrCreateFileMappingW(uint),
/// Unrecognized error from `MapViewOfFile`. The inner value is the return
/// value of `GetLastError`.
ErrMapViewOfFile(uint)
}
impl fmt::Show for MapError {
fn fmt(&self, out: &mut fmt::Formatter) -> fmt::Result {
let str = match *self {
ErrFdNotAvail => "fd not available for reading or writing",
ErrInvalidFd => "Invalid fd",
ErrUnaligned => {
"Unaligned address, invalid flags, negative length or \
unaligned offset"
}
ErrNoMapSupport=> "File doesn't support mapping",
ErrNoMem => "Invalid address, or not enough available memory",
ErrUnsupProt => "Protection mode unsupported",
ErrUnsupOffset => "Offset in virtual memory mode is unsupported",
ErrAlreadyExists => "File mapping for specified file already exists",
ErrZeroLength => "Zero-length mapping not allowed",
ErrUnknown(code) => {
return write!(out.buf, "Unknown error = {}", code)
},
ErrVirtualAlloc(code) => {
return write!(out.buf, "VirtualAlloc failure = {}", code)
},
ErrCreateFileMappingW(code) => {
return write!(out.buf, "CreateFileMappingW failure = {}", code)
},
ErrMapViewOfFile(code) => {
return write!(out.buf, "MapViewOfFile failure = {}", code)
}
};
write!(out.buf, "{}", str)
}
}
#[cfg(unix)]
impl MemoryMap {
/// Create a new mapping with the given `options`, at least `min_len` bytes
/// long. `min_len` must be greater than zero; see the note on
/// `ErrZeroLength`.
pub fn new(min_len: uint, options: &[MapOption]) -> Result<MemoryMap, MapError> {
use libc::off_t;
use cmp::Equiv;
if min_len == 0 {
return Err(ErrZeroLength)
}
let mut addr: *u8 = ptr::null();
let mut prot = 0;
let mut flags = libc::MAP_PRIVATE;
let mut fd = -1;
let mut offset = 0;
let mut custom_flags = false;
let len = round_up(min_len, page_size());
for &o in options.iter() {
match o {
MapReadable => { prot |= libc::PROT_READ; },
MapWritable => { prot |= libc::PROT_WRITE; },
MapExecutable => { prot |= libc::PROT_EXEC; },
MapAddr(addr_) => {
flags |= libc::MAP_FIXED;
addr = addr_;
},
MapFd(fd_) => {
flags |= libc::MAP_FILE;
fd = fd_;
},
MapOffset(offset_) => { offset = offset_ as off_t; },
MapNonStandardFlags(f) => { custom_flags = true; flags = f },
}
}
if fd == -1 && !custom_flags { flags |= libc::MAP_ANON; }
let r = unsafe {
libc::mmap(addr as *c_void, len as libc::size_t, prot, flags, fd,
offset)
};
if r.equiv(&libc::MAP_FAILED) {
Err(match errno() as c_int {
libc::EACCES => ErrFdNotAvail,
libc::EBADF => ErrInvalidFd,
libc::EINVAL => ErrUnaligned,
libc::ENODEV => ErrNoMapSupport,
libc::ENOMEM => ErrNoMem,
code => ErrUnknown(code as int)
})
} else {
Ok(MemoryMap {
data: r as *mut u8,
len: len,
kind: if fd == -1 {
MapVirtual
} else {
MapFile(ptr::null())
}
})
}
}
/// Granularity that the offset or address must be for `MapOffset` and
/// `MapAddr` respectively.
pub fn granularity() -> uint {
page_size()
}
}
#[cfg(unix)]
impl Drop for MemoryMap {
    /// Unmap the mapping. Errors from `munmap` are currently ignored
    /// (see the FIXME below).
fn drop(&mut self) {
if self.len == 0 { /* workaround for dummy_stack */ return; }
unsafe {
// FIXME: what to do if this fails?
let _ = libc::munmap(self.data as *c_void, self.len as libc::size_t);
}
}
}
#[cfg(windows)]
impl MemoryMap {
/// Create a new mapping with the given `options`, at least `min_len` bytes long.
pub fn new(min_len: uint, options: &[MapOption]) -> Result<MemoryMap, MapError> {
use libc::types::os::arch::extra::{LPVOID, DWORD, SIZE_T, HANDLE};
let mut lpAddress: LPVOID = ptr::mut_null();
let mut readable = false;
let mut writable = false;
let mut executable = false;
let mut fd: c_int = -1;
let mut offset: uint = 0;
let len = round_up(min_len, page_size());
for &o in options.iter() {
match o {
MapReadable => { readable = true; },
MapWritable => { writable = true; },
MapExecutable => { executable = true; }
MapAddr(addr_) => { lpAddress = addr_ as LPVOID; },
MapFd(fd_) => { fd = fd_; },
MapOffset(offset_) => { offset = offset_; },
MapNonStandardFlags(..) => {}
}
}
let flProtect = match (executable, readable, writable) {
(false, false, false) if fd == -1 => libc::PAGE_NOACCESS,
(false, true, false) => libc::PAGE_READONLY,
(false, true, true) => libc::PAGE_READWRITE,
(true, false, false) if fd == -1 => libc::PAGE_EXECUTE,
(true, true, false) => libc::PAGE_EXECUTE_READ,
(true, true, true) => libc::PAGE_EXECUTE_READWRITE,
_ => return Err(ErrUnsupProt)
};
if fd == -1 {
if offset != 0 {
return Err(ErrUnsupOffset);
}
let r = unsafe {
libc::VirtualAlloc(lpAddress,
len as SIZE_T,
libc::MEM_COMMIT | libc::MEM_RESERVE,
flProtect)
};
match r as uint {
0 => Err(ErrVirtualAlloc(errno())),
_ => Ok(MemoryMap {
data: r as *mut u8,
len: len,
kind: MapVirtual
})
}
} else {
let dwDesiredAccess = match (executable, readable, writable) {
(false, true, false) => libc::FILE_MAP_READ,
(false, true, true) => libc::FILE_MAP_WRITE,
(true, true, false) => libc::FILE_MAP_READ | libc::FILE_MAP_EXECUTE,
(true, true, true) => libc::FILE_MAP_WRITE | libc::FILE_MAP_EXECUTE,
_ => return Err(ErrUnsupProt) // Actually, because of the check above,
// we should never get here.
};
unsafe {
let hFile = libc::get_osfhandle(fd) as HANDLE;
let mapping = libc::CreateFileMappingW(hFile,
ptr::mut_null(),
flProtect,
0,
0,
ptr::null());
if mapping == ptr::mut_null() {
return Err(ErrCreateFileMappingW(errno()));
}
if errno() as c_int == libc::ERROR_ALREADY_EXISTS {
return Err(ErrAlreadyExists);
}
let r = libc::MapViewOfFile(mapping,
dwDesiredAccess,
                                        ((offset as u64) >> 32) as DWORD,
(offset & 0xffff_ffff) as DWORD,
0);
match r as uint {
0 => Err(ErrMapViewOfFile(errno())),
_ => Ok(MemoryMap {
data: r as *mut u8,
len: len,
kind: MapFile(mapping as *u8)
})
}
}
}
}
/// Granularity of MapAddr() and MapOffset() parameter values.
/// This may be greater than the value returned by page_size().
pub fn granularity() -> uint {
unsafe {
let mut info = libc::SYSTEM_INFO::new();
libc::GetSystemInfo(&mut info);
return info.dwAllocationGranularity as uint;
}
}
}
#[cfg(windows)]
impl Drop for MemoryMap {
    /// Unmap the mapping. Failures of `VirtualFree`, `UnmapViewOfFile`, or
    /// `CloseHandle` are printed but do not fail the task.
fn drop(&mut self) {
use libc::types::os::arch::extra::{LPCVOID, HANDLE};
use libc::consts::os::extra::FALSE;
if self.len == 0 { return }
unsafe {
match self.kind {
MapVirtual => {
if libc::VirtualFree(self.data as *mut c_void, 0,
libc::MEM_RELEASE) == 0 {
println!("VirtualFree failed: {}", errno());
}
},
MapFile(mapping) => {
if libc::UnmapViewOfFile(self.data as LPCVOID) == FALSE {
println!("UnmapViewOfFile failed: {}", errno());
}
if libc::CloseHandle(mapping as HANDLE) == FALSE {
println!("CloseHandle failed: {}", errno());
}
}
}
}
}
}
/// Various useful system-specific constants.
pub mod consts {
#[cfg(unix)]
pub use os::consts::unix::*;
#[cfg(windows)]
pub use os::consts::windows::*;
#[cfg(target_os = "macos")]
pub use os::consts::macos::*;
#[cfg(target_os = "freebsd")]
pub use os::consts::freebsd::*;
#[cfg(target_os = "linux")]
pub use os::consts::linux::*;
#[cfg(target_os = "android")]
pub use os::consts::android::*;
#[cfg(target_os = "win32")]
pub use os::consts::win32::*;
#[cfg(target_arch = "x86")]
pub use os::consts::x86::*;
#[cfg(target_arch = "x86_64")]
pub use os::consts::x86_64::*;
#[cfg(target_arch = "arm")]
pub use os::consts::arm::*;
#[cfg(target_arch = "mips")]
pub use os::consts::mips::*;
/// Constants for Unix systems.
pub mod unix {
/// A string describing the family that this operating system belongs
/// to: in this case, `unix`.
pub static FAMILY: &'static str = "unix";
}
/// Constants for Windows systems.
pub mod windows {
/// A string describing the family that this operating system belongs
/// to: in this case, `windows`.
pub static FAMILY: &'static str = "windows";
}
/// Constants for Mac OS systems.
pub mod macos {
/// A string describing the specific operating system in use: in this
/// case, `macos`.
pub static SYSNAME: &'static str = "macos";
/// Specifies the filename prefix used for shared libraries on this
/// platform: in this case, `lib`.
pub static DLL_PREFIX: &'static str = "lib";
/// Specifies the filename suffix used for shared libraries on this
/// platform: in this case, `.dylib`.
pub static DLL_SUFFIX: &'static str = ".dylib";
/// Specifies the file extension used for shared libraries on this
/// platform that goes after the dot: in this case, `dylib`.
pub static DLL_EXTENSION: &'static str = "dylib";
/// Specifies the filename suffix used for executable binaries on this
/// platform: in this case, the empty string.
pub static EXE_SUFFIX: &'static str = "";
/// Specifies the file extension, if any, used for executable binaries
/// on this platform: in this case, the empty string.
pub static EXE_EXTENSION: &'static str = "";
}
/// Constants for FreeBSD systems.
pub mod freebsd {
/// A string describing the specific operating system in use: in this
/// case, `freebsd`.
pub static SYSNAME: &'static str = "freebsd";
/// Specifies the filename prefix used for shared libraries on this
/// platform: in this case, `lib`.
pub static DLL_PREFIX: &'static str = "lib";
/// Specifies the filename suffix used for shared libraries on this
/// platform: in this case, `.so`.
pub static DLL_SUFFIX: &'static str = ".so";
/// Specifies the file extension used for shared libraries on this
/// platform that goes after the dot: in this case, `so`.
pub static DLL_EXTENSION: &'static str = "so";
/// Specifies the filename suffix used for executable binaries on this
/// platform: in this case, the empty string.
pub static EXE_SUFFIX: &'static str = "";
/// Specifies the file extension, if any, used for executable binaries
/// on this platform: in this case, the empty string.
pub static EXE_EXTENSION: &'static str = "";
}
/// Constants for GNU/Linux systems.
pub mod linux {
/// A string describing the specific operating system in use: in this
/// case, `linux`.
pub static SYSNAME: &'static str = "linux";
/// Specifies the filename prefix used for shared libraries on this
/// platform: in this case, `lib`.
pub static DLL_PREFIX: &'static str = "lib";
/// Specifies the filename suffix used for shared libraries on this
/// platform: in this case, `.so`.
pub static DLL_SUFFIX: &'static str = ".so";
/// Specifies the file extension used for shared libraries on this
/// platform that goes after the dot: in this case, `so`.
pub static DLL_EXTENSION: &'static str = "so";
/// Specifies the filename suffix used for executable binaries on this
/// platform: in this case, the empty string.
pub static EXE_SUFFIX: &'static str = "";
/// Specifies the file extension, if any, used for executable binaries
/// on this platform: in this case, the empty string.
pub static EXE_EXTENSION: &'static str = "";
}
/// Constants for Android systems.
pub mod android {
/// A string describing the specific operating system in use: in this
/// case, `android`.
pub static SYSNAME: &'static str = "android";
/// Specifies the filename prefix used for shared libraries on this
/// platform: in this case, `lib`.
pub static DLL_PREFIX: &'static str = "lib";
/// Specifies the filename suffix used for shared libraries on this
/// platform: in this case, `.so`.
pub static DLL_SUFFIX: &'static str = ".so";
/// Specifies the file extension used for shared libraries on this
/// platform that goes after the dot: in this case, `so`.
pub static DLL_EXTENSION: &'static str = "so";
/// Specifies the filename suffix used for executable binaries on this
/// platform: in this case, the empty string.
pub static EXE_SUFFIX: &'static str = "";
/// Specifies the file extension, if any, used for executable binaries
/// on this platform: in this case, the empty string.
pub static EXE_EXTENSION: &'static str = "";
}
/// Constants for 32-bit or 64-bit Windows systems.
pub mod win32 {
/// A string describing the specific operating system in use: in this
/// case, `win32`.
pub static SYSNAME: &'static str = "win32";
/// Specifies the filename prefix used for shared libraries on this
/// platform: in this case, the empty string.
pub static DLL_PREFIX: &'static str = "";
/// Specifies the filename suffix used for shared libraries on this
/// platform: in this case, `.dll`.
pub static DLL_SUFFIX: &'static str = ".dll";
/// Specifies the file extension used for shared libraries on this
/// platform that goes after the dot: in this case, `dll`.
pub static DLL_EXTENSION: &'static str = "dll";
/// Specifies the filename suffix used for executable binaries on this
/// platform: in this case, `.exe`.
pub static EXE_SUFFIX: &'static str = ".exe";
/// Specifies the file extension, if any, used for executable binaries
/// on this platform: in this case, `exe`.
pub static EXE_EXTENSION: &'static str = "exe";
}
/// Constants for Intel Architecture-32 (x86) architectures.
pub mod x86 {
/// A string describing the architecture in use: in this case, `x86`.
pub static ARCH: &'static str = "x86";
}
/// Constants for Intel 64/AMD64 (x86-64) architectures.
pub mod x86_64 {
/// A string describing the architecture in use: in this case,
/// `x86_64`.
pub static ARCH: &'static str = "x86_64";
}
/// Constants for Advanced RISC Machine (ARM) architectures.
pub mod arm {
        /// A string describing the architecture in use: in this case, `arm`.
pub static ARCH: &'static str = "arm";
}
/// Constants for Microprocessor without Interlocked Pipeline Stages
/// (MIPS) architectures.
pub mod mips {
        /// A string describing the architecture in use: in this case, `mips`.
pub static ARCH: &'static str = "mips";
}
}
#[cfg(test)]
mod tests {
use prelude::*;
use c_str::ToCStr;
use option;
use os::{env, getcwd, getenv, make_absolute, args};
use os::{setenv, unsetenv};
use os;
use rand::Rng;
use rand;
#[test]
pub fn last_os_error() {
debug!("{}", os::last_os_error());
}
#[test]
pub fn test_args() {
let a = args();
assert!(a.len() >= 1);
}
fn make_rand_name() -> ~str {
let mut rng = rand::task_rng();
let n = ~"TEST" + rng.gen_ascii_str(10u);
assert!(getenv(n).is_none());
n
}
#[test]
fn test_setenv() {
let n = make_rand_name();
setenv(n, "VALUE");
assert_eq!(getenv(n), option::Some(~"VALUE"));
}
#[test]
fn test_unsetenv() {
let n = make_rand_name();
setenv(n, "VALUE");
unsetenv(n);
assert_eq!(getenv(n), option::None);
}
#[test]
#[ignore]
fn test_setenv_overwrite() {
let n = make_rand_name();
setenv(n, "1");
setenv(n, "2");
assert_eq!(getenv(n), option::Some(~"2"));
setenv(n, "");
assert_eq!(getenv(n), option::Some(~""));
}
// Windows GetEnvironmentVariable requires some extra work to make sure
// the buffer the variable is copied into is the right size
#[test]
#[ignore]
fn test_getenv_big() {
let mut s = ~"";
let mut i = 0;
while i < 100 {
s = s + "aaaaaaaaaa";
i += 1;
}
let n = make_rand_name();
setenv(n, s);
debug!("{}", s.clone());
assert_eq!(getenv(n), option::Some(s));
}
#[test]
fn test_self_exe_name() {
let path = os::self_exe_name();
assert!(path.is_some());
let path = path.unwrap();
debug!("{:?}", path.clone());
// Hard to test this function
assert!(path.is_absolute());
}
#[test]
fn test_self_exe_path() {
let path = os::self_exe_path();
assert!(path.is_some());
let path = path.unwrap();
debug!("{:?}", path.clone());
// Hard to test this function
assert!(path.is_absolute());
}
#[test]
#[ignore]
fn test_env_getenv() {
let e = env();
assert!(e.len() > 0u);
for p in e.iter() {
let (n, v) = (*p).clone();
debug!("{:?}", n.clone());
let v2 = getenv(n);
// MingW seems to set some funky environment variables like
// "=C:=C:\MinGW\msys\1.0\bin" and "!::=::\" that are returned
// from env() but not visible from getenv().
assert!(v2.is_none() || v2 == option::Some(v));
}
}
#[test]
fn test_env_set_get_huge() {
let n = make_rand_name();
let s = "x".repeat(10000);
setenv(n, s);
assert_eq!(getenv(n), Some(s));
unsetenv(n);
assert_eq!(getenv(n), None);
}
#[test]
fn test_env_setenv() {
let n = make_rand_name();
let mut e = env();
setenv(n, "VALUE");
assert!(!e.contains(&(n.clone(), ~"VALUE")));
e = env();
assert!(e.contains(&(n, ~"VALUE")));
}
#[test]
fn test() {
assert!((!Path::new("test-path").is_absolute()));
let cwd = getcwd();
debug!("Current working directory: {}", cwd.display());
debug!("{:?}", make_absolute(&Path::new("test-path")));
debug!("{:?}", make_absolute(&Path::new("/usr/bin")));
}
#[test]
#[cfg(unix)]
fn homedir() {
let oldhome = getenv("HOME");
setenv("HOME", "/home/MountainView");
assert!(os::homedir() == Some(Path::new("/home/MountainView")));
setenv("HOME", "");
assert!(os::homedir().is_none());
for s in oldhome.iter() { setenv("HOME", *s) }
}
#[test]
#[cfg(windows)]
fn homedir() {
let oldhome = getenv("HOME");
let olduserprofile = getenv("USERPROFILE");
setenv("HOME", "");
setenv("USERPROFILE", "");
assert!(os::homedir().is_none());
setenv("HOME", "/home/MountainView");
assert!(os::homedir() == Some(Path::new("/home/MountainView")));
setenv("HOME", "");
setenv("USERPROFILE", "/home/MountainView");
assert!(os::homedir() == Some(Path::new("/home/MountainView")));
setenv("HOME", "/home/MountainView");
setenv("USERPROFILE", "/home/PaloAlto");
assert!(os::homedir() == Some(Path::new("/home/MountainView")));
for s in oldhome.iter() { setenv("HOME", *s) }
for s in olduserprofile.iter() { setenv("USERPROFILE", *s) }
}
#[test]
fn memory_map_rw() {
use result::{Ok, Err};
let chunk = match os::MemoryMap::new(16, [
os::MapReadable,
os::MapWritable
]) {
Ok(chunk) => chunk,
Err(msg) => fail!("{}", msg)
};
assert!(chunk.len >= 16);
unsafe {
*chunk.data = 0xBE;
assert!(*chunk.data == 0xBE);
}
}
#[test]
fn memory_map_file() {
use result::{Ok, Err};
use os::*;
use libc::*;
use io::fs;
#[cfg(unix)]
fn lseek_(fd: c_int, size: uint) {
unsafe {
assert!(lseek(fd, size as off_t, SEEK_SET) == size as off_t);
}
}
#[cfg(windows)]
fn lseek_(fd: c_int, size: uint) {
unsafe {
assert!(lseek(fd, size as c_long, SEEK_SET) == size as c_long);
}
}
let mut path = tmpdir();
path.push("mmap_file.tmp");
let size = MemoryMap::granularity() * 2;
let fd = unsafe {
let fd = path.with_c_str(|path| {
open(path, O_CREAT | O_RDWR | O_TRUNC, S_IRUSR | S_IWUSR)
});
lseek_(fd, size);
"x".with_c_str(|x| assert!(write(fd, x as *c_void, 1) == 1));
fd
};
let chunk = match MemoryMap::new(size / 2, [
MapReadable,
MapWritable,
MapFd(fd),
MapOffset(size / 2)
]) {
Ok(chunk) => chunk,
Err(msg) => fail!("{}", msg)
};
assert!(chunk.len > 0);
unsafe {
*chunk.data = 0xbe;
assert!(*chunk.data == 0xbe);
close(fd);
}
drop(chunk);
fs::unlink(&path).unwrap();
}
// More recursive_mkdir tests are in extra::tempfile
}

# ---- cli.py ----
"""
Command line interface for cobbler.
Copyright 2006-2009, Red Hat, Inc and Others
Michael DeHaan <michael.dehaan AT gmail>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA
"""
import sys
import xmlrpclib
import traceback
import optparse
import exceptions
import time
import os
import utils
import module_loader
import item_distro
import item_profile
import item_system
import item_repo
import item_image
import item_mgmtclass
import item_package
import item_file
import settings
OBJECT_ACTIONS_MAP = {
"distro" : "add copy edit find list remove rename report".split(" "),
"profile" : "add copy dumpvars edit find getks list remove rename report".split(" "),
"system" : "add copy dumpvars edit find getks list remove rename report poweron poweroff powerstatus reboot".split(" "),
"image" : "add copy edit find list remove rename report".split(" "),
"repo" : "add copy edit find list remove rename report".split(" "),
"mgmtclass" : "add copy edit find list remove rename report".split(" "),
"package" : "add copy edit find list remove rename report".split(" "),
"file" : "add copy edit find list remove rename report".split(" "),
"setting" : "edit report".split(" "),
"signature" : "reload report update".split(" "),
}
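# e.g. the command line "cobbler profile rename --name=a --newname=b" is
# resolved through this map to object type "profile" and action "rename"
# (illustrative; see get_object_type/get_object_action below)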
OBJECT_TYPES = OBJECT_ACTIONS_MAP.keys()
# would like to use from_iterable here, but have to support python 2.4
OBJECT_ACTIONS = []
for actions in OBJECT_ACTIONS_MAP.values():
OBJECT_ACTIONS += actions
DIRECT_ACTIONS = "aclsetup buildiso import list replicate report reposync sync validateks version".split()
####################################################
def report_items(remote, otype):
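    """
    Print a report for every item of the given object type. Settings and
    signatures get special formatting; all other types are printed one at
    a time via report_item().
    """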
if otype == "setting":
items = remote.get_settings()
keys = items.keys()
keys.sort()
for key in keys:
item = {'name':key, 'value':items[key]}
report_item(remote,otype,item=item)
elif otype == "signature":
items = remote.get_signatures()
total_breeds = 0
total_sigs = 0
if items.has_key("breeds"):
print "Currently loaded signatures:"
bkeys = items["breeds"].keys()
bkeys.sort()
total_breeds = len(bkeys)
for breed in bkeys:
print "%s:" % breed
oskeys = items["breeds"][breed].keys()
oskeys.sort()
if len(oskeys) > 0:
total_sigs += len(oskeys)
for osversion in oskeys:
print "\t%s" % osversion
else:
print "\t(none)"
print "\n%d breeds with %d total signatures loaded" % (total_breeds,total_sigs)
else:
print "No breeds found in the signature, a signature update is recommended"
sys.exit(1)
else:
items = remote.get_items(otype)
for x in items:
            report_item(remote,otype,item=x)

def report_item(remote,otype,item=None,name=None):
    if item is None:
        if otype == "setting":
cur_settings = remote.get_settings()
try:
item = {'name':name, 'value':cur_settings[name]}
except:
print "Setting not found: %s" % name
sys.exit(1)
elif otype == "signature":
items = remote.get_signatures()
total_sigs = 0
if items.has_key("breeds"):
print "Currently loaded signatures:"
if items["breeds"].has_key(name):
print "%s:" % name
oskeys = items["breeds"][name].keys()
oskeys.sort()
if len(oskeys) > 0:
total_sigs += len(oskeys)
for osversion in oskeys:
print "\t%s" % osversion
else:
print "\t(none)"
print "\nBreed '%s' has %d total signatures" % (name,total_sigs)
else:
print "No breed named '%s' found" % name
sys.exit(1)
else:
print "No breeds found in the signature, a signature update is recommended"
sys.exit(1)
return
else:
item = remote.get_item(otype, name)
if item == "~":
print "No %s found: %s" % (otype, name)
sys.exit(1)
if otype == "distro":
data = utils.printable_from_fields(item, item_distro.FIELDS)
elif otype == "profile":
data = utils.printable_from_fields(item, item_profile.FIELDS)
elif otype == "system":
data = utils.printable_from_fields(item, item_system.FIELDS)
elif otype == "repo":
data = utils.printable_from_fields(item, item_repo.FIELDS)
elif otype == "image":
data = utils.printable_from_fields(item, item_image.FIELDS)
elif otype == "mgmtclass":
data = utils.printable_from_fields(item,item_mgmtclass.FIELDS)
elif otype == "package":
data = utils.printable_from_fields(item,item_package.FIELDS)
elif otype == "file":
data = utils.printable_from_fields(item,item_file.FIELDS)
elif otype == "setting":
data = "%-40s: %s" % (item['name'],item['value'])
print data
def list_items(remote,otype):
items = remote.get_item_names(otype)
items.sort()
for x in items:
print " %s" % x
def n2s(data):
"""
    Return the empty string in place of None.
"""
if data is None:
return ""
return data
def opt(options, k, defval=""):
"""
Returns an option from an Optparse values instance
"""
try:
data = getattr(options, k)
except:
# FIXME: debug only
# traceback.print_exc()
return defval
return n2s(data)
class BootCLI:
def __init__(self):
# Load server ip and ports from local config
self.url_cobbler_api = utils.local_get_cobbler_api_url()
self.url_cobbler_xmlrpc = utils.local_get_cobbler_xmlrpc_url()
# FIXME: allow specifying other endpoints, and user+pass
self.parser = optparse.OptionParser()
self.remote = xmlrpclib.Server(self.url_cobbler_api)
self.shared_secret = utils.get_shared_secret()
def start_task(self, name, options):
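        """
        Kick off a background task on the server, e.g.
        start_task("sync", options) calls the remote background_sync(),
        and return the task id so its log can be tailed via follow_task().
        """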
options = utils.strip_none(vars(options), omit_none=True)
fn = getattr(self.remote, "background_%s" % name)
return fn(options, self.token)
def get_object_type(self, args):
"""
If this is a CLI command about an object type, e.g. "cobbler distro add", return the type, like "distro"
"""
if len(args) < 2:
return None
elif args[1] in OBJECT_TYPES:
return args[1]
return None
def get_object_action(self, object_type, args):
"""
If this is a CLI command about an object type, e.g. "cobbler distro add", return the action, like "add"
"""
if object_type is None or len(args) < 3:
return None
if args[2] in OBJECT_ACTIONS_MAP[object_type]:
return args[2]
return None
def get_direct_action(self, object_type, args):
"""
If this is a general command, e.g. "cobbler hardlink", return the action, like "hardlink"
"""
if object_type is not None:
return None
elif len(args) < 2:
return None
elif args[1] == "--help":
return None
elif args[1] == "--version":
return "version"
else:
return args[1]
def check_setup(self):
"""
Detect permissions and service accessibility problems and provide
nicer error messages for them.
"""
s = xmlrpclib.Server(self.url_cobbler_xmlrpc)
try:
s.ping()
except:
print >> sys.stderr, "cobblerd does not appear to be running/accessible"
sys.exit(411)
s = xmlrpclib.Server(self.url_cobbler_api)
try:
s.ping()
except:
print >> sys.stderr, "httpd does not appear to be running and proxying cobbler, or SELinux is in the way. Original traceback:"
traceback.print_exc()
sys.exit(411)
if not os.path.exists("/var/lib/cobbler/web.ss"):
print >> sys.stderr, "Missing login credentials file. Has cobblerd failed to start?"
sys.exit(411)
if not os.access("/var/lib/cobbler/web.ss", os.R_OK):
print >> sys.stderr, "User cannot run command line, need read access to /var/lib/cobbler/web.ss"
sys.exit(411)
def run(self, args):
"""
Process the command line and do what the user asks.
"""
self.token = self.remote.login("", self.shared_secret)
object_type = self.get_object_type(args)
object_action = self.get_object_action(object_type, args)
direct_action = self.get_direct_action(object_type, args)
try:
if object_type is not None:
if object_action is not None:
self.object_command(object_type, object_action)
else:
self.print_object_help(object_type)
elif direct_action is not None:
self.direct_command(direct_action)
else:
self.print_help()
except xmlrpclib.Fault, err:
if err.faultString.find("cobbler.cexceptions.CX") != -1:
print self.cleanup_fault_string(err.faultString)
else:
print "### ERROR ###"
print "Unexpected remote error, check the server side logs for further info"
print err.faultString
sys.exit(1)
def cleanup_fault_string(self,str):
"""
        Make a remote exception nicely readable by humans so it's not evident
        that it is a remote fault. Users should not have to understand tracebacks.
"""
if str.find(">:") != -1:
(first, rest) = str.split(">:",1)
if rest.startswith("\"") or rest.startswith("\'"):
rest = rest[1:]
if rest.endswith("\"") or rest.endswith("\'"):
rest = rest[:-1]
return rest
else:
return str
def get_fields(self, object_type):
"""
For a given name of an object type, return the FIELDS data structure.
"""
# FIXME: this should be in utils, or is it already?
if object_type == "distro":
return item_distro.FIELDS
elif object_type == "profile":
return item_profile.FIELDS
elif object_type == "system":
return item_system.FIELDS
elif object_type == "repo":
return item_repo.FIELDS
elif object_type == "image":
return item_image.FIELDS
elif object_type == "mgmtclass":
return item_mgmtclass.FIELDS
elif object_type == "package":
return item_package.FIELDS
elif object_type == "file":
return item_file.FIELDS
elif object_type == "setting":
return settings.FIELDS
def object_command(self, object_type, object_action):
"""
Process object-based commands such as "distro add" or "profile rename"
"""
task_id = -1 # if assigned, we must tail the logfile
fields = self.get_fields(object_type)
if object_action in [ "add", "edit", "copy", "rename", "find" ]:
utils.add_options_from_fields(object_type, self.parser, fields, object_action)
elif object_action in [ "list" ]:
pass
elif object_action not in ("reload","update"):
self.parser.add_option("--name", dest="name", help="name of object")
elif object_action == "reload":
self.parser.add_option("--filename", dest="filename", help="filename to load data from")
(options, args) = self.parser.parse_args()
# the first three don't require a name
if object_action == "report":
if options.name is not None:
report_item(self.remote,object_type,None,options.name)
else:
report_items(self.remote,object_type)
elif object_action == "list":
list_items(self.remote, object_type)
elif object_action == "find":
items = self.remote.find_items(object_type, utils.strip_none(vars(options), omit_none=True), "name", False)
for item in items:
print item
elif object_action in OBJECT_ACTIONS:
if opt(options, "name") == "" and object_action not in ("reload","update"):
print "--name is required"
sys.exit(1)
if object_action in [ "add", "edit", "copy", "rename", "remove" ]:
try:
if object_type == "setting":
settings = self.remote.get_settings()
if not settings.get('allow_dynamic_settings',False):
raise RuntimeError("Dynamic settings changes are not enabled. Change the allow_dynamic_settings to 1 and restart cobblerd to enable dynamic settings changes")
elif options.name == 'allow_dynamic_settings':
raise RuntimeError("Cannot modify that setting live")
elif self.remote.modify_setting(options.name,options.value,self.token):
raise RuntimeError("Changing the setting failed")
else:
self.remote.xapi_object_edit(object_type, options.name, object_action, utils.strip_none(vars(options), omit_none=True), self.token)
except xmlrpclib.Fault, (err):
(etype, emsg) = err.faultString.split(":",1)
print emsg[1:-1] # don't print the wrapping quotes
sys.exit(1)
except RuntimeError, (err):
print err.args[0]
sys.exit(1)
elif object_action == "getks":
if object_type == "profile":
data = self.remote.generate_kickstart(options.name,"")
elif object_type == "system":
data = self.remote.generate_kickstart("",options.name)
print data
elif object_action == "dumpvars":
if object_type == "profile":
data = self.remote.get_blended_data(options.name,"")
elif object_type == "system":
data = self.remote.get_blended_data("",options.name)
# FIXME: pretty-printing and sorting here
keys = data.keys()
keys.sort()
for x in keys:
print "%s : %s" % (x, data[x])
elif object_action in [ "poweron", "poweroff", "powerstatus", "reboot" ]:
power={}
power["power"] = object_action.replace("power","")
power["systems"] = [options.name]
task_id = self.remote.background_power_system(power, self.token)
elif object_action == "update":
task_id = self.remote.background_signature_update(utils.strip_none(vars(options),omit_none=True), self.token)
elif object_action == "reload":
filename = opt(options,"filename","/var/lib/cobbler/distro_signatures.json")
if not utils.load_signatures(filename,cache=True):
print "There was an error loading the signature data in %s." % filename
print "Please check the JSON file or run 'cobbler signature update'."
return False
else:
print "Signatures were successfully loaded"
else:
raise exceptions.NotImplementedError()
else:
raise exceptions.NotImplementedError()
# FIXME: add tail/polling code here
if task_id != -1:
self.print_task(task_id)
self.follow_task(task_id)
return True
# BOOKMARK
def direct_command(self, action_name):
"""
Process non-object based commands like "sync" and "hardlink"
"""
task_id = -1 # if assigned, we must tail the logfile
if action_name == "buildiso":
defaultiso = os.path.join(os.getcwd(), "generated.iso")
self.parser.add_option("--iso", dest="iso", default=defaultiso, help="(OPTIONAL) output ISO to this path")
self.parser.add_option("--profiles", dest="profiles", help="(OPTIONAL) use these profiles only")
self.parser.add_option("--systems", dest="systems", help="(OPTIONAL) use these systems only")
self.parser.add_option("--tempdir", dest="buildisodir", help="(OPTIONAL) working directory")
self.parser.add_option("--distro", dest="distro", help="(OPTIONAL) used with --standalone to create a distro-based ISO including all associated profiles/systems")
self.parser.add_option("--standalone", dest="standalone", action="store_true", help="(OPTIONAL) creates a standalone ISO with all required distro files on it")
self.parser.add_option("--source", dest="source", help="(OPTIONAL) used with --standalone to specify a source for the distribution files")
self.parser.add_option("--exclude-dns", dest="exclude_dns", action="store_true", help="(OPTIONAL) prevents addition of name server addresses to the kernel boot options")
self.parser.add_option("--mkisofs-opts", dest="mkisofs_opts", help="(OPTIONAL) extra options for mkisofs")
(options, args) = self.parser.parse_args()
task_id = self.start_task("buildiso",options)
elif action_name == "replicate":
self.parser.add_option("--master", dest="master", help="Cobbler server to replicate from.")
self.parser.add_option("--distros", dest="distro_patterns", help="patterns of distros to replicate")
self.parser.add_option("--profiles", dest="profile_patterns", help="patterns of profiles to replicate")
self.parser.add_option("--systems", dest="system_patterns", help="patterns of systems to replicate")
self.parser.add_option("--repos", dest="repo_patterns", help="patterns of repos to replicate")
self.parser.add_option("--image", dest="image_patterns", help="patterns of images to replicate")
self.parser.add_option("--mgmtclasses", dest="mgmtclass_patterns", help="patterns of mgmtclasses to replicate")
self.parser.add_option("--packages", dest="package_patterns", help="patterns of packages to replicate")
self.parser.add_option("--files", dest="file_patterns", help="patterns of files to replicate")
self.parser.add_option("--omit-data", dest="omit_data", action="store_true", help="do not rsync data")
self.parser.add_option("--sync-all", dest="sync_all", action="store_true", help="sync all data")
self.parser.add_option("--prune", dest="prune", action="store_true", help="remove objects (of all types) not found on the master")
(options, args) = self.parser.parse_args()
task_id = self.start_task("replicate",options)
elif action_name == "aclsetup":
self.parser.add_option("--adduser", dest="adduser", help="give acls to this user")
self.parser.add_option("--addgroup", dest="addgroup", help="give acls to this group")
self.parser.add_option("--removeuser", dest="removeuser", help="remove acls from this user")
self.parser.add_option("--removegroup", dest="removegroup", help="remove acls from this group")
(options, args) = self.parser.parse_args()
task_id = self.start_task("aclsetup",options)
elif action_name == "version":
version = self.remote.extended_version()
print "Cobbler %s" % version["version"]
print " source: %s, %s" % (version["gitstamp"], version["gitdate"])
print " build time: %s" % version["builddate"]
elif action_name == "hardlink":
(options, args) = self.parser.parse_args()
task_id = self.start_task("hardlink",options)
elif action_name == "reserialize":
(options, args) = self.parser.parse_args()
task_id = self.start_task("reserialize",options)
elif action_name == "status":
(options, args) = self.parser.parse_args()
print self.remote.get_status("text",self.token)
elif action_name == "validateks":
(options, args) = self.parser.parse_args()
task_id = self.start_task("validateks",options)
elif action_name == "get-loaders":
self.parser.add_option("--force", dest="force", action="store_true", help="overwrite any existing content in /var/lib/cobbler/loaders")
(options, args) = self.parser.parse_args()
task_id = self.start_task("dlcontent",options)
elif action_name == "import":
self.parser.add_option("--arch", dest="arch", help="OS architecture being imported")
self.parser.add_option("--breed", dest="breed", help="the breed being imported")
self.parser.add_option("--os-version", dest="os_version", help="the version being imported")
self.parser.add_option("--path", dest="path", help="local path or rsync location")
self.parser.add_option("--name", dest="name", help="name, ex 'RHEL-5'")
self.parser.add_option("--available-as", dest="available_as", help="tree is here, don't mirror")
self.parser.add_option("--kickstart", dest="kickstart_file", help="assign this kickstart file")
self.parser.add_option("--rsync-flags", dest="rsync_flags", help="pass additional flags to rsync")
(options, args) = self.parser.parse_args()
task_id = self.start_task("import",options)
elif action_name == "reposync":
self.parser.add_option("--only", dest="only", help="update only this repository name")
self.parser.add_option("--tries", dest="tries", help="try each repo this many times", default=1)
self.parser.add_option("--no-fail", dest="nofail", help="don't stop reposyncing if a failure occurs", action="store_true")
(options, args) = self.parser.parse_args()
task_id = self.start_task("reposync",options)
elif action_name == "aclsetup":
(options, args) = self.parser.parse_args()
# FIXME: missing options, add them here
task_id = self.start_task("aclsetup",options)
elif action_name == "check":
results = self.remote.check(self.token)
ct = 0
if len(results) > 0:
print "The following are potential configuration items that you may want to fix:\n"
for r in results:
ct = ct + 1
print "%s : %s" % (ct, r)
print "\nRestart cobblerd and then run 'cobbler sync' to apply changes."
else:
print "No configuration problems found. All systems go."
elif action_name == "sync":
self.parser.add_option("--verbose", dest="verbose", action="store_true", help="run sync with more output")
(options, args) = self.parser.parse_args()
task_id = self.start_task("sync",options)
elif action_name == "report":
(options, args) = self.parser.parse_args()
print "distros:\n=========="
report_items(self.remote,"distro")
print "\nprofiles:\n=========="
report_items(self.remote,"profile")
print "\nsystems:\n=========="
report_items(self.remote,"system")
print "\nrepos:\n=========="
report_items(self.remote,"repo")
print "\nimages:\n=========="
report_items(self.remote,"image")
print "\nmgmtclasses:\n=========="
report_items(self.remote,"mgmtclass")
print "\npackages:\n=========="
report_items(self.remote,"package")
print "\nfiles:\n=========="
report_items(self.remote,"file")
elif action_name == "list":
# no tree view like 1.6? This is more efficient remotely
# for large configs and prevents transferring the whole config
# though we could consider that...
(options, args) = self.parser.parse_args()
print "distros:"
list_items(self.remote,"distro")
print "\nprofiles:"
list_items(self.remote,"profile")
print "\nsystems:"
list_items(self.remote,"system")
print "\nrepos:"
list_items(self.remote,"repo")
print "\nimages:"
list_items(self.remote,"image")
print "\nmgmtclasses:"
list_items(self.remote,"mgmtclass")
print "\npackages:"
list_items(self.remote,"package")
print "\nfiles:"
list_items(self.remote,"file")
else:
print "No such command: %s" % action_name
sys.exit(1)
# FIXME: run here
# FIXME: add tail/polling code here
if task_id != -1:
self.print_task(task_id)
self.follow_task(task_id)
return True
def print_task(self, task_id):
print "task started: %s" % task_id
events = self.remote.get_events()
(etime, name, status, who_viewed) = events[task_id]
atime = time.asctime(time.localtime(etime))
print "task started (id=%s, time=%s)" % (name, atime)
def follow_task(self, task_id):
logfile = "/var/log/cobbler/tasks/%s.log" % task_id
# adapted from: http://code.activestate.com/recipes/157035/
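# Poll the file like "tail -f": read to EOF, sleep briefly, seek back to
# the last offset, and retry so lines appended by the running task are
# picked up.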
file = open(logfile,'r')
#Find the size of the file and move to the end
#st_results = os.stat(filename)
#st_size = st_results[6]
#file.seek(st_size)
while 1:
where = file.tell()
line = file.readline()
if line.find("### TASK COMPLETE ###") != -1:
print "*** TASK COMPLETE ***"
sys.exit(0)
if line.find("### TASK FAILED ###") != -1:
print "!!! TASK FAILED !!!"
sys.exit(1)
if not line:
time.sleep(1)
file.seek(where)
else:
if line.find(" | "):
line = line.split(" | ")[-1]
print line, # already has newline
def print_object_help(self, object_type):
"""
Prints the subcommands for a given object, e.g. "cobbler distro --help"
"""
commands = OBJECT_ACTIONS_MAP[object_type]
commands.sort()
print "usage\n====="
for c in commands:
print "cobbler %s %s" % (object_type, c)
sys.exit(2)
def print_help(self):
"""
Prints general-top level help, e.g. "cobbler --help" or "cobbler" or "cobbler command-does-not-exist"
"""
print "usage\n====="
print "cobbler <distro|profile|system|repo|image|mgmtclass|package|file> ... "
print " [add|edit|copy|getks*|list|remove|rename|report] [options|--help]"
print "cobbler <%s> [options|--help]" % "|".join(DIRECT_ACTIONS)
sys.exit(2)
def main():
"""
CLI entry point
"""
cli = BootCLI()
cli.check_setup()
rc = cli.run(sys.argv)
if rc == True or rc is None:
sys.exit(0)
elif rc == False:
sys.exit(1)
return sys.exit(rc)
if __name__ == "__main__":
main()<|fim▁end|> |
def report_item(remote,otype,item=None,name=None):
if item is None: |
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>#include <SFML/Graphics.hpp>
#include <SFML/Audio.hpp>
#include <iostream>
int main()
{<|fim▁hole|>
sf::Music sound;
if (!sound.openFromFile("C:/Users/Alpha/Documents/Donguili/bin/Debug/test.ogg"))
return -1; // error
sound.play();
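// Note: sf::Music streams the file from a background thread, so play()
// returns immediately and the event loop below keeps running.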
while (window.isOpen())
{
sf::Event event;
while (window.pollEvent(event))
{
if (event.type == sf::Event::Closed)
window.close();
}
// Key press handling
if (sf::Keyboard::isKeyPressed(sf::Keyboard::Space))
{
std::cout << "Music paused" << std::endl;
sound.pause();
}
if (sf::Keyboard::isKeyPressed(sf::Keyboard::P))
{
std::cout << "Music playing" << std::endl;
sound.play();
}
// End key press handling
window.clear();
window.display();
}
return 0;
}<|fim▁end|> | sf::RenderWindow window(sf::VideoMode(200, 200), "Donguili"); |
<|file_name|>authorization_client.go<|end_file_name|><|fim▁begin|>/*
Copyright The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by client-gen. DO NOT EDIT.
package v1
import (
v1 "k8s.io/api/authorization/v1"
serializer "k8s.io/apimachinery/pkg/runtime/serializer"
"k8s.io/client-go/kubernetes/scheme"
rest "k8s.io/client-go/rest"
)
type AuthorizationV1Interface interface {
RESTClient() rest.Interface
LocalSubjectAccessReviewsGetter
SelfSubjectAccessReviewsGetter
SelfSubjectRulesReviewsGetter
SubjectAccessReviewsGetter
}
// AuthorizationV1Client is used to interact with features provided by the authorization.k8s.io group.
type AuthorizationV1Client struct {
restClient rest.Interface
}
func (c *AuthorizationV1Client) LocalSubjectAccessReviews(namespace string) LocalSubjectAccessReviewInterface {
return newLocalSubjectAccessReviews(c, namespace)
}
func (c *AuthorizationV1Client) SelfSubjectAccessReviews() SelfSubjectAccessReviewInterface {
return newSelfSubjectAccessReviews(c)
}
func (c *AuthorizationV1Client) SelfSubjectRulesReviews() SelfSubjectRulesReviewInterface {
return newSelfSubjectRulesReviews(c)
}
func (c *AuthorizationV1Client) SubjectAccessReviews() SubjectAccessReviewInterface {
return newSubjectAccessReviews(c)
}
// NewForConfig creates a new AuthorizationV1Client for the given config.
func NewForConfig(c *rest.Config) (*AuthorizationV1Client, error) {
config := *c
if err := setConfigDefaults(&config); err != nil {
return nil, err
}
client, err := rest.RESTClientFor(&config)
if err != nil {
return nil, err<|fim▁hole|>}
// NewForConfigOrDie creates a new AuthorizationV1Client for the given config and
// panics if there is an error in the config.
func NewForConfigOrDie(c *rest.Config) *AuthorizationV1Client {
client, err := NewForConfig(c)
if err != nil {
panic(err)
}
return client
}
// New creates a new AuthorizationV1Client for the given RESTClient.
func New(c rest.Interface) *AuthorizationV1Client {
return &AuthorizationV1Client{c}
}
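// Example (illustrative sketch, not part of the generated API): building a
// client from an in-cluster config and obtaining the SelfSubjectAccessReview
// interface. Error handling elided.
//
//	cfg, _ := rest.InClusterConfig()
//	authz, _ := NewForConfig(cfg)
//	_ = authz.SelfSubjectAccessReviews()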
func setConfigDefaults(config *rest.Config) error {
gv := v1.SchemeGroupVersion
config.GroupVersion = &gv
config.APIPath = "/apis"
config.NegotiatedSerializer = serializer.DirectCodecFactory{CodecFactory: scheme.Codecs}
if config.UserAgent == "" {
config.UserAgent = rest.DefaultKubernetesUserAgent()
}
return nil
}
// RESTClient returns a RESTClient that is used to communicate
// with API server by this client implementation.
func (c *AuthorizationV1Client) RESTClient() rest.Interface {
if c == nil {
return nil
}
return c.restClient
}<|fim▁end|> | }
return &AuthorizationV1Client{client}, nil |
<|file_name|>just_detach_bem.py<|end_file_name|><|fim▁begin|>import lms_code.lib.rep2 as rep2
from lms_code.analysis.run_bem import bemify, boundary_conditions,\
assemble, constrain, solve, evaluate_surface_disp
from lms_code.analysis.simplified_bem import create_surface_mesh, \
set_params
from codim1.core import simple_line_mesh, combine_meshes, ray_mesh
def create_fault_mesh(d):
top_fault_vert = [0, -1e9]
top = d['intersection_pt']
joint = [4.20012e5 + 1.6, -2.006e4 - 5]
bottom = [3.09134e5 + 1.1, -2.3376e4 - 3]
detach = simple_line_mesh(d['fault_elements'], bottom, joint)<|fim▁hole|>
if __name__ == "__main__":
d = dict()
set_params(d)
create_fault_mesh(d)
create_surface_mesh(d)
bemify(d)
boundary_conditions(d)
assemble(d)
# constrain(d)
solve(d)
evaluate_surface_disp(d)
rep2.save("bem_just_detach", d)<|fim▁end|> | d['fault_mesh'] = detach |
<|file_name|>config.py<|end_file_name|><|fim▁begin|>import os
import sys
import imp
import logging
from collections import namedtuple
"""
Objects used to configure Glue at runtime.
"""
__all__ = ['Registry', 'SettingRegistry', 'ExporterRegistry',
'ColormapRegistry', 'DataFactoryRegistry', 'QtClientRegistry',
'LinkFunctionRegistry', 'LinkHelperRegistry',
'ProfileFitterRegistry',
'qt_client', 'data_factory', 'link_function', 'link_helper',
'colormaps',
'exporters', 'settings', 'fit_plugin']
class Registry(object):
"""Container to hold groups of objects or settings.
Registry instances are used by Glue to track objects
used for various tasks like data linking, widget creation, etc.
They have the following properties:
- A `members` property, which lists each item in the registry
- A `default_members` function, which can be overridden to lazily
initialize the members list
- A call interface, allowing the instance to be used as a decorator
for users to add new items to the registry in their config files
"""
def __init__(self):
self._members = []
self._loaded = False
@property
def members(self):
""" A list of the members in the registry.
The return value is a list. The contents of the list
are specified in each subclass"""
if not self._loaded:
self._members = self.default_members() + self._members
self._loaded = True
return self._members
def default_members(self):
"""The member items provided by default. These are put in this
method so that code is only imported when needed"""
return []
def add(self, value):
""" Add a new item to the registry """
self._members.append(value)
def __iter__(self):
return iter(self.members)
def __len__(self):
return len(self.members)
def __contains__(self, value):
return value in self.members
def __call__(self, arg):
"""This is provided so that registry instances can be used
as decorators. The decorators should add the decorated
code object to the registry, and return the original function"""
self.add(arg)
return arg
class SettingRegistry(Registry):
"""Stores key/value settings that code can use to customize Glue
Each member is a tuple of 3 items:
- key: the setting name [str]
- value: the default setting [object]
- validator: A function which tests whether the input is a valid value,
and raises a ValueError if invalid. On valid input,
returns the (possibly sanitized) setting value.
"""
def add(self, key, value, validator=str):
self.members.append((key, value, validator))
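# Example (hypothetical setting name): register a setting whose validator
# coerces the input and raises ValueError on bad values.
#
# def positive_int(value):
#     value = int(value)
#     if value <= 0:
#         raise ValueError("expected a positive integer")
#     return value
#
# settings.add('MAX_SAMPLES', 10000, validator=positive_int)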
class ExporterRegistry(Registry):
"""Stores functions which can export an applocation to an output file
The members property is a list of exporters, each represented
as a (label, save_function, can_save_function, outmode) tuple.
save_function takes an (application, path) as input, and saves
the session
can_save_function takes an application as input, and raises an
exception if saving this session is not possible
outmode is a string, with one of 3 values:
'file': indicates that exporter creates a file
'directory': exporter creates a directory
'label': exporter doesn't write to disk, but needs a label
"""
def default_members(self):
return []
def add(self, label, exporter, checker, outmode='file'):
"""
Add a new exporter
:param label: Short label for the exporter
:type label: str
:param exporter: exporter function
:type exporter: function(application, path)
:param checker: function that checks if save is possible
:type checker: function(application)
``exporter`` should raise an exception if export isn't possible.
:param outmode: What kind of output is created?
:type outmode: str ('file' | 'directory' | 'label')
"""
self.members.append((label, exporter, checker, outmode))
class ColormapRegistry(Registry):
"""Stores colormaps for the Image Viewer. The members property is
a list of colormaps, each represented as a [name,cmap] pair.
"""
def default_members(self):
import matplotlib.cm as cm
members = []
members.append(['Gray', cm.gray])
members.append(['Purple-Blue', cm.PuBu])
members.append(['Yellow-Green-Blue', cm.YlGnBu])
members.append(['Yellow-Orange-Red', cm.YlOrRd])
members.append(['Red-Purple', cm.RdPu])
members.append(['Blue-Green', cm.BuGn])
members.append(['Hot', cm.hot])
members.append(['Red-Blue', cm.RdBu])
members.append(['Red-Yellow-Blue', cm.RdYlBu])
members.append(['Purple-Orange', cm.PuOr])
members.append(['Purple-Green', cm.PRGn])
return members
def add(self, label, cmap):
"""
Add colormap *cmap* with label *label*.
"""
self.members.append([label, cmap])
class DataFactoryRegistry(Registry):
"""Stores data factories. Data factories take filenames as input,
and return :class:`~glue.core.data.Data` instances
The members property returns a list of (function, label, identifier)
namedtuples:
- Function is the factory that creates the data object
- label is a short human-readable description of the factory
- identifier is a function that takes ``(filename, **kwargs)`` as input
and returns True if the factory can open the file
New data factories can be registered via::
@data_factory('label_name', identifier, default='txt')
def new_factory(file_name):
...
This has the additional side-effect of associating
this factory with filenames ending in ``txt`` by default
"""
item = namedtuple('DataFactory', 'function label identifier')
def default_members(self):
from .core.data_factories import __factories__
return [self.item(f, f.label, f.identifier) for f in __factories__]
def __call__(self, label, identifier, default=''):
from .core.data_factories import set_default_factory
def adder(func):
set_default_factory(default, func)
self.add(self.item(func, label, identifier))
return func
return adder
class QtClientRegistry(Registry):
"""Stores QT widgets to visualize data.
The members property is a list of Qt widget classes
New widgets can be registered via::
@qt_client
class CustomWidget(QMainWindow):
...
"""
def default_members(self):
try:
from .qt.widgets.scatter_widget import ScatterWidget
from .qt.widgets.image_widget import ImageWidget
from .qt.widgets.histogram_widget import HistogramWidget
return [ScatterWidget, ImageWidget, HistogramWidget]
except ImportError:
logging.getLogger(__name__).warning(
"could not import glue.qt in ConfigObject")
return []
class LinkFunctionRegistry(Registry):
"""Stores functions to convert between quantities
The members property is a list of (function, info_string,
output_labels) namedtuples. `info_string` describes what the
function does. `output_labels` is a list of names for each output.
New link functions can be registered via
@link_function(info="maps degrees to arcseconds",
output_labels=['arcsec'])
def degrees2arcsec(degrees):
return degrees * 3600
Link functions are expected to receive and return numpy arrays
"""
item = namedtuple('LinkFunction', 'function info output_labels')
def default_members(self):
from .core import link_helpers
return list(self.item(l, "", l.output_args)
for l in link_helpers.__LINK_FUNCTIONS__)
def __call__(self, info="", output_labels=None):
out = output_labels or []
def adder(func):
self.add(self.item(func, info, out))
return func
return adder
class LinkHelperRegistry(Registry):
"""Stores helper objects that compute many ComponentLinks at once
The members property is a list of (object, info_string,
input_labels) tuples. `Object` is the link helper. `info_string`
describes what `object` does. `input_labels` is a list labeling
the inputs.
Each link helper takes a list of ComponentIDs as inputs, and
returns an iterable object (e.g. list) of ComponentLinks.
New helpers can be registered via
@link_helper('Links degrees and arcseconds in both directions',
['degree', 'arcsecond'])
def new_helper(degree, arcsecond):
return [ComponentLink([degree], arcsecond, using=lambda d: d*3600),
ComponentLink([arcsecond], degree, using=lambda a: a/3600)]
"""
item = namedtuple('LinkHelper', 'helper info input_labels')
def default_members(self):
from .core.link_helpers import __LINK_HELPERS__ as helpers
return list(self.item(l, l.info_text, l.input_args)
for l in helpers)
def __call__(self, info, input_labels):
def adder(func):
self.add(self.item(func, info, input_labels))
return func
return adder
class ProfileFitterRegistry(Registry):
item = namedtuple('ProfileFitter', 'cls')
def add(self, cls):
"""
Add profile fitter class *cls* to the registry.
"""
self.members.append(cls)
def default_members(self):
from .core.fitters import __FITTERS__
return list(__FITTERS__)
qt_client = QtClientRegistry()
data_factory = DataFactoryRegistry()
link_function = LinkFunctionRegistry()
link_helper = LinkHelperRegistry()
colormaps = ColormapRegistry()
exporters = ExporterRegistry()
settings = SettingRegistry()
fit_plugin = ProfileFitterRegistry()
def load_configuration(search_path=None):
''' Find and import a config.py file
Returns:
The module object
Raises:
Exception, if no module was found
'''
search_order = search_path or _default_search_order()
result = imp.new_module('config')
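# Start from an empty placeholder module; if no config file on the search
# path loads successfully, this empty module is returned unchanged.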
for config_file in search_order:
dir = os.path.dirname(config_file)
try:
sys.path.append(dir)
config = imp.load_source('config', config_file)
result = config
except IOError:
pass
except Exception as e:
raise Exception("Error loading config file %s:\n%s" %
(config_file, e))
finally:
sys.path.remove(dir)
return result
def _default_search_order():
"""<|fim▁hole|> * current working directory
* environ var GLUERC
* HOME/.glue/config.py
* Glue's own default config
"""
search_order = [os.path.join(os.getcwd(), 'config.py')]
if 'GLUERC' in os.environ:
search_order.append(os.environ['GLUERC'])
search_order.append(os.path.expanduser('~/.glue/config.py'))
return search_order[::-1]<|fim▁end|> | The default configuration file search order:
|
<|file_name|>default_logger_test.go<|end_file_name|><|fim▁begin|>package log
import (
"bytes"
"io"
stdlog "log"
"strings"
"sync"
"testing"
"time"
)
// NOTES:
// - Run "go test" to run tests
// - Run "gocov test | gocov report" to report on test converage by file
// - Run "gocov test | gocov annotate -" to report on all code and functions, those ,marked with "MISS" were never called
//
// or
// -- may be a good idea to change to output path to somewherelike /tmp
// go test -coverprofile cover.out && go tool cover -html=cover.out -o cover.html
func TestDefaultLogger(t *testing.T) {
SetExitFunc(func(int) {})
tests := getConsoleLoggerColorTests()
buff := new(buffer)
defaultLoggerWriter = buff
defaultLoggerTimeFormat = ""
cLog := newDefaultLogger()
defer func() { cLog.Close() }()
AddHandler(cLog, AllLevels...)
for i, tt := range tests {
buff.Reset()
var l Entry
if tt.flds != nil {
l = l.WithFields(tt.flds...)
}
switch tt.lvl {
case DebugLevel:
if len(tt.printf) == 0 {
l.Debug(tt.msg)
} else {
l.Debugf(tt.printf, tt.msg)
}
case InfoLevel:
if len(tt.printf) == 0 {
l.Info(tt.msg)
} else {
l.Infof(tt.printf, tt.msg)
}
case NoticeLevel:
if len(tt.printf) == 0 {
l.Notice(tt.msg)
} else {
l.Noticef(tt.printf, tt.msg)
}
case WarnLevel:
if len(tt.printf) == 0 {
l.Warn(tt.msg)
} else {
l.Warnf(tt.printf, tt.msg)
}
case ErrorLevel:
if len(tt.printf) == 0 {
l.Error(tt.msg)
} else {
l.Errorf(tt.printf, tt.msg)
}
case PanicLevel:
func() {
defer func() {
_ = recover()
}()
if len(tt.printf) == 0 {
l.Panic(tt.msg)
} else {
l.Panicf(tt.printf, tt.msg)
}
}()
case AlertLevel:
if len(tt.printf) == 0 {
l.Alert(tt.msg)
} else {
l.Alertf(tt.printf, tt.msg)
}
}
if buff.String() != tt.want {
t.Errorf("test %d: Expected '%s' Got '%s'", i, tt.want, buff.String())
}
}
}
func TestConsoleSTDLogCapturing(t *testing.T) {
buff := new(buffer)
defaultLoggerWriter = buff
defaultLoggerTimeFormat = "MST"
cLog := newDefaultLogger()
defer func() { cLog.Close() }()
AddHandler(cLog, AllLevels...)
stdlog.Println("STD LOG message")
time.Sleep(1000 * time.Millisecond)
s := buff.String()
expected := "STD LOG message"
if !strings.Contains(s, expected) {
t.Errorf("Expected '%s' Got '%s'", expected, s)
}
}
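// Note: the `want` strings below embed ANSI SGR escape sequences
// (`\x1b[32m` selects green, `\x1b[0m` resets attributes, `\x1b[4m`
// underlines); each expectation must match the console handler's colored
// output byte-for-byte.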
func getConsoleLoggerColorTests() []test {
return []test{
{
lvl: DebugLevel,
msg: "debugf",
printf: "%s",
flds: nil,
want: " [32m DEBUG[0m debugf\n",
},
{
lvl: DebugLevel,
msg: "debug",
flds: nil,
want: " [32m DEBUG[0m debug\n",
},
{
lvl: InfoLevel,
msg: "infof",
printf: "%s",
flds: nil,
want: " [34m INFO[0m infof\n",
},
{
lvl: InfoLevel,
msg: "info",
flds: nil,
want: " [34m INFO[0m info\n",
},
{
lvl: NoticeLevel,
msg: "noticef",
printf: "%s",
flds: nil,
want: " [36;1mNOTICE[0m noticef\n",
},
{<|fim▁hole|> },
{
lvl: WarnLevel,
msg: "warnf",
printf: "%s",
flds: nil,
want: " [33;1m WARN[0m warnf\n",
},
{
lvl: WarnLevel,
msg: "warn",
flds: nil,
want: " [33;1m WARN[0m warn\n",
},
{
lvl: ErrorLevel,
msg: "errorf",
printf: "%s",
flds: nil,
want: " [31;1m ERROR[0m errorf\n",
},
{
lvl: ErrorLevel,
msg: "error",
flds: nil,
want: " [31;1m ERROR[0m error\n",
},
{
lvl: AlertLevel,
msg: "alertf",
printf: "%s",
flds: nil,
want: " [31m[4m ALERT[0m alertf\n",
},
{
lvl: AlertLevel,
msg: "alert",
flds: nil,
want: " [31m[4m ALERT[0m alert\n",
},
{
lvl: PanicLevel,
msg: "panicf",
printf: "%s",
flds: nil,
want: " [31m PANIC[0m panicf\n",
},
{
lvl: PanicLevel,
msg: "panic",
flds: nil,
want: " [31m PANIC[0m panic\n",
},
{
lvl: DebugLevel,
msg: "debugf",
printf: "%s",
flds: []Field{
F("key", "value"),
},
want: " [32m DEBUG[0m debugf [32mkey[0m=value\n",
},
{
lvl: DebugLevel,
msg: "debug",
flds: []Field{
F("key", "value"),
},
want: " [32m DEBUG[0m debug [32mkey[0m=value\n",
},
{
lvl: InfoLevel,
msg: "infof",
printf: "%s",
flds: []Field{
F("key", "value"),
},
want: " [34m INFO[0m infof [34mkey[0m=value\n",
},
{
lvl: InfoLevel,
msg: "info",
flds: []Field{
F("key", "value"),
},
want: " [34m INFO[0m info [34mkey[0m=value\n",
},
{
lvl: NoticeLevel,
msg: "noticef",
printf: "%s",
flds: []Field{
F("key", "value"),
},
want: " [36;1mNOTICE[0m noticef [36;1mkey[0m=value\n",
},
{
lvl: NoticeLevel,
msg: "notice",
flds: []Field{
F("key", "value"),
},
want: " [36;1mNOTICE[0m notice [36;1mkey[0m=value\n",
},
{
lvl: WarnLevel,
msg: "warnf",
printf: "%s",
flds: []Field{
F("key", "value"),
},
want: " [33;1m WARN[0m warnf [33;1mkey[0m=value\n",
},
{
lvl: WarnLevel,
msg: "warn",
flds: []Field{
F("key", "value"),
},
want: " [33;1m WARN[0m warn [33;1mkey[0m=value\n",
},
{
lvl: ErrorLevel,
msg: "errorf",
printf: "%s",
flds: []Field{
F("key", "value"),
},
want: " [31;1m ERROR[0m errorf [31;1mkey[0m=value\n",
},
{
lvl: ErrorLevel,
msg: "error",
flds: []Field{
F("key", "value"),
},
want: " [31;1m ERROR[0m error [31;1mkey[0m=value\n",
},
{
lvl: AlertLevel,
msg: "alertf",
printf: "%s",
flds: []Field{
F("key", "value"),
},
want: " [31m[4m ALERT[0m alertf [31m[4mkey[0m=value\n",
},
{
lvl: AlertLevel,
msg: "alert",
flds: []Field{
F("key", "value"),
},
want: " [31m[4m ALERT[0m alert [31m[4mkey[0m=value\n",
},
{
lvl: PanicLevel,
msg: "panicf",
printf: "%s",
flds: []Field{
F("key", "value"),
},
want: " [31m PANIC[0m panicf [31mkey[0m=value\n",
},
{
lvl: PanicLevel,
msg: "panic",
flds: []Field{
F("key", "value"),
},
want: " [31m PANIC[0m panic [31mkey[0m=value\n",
},
{
lvl: DebugLevel,
msg: "debug",
flds: []Field{
F("key", "string"),
F("key", int(1)),
F("key", int8(2)),
F("key", int16(3)),
F("key", int32(4)),
F("key", int64(5)),
F("key", uint(1)),
F("key", uint8(2)),
F("key", uint16(3)),
F("key", uint32(4)),
F("key", uint64(5)),
F("key", float32(5.33)),
F("key", float64(5.34)),
F("key", true),
F("key", struct{ value string }{"struct"}),
},
want: " [32m DEBUG[0m debug [32mkey[0m=string [32mkey[0m=1 [32mkey[0m=2 [32mkey[0m=3 [32mkey[0m=4 [32mkey[0m=5 [32mkey[0m=1 [32mkey[0m=2 [32mkey[0m=3 [32mkey[0m=4 [32mkey[0m=5 [32mkey[0m=5.33 [32mkey[0m=5.34 [32mkey[0m=true [32mkey[0m={struct}\n",
},
}
}
type buffer struct {
b bytes.Buffer
m sync.Mutex
}
func (b *buffer) Read(p []byte) (n int, err error) {
b.m.Lock()
defer b.m.Unlock()
return b.b.Read(p)
}
func (b *buffer) Write(p []byte) (n int, err error) {
b.m.Lock()
defer b.m.Unlock()
return b.b.Write(p)
}
func (b *buffer) String() string {
b.m.Lock()
defer b.m.Unlock()
return b.b.String()
}
func (b *buffer) Bytes() []byte {
b.m.Lock()
defer b.m.Unlock()
return b.b.Bytes()
}
func (b *buffer) Cap() int {
b.m.Lock()
defer b.m.Unlock()
return b.b.Cap()
}
func (b *buffer) Grow(n int) {
b.m.Lock()
defer b.m.Unlock()
b.b.Grow(n)
}
func (b *buffer) Len() int {
b.m.Lock()
defer b.m.Unlock()
return b.b.Len()
}
func (b *buffer) Next(n int) []byte {
b.m.Lock()
defer b.m.Unlock()
return b.b.Next(n)
}
func (b *buffer) ReadByte() (c byte, err error) {
b.m.Lock()
defer b.m.Unlock()
return b.b.ReadByte()
}
func (b *buffer) ReadBytes(delim byte) (line []byte, err error) {
b.m.Lock()
defer b.m.Unlock()
return b.b.ReadBytes(delim)
}
func (b *buffer) ReadFrom(r io.Reader) (n int64, err error) {
b.m.Lock()
defer b.m.Unlock()
return b.b.ReadFrom(r)
}
func (b *buffer) ReadRune() (r rune, size int, err error) {
b.m.Lock()
defer b.m.Unlock()
return b.b.ReadRune()
}
func (b *buffer) ReadString(delim byte) (line string, err error) {
b.m.Lock()
defer b.m.Unlock()
return b.b.ReadString(delim)
}
func (b *buffer) Reset() {
b.m.Lock()
defer b.m.Unlock()
b.b.Reset()
}
func (b *buffer) Truncate(n int) {
b.m.Lock()
defer b.m.Unlock()
b.b.Truncate(n)
}
func (b *buffer) UnreadByte() error {
b.m.Lock()
defer b.m.Unlock()
return b.b.UnreadByte()
}
func (b *buffer) UnreadRune() error {
b.m.Lock()
defer b.m.Unlock()
return b.b.UnreadRune()
}
func (b *buffer) WriteByte(c byte) error {
b.m.Lock()
defer b.m.Unlock()
return b.b.WriteByte(c)
}
func (b *buffer) WriteRune(r rune) (n int, err error) {
b.m.Lock()
defer b.m.Unlock()
return b.b.WriteRune(r)
}
func (b *buffer) WriteString(s string) (n int, err error) {
b.m.Lock()
defer b.m.Unlock()
return b.b.WriteString(s)
}
func (b *buffer) WriteTo(w io.Writer) (n int64, err error) {
b.m.Lock()
defer b.m.Unlock()
return b.b.WriteTo(w)
}<|fim▁end|> | lvl: NoticeLevel,
msg: "notice",
flds: nil,
want: " [36;1mNOTICE[0m notice\n", |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.