prompt: large_string (lengths 70 to 991k)
completion: large_string (lengths 0 to 1.02k)
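Each row below is a fill-in-the-middle (FIM) sample: a prompt carrying a <|file_name|>...<|end_file_name|> header plus <|fim▁begin|>, <|fim▁hole|> and <|fim▁end|> sentinels, followed by the completion that fills the hole (completions may be empty, per the 0 minimum above). A minimal sketch of how a consumer might stitch a row back into source text, assuming exactly the sentinel strings visible in these rows (the helper name is illustrative):

# Reassemble original source from one (prompt, completion) row.
# Sentinel strings are assumed to match this dump exactly.
def reassemble(prompt: str, completion: str) -> str:
    # Drop the <|file_name|>...<|end_file_name|> header before the FIM prefix.
    body = prompt.split("<|fim▁begin|>", 1)[1]
    prefix, rest = body.split("<|fim▁hole|>", 1)
    suffix = rest.split("<|fim▁end|>", 1)[0]
    return prefix + completion + suffix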
<|file_name|>webpack.config.js<|end_file_name|><|fim▁begin|>var path = require("path"); var webpack = require("webpack"); var ExtractTextPlugin = require('extract-text-webpack-plugin'); var pkg = require(path.join(process.cwd(), 'package.json')); var assetsPath = path.join(__dirname, "public","js"); var publicPath = "/public/"; var commonLoaders = [ { test: /\.js$|\.jsx$/, loaders: ["babel"], include: path.join(__dirname, "app") }, { test: /\.css$/, loader: ExtractTextPlugin.extract( 'css?sourceMap&-restructuring!' + 'autoprefixer-loader' ) }, { test: /\.less$/, loader: ExtractTextPlugin.extract( 'css?sourceMap!' + 'autoprefixer-loader!' + 'less?{"sourceMap":true,"modifyVars":' + JSON.stringify(pkg.theme || {})+'}' ) } ]; module.exports = { // The configuration for the client name: "browser", /* The entry point of the bundle * Entry points for multi page app could be more complex * A good example of entry points would be: * entry: { * pageA: "./pageA", * pageB: "./pageB", * pageC: "./pageC", * adminPageA: "./adminPageA", * adminPageB: "./adminPageB", * adminPageC: "./adminPageC" * } * * We can then proceed to optimize what are the common chunks * plugins: [ * new CommonsChunkPlugin("admin-commons.js", ["adminPageA", "adminPageB"]), * new CommonsChunkPlugin("common.js", ["pageA", "pageB", "admin-commons.js"], 2), * new CommonsChunkPlugin("c-commons.js", ["pageC", "adminPageC"]); * ] */ context: path.join(__dirname, "app"), entry: { app: "./app.js" }, output: { // The output directory as absolute path path: assetsPath, // The filename of the entry chunk as relative path inside the output.path directory filename: "bundle.js", // The output path from the view of the Javascript publicPath: publicPath }, module: { loaders: commonLoaders }, plugins: [ new webpack.HotModuleReplacementPlugin(), new webpack.NoErrorsPlugin(), new ExtractTextPlugin('ant.css', { disable: false, <|fim▁hole|> }), ] };<|fim▁end|>
allChunks: true
<|file_name|>handsontable.full.min.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
version https://git-lfs.github.com/spec/v1 oid sha256:59e6f2fa6c70c504d839d897c45f9a84348faf82342a31fb5818b1deb13861fa size 294301
<|file_name|>model_mutation.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The LUCI Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package tumble import ( "bytes" "context" "crypto/sha1" "encoding/binary" "encoding/gob" "fmt" "math" "reflect" "strings" "sync" "time" "go.chromium.org/luci/appengine/meta" "go.chromium.org/luci/common/clock" "go.chromium.org/luci/common/logging" "go.chromium.org/luci/common/tsmon/field" "go.chromium.org/luci/common/tsmon/metric" ds "go.chromium.org/luci/gae/service/datastore" "go.chromium.org/luci/gae/service/info" ) var registry = map[string]reflect.Type{} var metricCreated = metric.NewCounter( "luci/tumble/mutations/created", "The number of mutations created in tumble", nil, field.String("namespace"), ) // Register allows |mut| to be played by the tumble backend. This should be // called at init() time once for every Mutation implementation. // // This will also gob.Register your mutation implementation. // // Example: // Register((*MyMutationImpl)(nil)) func Register(mut Mutation) { gob.Register(mut) t := reflect.TypeOf(mut) registry[t.String()] = t } // Mutation is the interface that your tumble mutations must implement. // // Mutation implementations can be registered with the Register function. type Mutation interface { // Root returns a datastore.Key which will be used to derive the Key for the // entity group which this Mutation will operate on. This is used to batch // together Entries for more efficient processing. // // Returning nil is an error. Root(c context.Context) *ds.Key // RollForward performs the action of the Mutation. // // It is only considered successful if it returns nil. If it returns non-nil, // then it will be retried at a later time. If it never returns nil, then it // will never be flushed from tumble's queue, and you'll have to manually // delete it or fix the code so that it can be handled without error. // // This method runs inside of a single-group transaction. It must modify only // the entity group specified by Root(). // // As a side effect, RollForward may return new arbitrary Mutations. These // will be committed in the same transaction as RollForward. // // The context contains an implementation of "luci/gae/service/datastore", // using the "luci/gae/filter/txnBuf" transaction buffer. This means that // all functionality (including additional transactions) is available, with // the limitations mentioned by that package (notably, no cursors are // allowed). RollForward(c context.Context) ([]Mutation, error) } // DelayedMutation is a Mutation which allows you to defer its processing // until a certain absolute time. // // As a side effect, tumble will /mostly/ process items in their chronological // ProcessAfter order, instead of the undefined order. // // Your tumble configuration must have DelayedMutations set, and you must have // added the appropriate index to use these. 
If DelayedMutations is not set, // then tumble will ignore the ProcessAfter and HighPriorty values here, and // process mutations as quickly as possible in no particular order. type DelayedMutation interface { Mutation // ProcessAfter will be called once when scheduling this Mutation. The // mutation will be recorded to datastore immediately, but tumble will skip it // for processing until at least the time that's returned here. Multiple calls // to this method should always return the same time. // // A Time value in the past will get reset to "next available time slot", // unless HighPriority() returns true. ProcessAfter() time.Time // HighPriority indicates that this mutation should be processed before // others, if possible, and must be set in conjunction with a ProcessAfter // timestamp that occurs in the past. // // Tumble works by processing Mutations in the order of their creation, or // ProcessAfter times, whichever is later. If HighPriority is true, then a // ProcessAfter time in the past will take precedence over Mutations which // may actually have been recorded after this one, in the event that tumble // is processing tasks slower than they're being created. HighPriority() bool } type realMutation struct { // TODO(riannucci): add functionality to luci/gae/service/datastore so that // GetMeta/SetMeta may be overridden by the struct. _kind string `gae:"$kind,tumble.Mutation"` ID string `gae:"$id"` Parent *ds.Key `gae:"$parent"` ExpandedShard int64 ProcessAfter time.Time TargetRoot *ds.Key Version string Type string Data []byte `gae:",noindex"` } func (r *realMutation) shard(cfg *Config) taskShard { shardCount := cfg.TotalShardCount(r.TargetRoot.Namespace()) expandedShardsPerShard := math.MaxUint64 / shardCount ret := uint64(r.ExpandedShard-math.MinInt64) / expandedShardsPerShard // account for rounding errors on the last shard. if ret >= shardCount { ret = shardCount - 1 } return taskShard{ret, mkTimestamp(cfg, r.ProcessAfter)} } func putMutations(c context.Context, cfg *Config, fromRoot *ds.Key, muts []Mutation, round uint64) ( shardSet map[taskShard]struct{}, mutKeys []*ds.Key, err error) { if len(muts) == 0 { return } version, err := meta.GetEntityGroupVersion(c, fromRoot) if err != nil { return } now := clock.Now(c).UTC() shardSet = map[taskShard]struct{}{} toPut := make([]*realMutation, len(muts)) mutKeys = make([]*ds.Key, len(muts)) for i, m := range muts { id := fmt.Sprintf("%016x_%08x_%08x", version, round, i) toPut[i], err = newRealMutation(c, cfg, id, fromRoot, m, now) if err != nil { logging.Errorf(c, "error creating real mutation for %v: %s", m, err) return } mutKeys[i] = ds.KeyForObj(c, toPut[i]) shardSet[toPut[i].shard(cfg)] = struct{}{} } if err = ds.Put(c, toPut); err != nil { logging.Errorf(c, "error putting %d new mutations: %s", len(toPut), err) } else { metricCreated.Add(c, int64(len(toPut)), fromRoot.Namespace()) } return } var appVersion = struct { sync.Once version string }{} func getAppVersion(c context.Context) string { appVersion.Do(func() { appVersion.version = info.VersionID(c) // AppEngine version is <app-yaml-version>.<unique-upload-id> // // The upload ID prevents version consistency between different AppEngine // modules, which will necessarily have different IDs, so we base our // comparable version off of the app.yaml-supplied value. 
if idx := strings.LastIndex(appVersion.version, "."); idx > 0 { appVersion.version = appVersion.version[:idx] } }) return appVersion.version } func newRealMutation(c context.Context, cfg *Config, id string, parent *ds.Key, m Mutation, now time.Time) (*realMutation, error) { when := now<|fim▁hole|> when = targetTime } } } t := reflect.TypeOf(m).String() if _, ok := registry[t]; !ok { return nil, fmt.Errorf("Attempting to add unregistered mutation %v: %v", t, m) } buf := &bytes.Buffer{} err := gob.NewEncoder(buf).Encode(m) if err != nil { return nil, err } root := m.Root(c).Root() hash := sha1.Sum([]byte(root.Encode())) eshard := int64(binary.BigEndian.Uint64(hash[:])) return &realMutation{ ID: id, Parent: parent, ExpandedShard: eshard, ProcessAfter: when, TargetRoot: root, Version: getAppVersion(c), Type: t, Data: buf.Bytes(), }, nil } func (r *realMutation) GetMutation() (Mutation, error) { typ, ok := registry[r.Type] if !ok { return nil, fmt.Errorf("unable to load reflect.Type for %q", r.Type) } ret := reflect.New(typ) if err := gob.NewDecoder(bytes.NewBuffer(r.Data)).DecodeValue(ret); err != nil { return nil, err } return ret.Elem().Interface().(Mutation), nil }<|fim▁end|>
if cfg.DelayedMutations { if dm, ok := m.(DelayedMutation); ok { targetTime := dm.ProcessAfter() if dm.HighPriority() || targetTime.After(now) {
<|file_name|>stop.rs<|end_file_name|><|fim▁begin|>use async_trait::async_trait; use cucumber::given; use mimir::adapters::secondary::elasticsearch::remote::connection_test_pool; use mimir::domain::ports::secondary::remote::Remote; use snafu::ResultExt; use crate::error::{self, Error}; use crate::state::{GlobalState, State, Step, StepStatus}; use crate::steps::admin::IndexCosmogony; use crate::steps::download::download_ntfs; use mimir::adapters::secondary::elasticsearch::ElasticsearchStorageConfig; use tests::ntfs; #[given(regex = r"ntfs file has been indexed for ([^\s]+) as ([^\s]+)$")] async fn index_ntfs(state: &mut GlobalState, region: String, dataset: String) { state .execute(IndexNTFS { region, dataset }) .await .expect("failed to index NTFS file"); } #[given(regex = r"stops have been indexed for ([^\s]+) as ([^\s]+)$")] async fn stops_available(state: &mut GlobalState, region: String, dataset: String) { download_ntfs(state, region.clone()).await; index_ntfs(state, region, dataset).await; } /// Index an NTFS file for a given region into Elasticsearch. /// /// This will require to import admins first. #[derive(Debug, PartialEq)] pub struct IndexNTFS { pub region: String, pub dataset: String, } #[async_trait(?Send)] impl Step for IndexNTFS { async fn execute(&mut self, state: &State) -> Result<StepStatus, Error> { let Self { region, dataset } = self; let client = connection_test_pool() .conn(ElasticsearchStorageConfig::default_testing()) .await .expect("Could not establish connection to Elasticsearch"); <|fim▁hole|> }) .expect("You must index admins before indexing stops"); ntfs::index_stops(&client, region, dataset, false) .await .map(|status| status.into()) .context(error::IndexNTFSSnafu) } }<|fim▁end|>
state .status_of(&IndexCosmogony { region: region.to_string(), dataset: dataset.to_string(),
<|file_name|>JavascriptCacheableList.java<|end_file_name|><|fim▁begin|>/******************************************************************************* * This file is part of Biblivre5. * * Biblivre5 is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License as * published by the Free Software Foundation (FSF); either version 3 of * the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * @author Alberto Wagner <[email protected]> * @author Danniel Willian <[email protected]> ******************************************************************************/ package biblivre.core; import java.io.File; import java.util.LinkedList; import org.json.JSONArray; public class JavascriptCacheableList<T extends IFJson> extends LinkedList<T> implements IFCacheableJavascript { private static final long serialVersionUID = 1L; private String variable; private String prefix; private String suffix;<|fim▁hole|> public JavascriptCacheableList(String variable, String prefix, String suffix) { this.variable = variable; this.prefix = prefix; this.suffix = suffix; } @Override public String getCacheFileNamePrefix() { return this.prefix; } @Override public String getCacheFileNameSuffix() { return this.suffix; } @Override public String toJavascriptString() { JSONArray array = new JSONArray(); for (T el : this) { array.put(el.toJSONObject()); } return this.variable + " = " + array.toString() + ";"; } @Override public File getCacheFile() { if (this.cache == null) { this.cache = new JavascriptCache(this); } return this.cache.getCacheFile(); } @Override public String getCacheFileName() { if (this.cache == null) { this.cache = new JavascriptCache(this); } return this.cache.getFileName(); } @Override public void invalidateCache() { this.cache = null; } }<|fim▁end|>
private JavascriptCache cache;
<|file_name|>bitsnoop.py<|end_file_name|><|fim▁begin|>from xbmctorrentV2 import plugin from xbmctorrentV2.scrapers import scraper from xbmctorrentV2.ga import tracked from xbmctorrentV2.caching import cached_route from xbmctorrentV2.utils import ensure_fanart from xbmctorrentV2.library import library_context BASE_URL = "%s/" % plugin.get_setting("base_bitsnoop") HEADERS = { "Referer": BASE_URL, } # Cache TTLs DEFAULT_TTL = 24 * 3600 # 24 hours @scraper("BitSnoop - Search Engine", "%s/i/logo.png" % BASE_URL) @plugin.route("/bitsnoop") @ensure_fanart @tracked def bitsnoop_index(): plugin.redirect(plugin.url_for("bitsnoop_search")) @plugin.route("/bitsnoop/browse/<root>/<page>") @library_context @ensure_fanart @tracked def bitsnoop_page(root, page): from urlparse import urljoin from xbmctorrentV2.scrapers import rss from xbmctorrentV2.utils import url_get content_type = plugin.request.args_dict.get("content_type") if content_type: plugin.set_content(content_type) page = int(page) page_data = url_get(urljoin(BASE_URL, "%s/%d/" % (root, page)), headers=HEADERS, params={<|fim▁hole|> "sort": "n_s", "dir": "desc", }) return rss.parse(page_data) @plugin.route("/bitsnoop/search") @tracked def bitsnoop_search(): import urllib from xbmctorrentV2.utils import first query = plugin.request.args_dict.pop("query", None) if not query: query = plugin.keyboard("", "xbmctorrentV2 - Bitsnoop - Search") if query: plugin.redirect(plugin.url_for("bitsnoop_page", root="/search/video/%s/c/d/" % urllib.quote("%s safe:no" % query, safe=""), page=1, **plugin.request.args_dict))<|fim▁end|>
"fmt": "rss",
<|file_name|>day23.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python """ Solve day 23 of Advent of Code. http://adventofcode.com/day/23 """ class Computer: def __init__(self): """ Our computer has 2 registers, a and b, and an instruction pointer so that we know which instruction to fetch next. """ self.a = 0 self.b = 0 self.ip = 0 # Ye olde instruction pointer def run_program(self, program): """ Run a list of program instructions until we try to move the instruction pointer beyond the bounds of the instruction list. """ while True: try: instruction, args = self.parse_instruction(program[self.ip]) except IndexError: return getattr(self, instruction)(*args) def parse_instruction(self, line): """ Parse a line of the program into the instruction and its arguments. """ instruction, *args = line.strip().replace(',', '').split() return instruction, args def hlf(self, register): """ Set the register to half its current value, then increment the instruction pointer. """ setattr(self, register, getattr(self, register)//2) self.ip += 1 def tpl(self, register): """ Set the register to triple its current value, then increment the instruction pointer. """ setattr(self, register, getattr(self, register)*3) self.ip += 1 def inc(self, register): """ Increment the value in the register, then increment the instruction pointer. """ setattr(self, register, getattr(self, register) + 1) self.ip += 1 def jmp(self, offset): """ Jump the instruction pointer by a particular offset. """ self.ip += int(offset) def jie(self, register, offset): """ Jump the instruction pointer by an offset if the value in the register is even. """ if getattr(self, register) % 2 == 0: self.jmp(offset) else: self.ip += 1 def jio(self, register, offset): """ Jump the instruction pointer by an offset if the value in the register is one. """<|fim▁hole|> if __name__ == '__main__': with open('input.txt') as f: program = f.readlines() computer = Computer() # Part 1 - start with a=0, b=0 computer.run_program(program) print("Part 1:", computer.b) # Part 2 - now start with a=1, b=0 computer = Computer() computer.a = 1 computer.run_program(program) print("Part 2:", computer.b)<|fim▁end|>
if getattr(self, register) == 1: self.jmp(offset) else: self.ip += 1
<|file_name|>SearchResults.js<|end_file_name|><|fim▁begin|>import React, { Fragment } from 'react'; import PropTypes from 'prop-types'; import { connect } from 'react-redux'; import pluralize from 'common/utils/pluralize'; import search from './actions'; import { selectSearchResultIds, selectIsSearching, selectIsSearchComplete } from './selectors'; import Element from 'common/components/Element'; import H3 from 'common/components/H3'; import Button from 'common/components/Button'; import VocabList from 'common/components/VocabList'; import { blue, orange } from 'common/styles/colors'; SearchResults.propTypes = { ids: PropTypes.arrayOf(PropTypes.number), isSearching: PropTypes.bool, isSearchComplete: PropTypes.bool, onReset: PropTypes.func.isRequired, }; SearchResults.defaultProps = {<|fim▁hole|> export function SearchResults({ ids, isSearching, isSearchComplete, onReset }) { const tooBroad = ids.length >= 50; const amount = `${ids.length}${tooBroad ? '+' : ''}`; const wordsFoundText = `${amount} ${pluralize('word', ids.length)} found${ tooBroad ? '. Try refining your search keywords.' : '' }`; return ( (isSearching || isSearchComplete) && ( <Fragment> <Element flexRow flexCenter> <H3>{(isSearching && 'Searching...') || wordsFoundText}</H3> {isSearchComplete && ( <Button bgColor={orange[5]} colorHover={orange[5]} onClick={onReset}> Clear Results </Button> )} </Element> <VocabList ids={ids} bgColor={blue[5]} showSecondary showFuri /> </Fragment> ) ); } const mapStateToProps = (state, props) => ({ ids: selectSearchResultIds(state, props), isSearching: selectIsSearching(state, props), isSearchComplete: selectIsSearchComplete(state, props), }); const mapDispatchToProps = { onReset: search.clear, }; export default connect(mapStateToProps, mapDispatchToProps)(SearchResults);<|fim▁end|>
ids: [], isSearching: false, isSearchComplete: false, };
<|file_name|>splitBySelector.ts<|end_file_name|><|fim▁begin|><|fim▁hole|> onlyChildren = false }: { selector: string onlyChildren?: boolean }) => (markup: string) => { const rootNode = domify(markup) const markups = onlyChildren ? Array.from(rootNode.children) .filter(node => node.matches(selector)) .map(node => node.innerHTML) : Array.from(rootNode.querySelectorAll(selector)).map( node => node.innerHTML ) return markups } export default splitBySelector<|fim▁end|>
import domify from '../domify/domify' const splitBySelector = ({ selector,
<|file_name|>bg.js<|end_file_name|><|fim▁begin|>/** * Todo: * - Allow hiding of trades (seperates upcoming and finished games) * - Allow auto-retry for when bots are down * - Allow auto-accept offers * - Create popup for browser action (next X games, my winnings) */ // CONSTANTS var GREEN = "#76EE00", ORANGE = "#FFA500", RED = "#FF0000", IMG_PATHS_19 = ["imgs/action/bad_19.png", "imgs/action/unstable_19.png", "imgs/action/good_19.png", "imgs/action/empty_19.png"], IMG_PATHS_38 = ["imgs/action/bad_38.png", "imgs/action/unstable_38.png", "imgs/action/good_38.png", "imgs/action/empty_38.png"]; // VARIABLES var lastError; chrome.runtime.onStartup.addListener(function(){ console.log("onStartup called"); set_icon(3); }) // INIT function init() { console.log("Starting extension"); chrome.alarms.clearAll(); // create alarm to call icon updater every minute get_status(status_loop); chrome.alarms.create("loopAlarm", {periodInMinutes: 1}); chrome.alarms.onAlarm.addListener(function(a){ var d = new Date(); console.log("Checking status ("+d.getHours()+":"+d.getMinutes()+":"+d.getSeconds()+")"); get_status(status_loop); }); } chrome.runtime.onInstalled.addListener(init); /** * Listens for message */ chrome.runtime.onMessage.addListener(function(request,sender,callback) { // if we're getting upcoming matches if (request.get === "games") { get_games(request.num, (function(callback){ return function(arr){ callback(arr); } })(callback)); } // if we're getting status if (request.get === "status") { if (!callback) callback = status_loop; chrome.storage.local.get("status", callback); } // if we're asked to highlight a tab if (request.post === "highlight") { chrome.tabs.highlight({tabs: [sender.tab.index]}, callback); } return true; }); /** * Get the next X games * Can return less than X games, if no more exist * @param {int} num - number of games to return * @param {function} callback - callback function * Calls callback with one parameter: * {array} - array of match objects, each formatted as follows: * {time: string, * link: string * team1: { * name: string, * percent: int, * imgUrl: string * }, * team2: { * name: string, * percent: int, * imgUrl: string * }} */ function get_games(num, callback) { // since we need logic in the callback, we gotta do it like this var func = (function(num,callback){return function(){ // this function is called by XMLHttpRequest // all logic is in here // if site failed to load, error if (this.status !== 200) { callback({error: this.statusText, errno: this.status}); return; } else { // note: to save a bit on DOM parsing, I extract a substring first // this might break in the future; possible TODO: create more reliable method // extract the relevant part of the markup var str = this.responseText, startInd = str.indexOf("<article class=\"standard\" id=\"bets"), endInd = str.indexOf("<div id=\"modalPreview", startInd), containerStr = this.responseText.substring(startInd,endInd); // parse var parser = new DOMParser(), doc = parser.parseFromString(containerStr, "text/html"), matches = doc.querySelectorAll(".matchmain"); // TODO: add error handling var output = []; // loop through all matches for (var i = 0, j = matches.length; i < j && output.length < num; i++) { var match = matches[i], finished = match.querySelector(".match").className.indexOf("notaviable") !== -1, timeStr = match.querySelector(".matchheader div:first-child").innerHTML.replace('"', '\"').trim(); // if match is over or live, skip it if (finished || timeStr.indexOf("LIVE") !== -1) continue; // extract data var 
matchLink = "http://csgolounge.com/"+match.querySelector(".matchleft > a").getAttribute("href"),<|fim▁hole|> team1Container = match.querySelector(".match a > div:first-child"), team1 = {name: team1Container.querySelector("b:first-child").textContent, percent: parseInt(team1Container.querySelector("i").textContent), imgUrl: /url\('(.*?)'\)/.exec(team1Container.querySelector(".team").getAttribute("style"))[1]}, team2Container = match.querySelector(".match a > div:nth-child(3)"), team2 = {name: team2Container.querySelector("b:first-child").textContent, percent: parseInt(team2Container.querySelector("i").textContent), imgUrl: /url\('(.*?)'\)/.exec(team2Container.querySelector(".team").getAttribute("style"))[1]}; // format object, and push to output output.push({time: timeStr, link: matchLink, team1: team1, team2: team2}); } // end callback(output); } }})(num, callback); get("http://csgolounge.com/", func); } /** * Repeatedly checks the status of the lounge bots * Only to be used as callback for get_status */ function status_loop(vals) { if (!vals) console.error("status_loop should only be used as callback for get_status"); var iconNum = vals.error ? 3 : // if error, change to grey (vals.status.indexOf(ORANGE) === -1 && vals.status.indexOf(RED) === -1) ? 2 : // if good, change to green (!vals.offline) ? 1 : // if bots are online, but service(s) are down 0; // if down, change to red if (vals.error) console.error("Failed to get status: [#"+vals.errno+"] "+vals.error); set_icon(iconNum); } /** * Get the current bot status on CSGOLounge * @param {function} callback - callback function * Calls callback with one parameter: * {array} - array of color values for top-most row in status table (see csgolounge.com/status) */ function get_status(callback) { // since we need logic in the callback, we gotta do it like this var func = (function(callback, timeout){return function(){ // this function is called by XMLHttpRequest // all logic is in here // clear timeout, so icon isn't changed to grey clearTimeout(timeout); // if site failed to load, error if (this.status !== 200) { callback({error: this.statusText, errno: this.status}); return; } else { // extract colors var response = this.responseText.replace(/\s/g,""), // remove whitespace offline = (response.indexOf("BOTSAREOFFLINE") !== -1), tableReg = /<tablealign="center"cellpadding="7"[0-9a-z=%"]+>.*?<\/table>/, table = tableReg.exec(response)[0], colorReg = /#[0-8A-F]+/g, colors = table.match(colorReg); // extract color codes from table // save to status, so we don't need to retrieve again on popup chrome.storage.local.set({status: colors}, function(){}); // call actual callback callback({offline: offline, status: colors}); } }})(callback, setTimeout(function(){set_icon(3)}, 5000)); // request status page, running the above function get("http://csgolounge.com/status", func); } /* =============================================================== * * Helper functions * =============================================================== */ /** * Sets the browserAction icon * @param {int} type - icon to change to: red (0), orange (1), green (2) or grey (3) */ function set_icon(type) { chrome.browserAction.setIcon({path: {19: IMG_PATHS_19[type], 38: IMG_PATHS_38[type]}}); } /** * Perform a GET request to a url * @param {string} url - The URL to request to * @param {function} callback - The function to call once the request is performed * @param {object} headers - a header object in the format {header: value} */ function get(url, callback, headers) { // create 
xmlhttprequest instance var xhr = new XMLHttpRequest(); // init xhr.addEventListener("load", callback); xhr.open("GET", url, true); // set headers for (var h in headers) { if (headers.hasOwnProperty(h)) xhr.setRequestHeader(h, headers[h]); } // send xhr.send(); } /** * Perform a POST request to a url * @param {string} url - The URL to request to * @param {object} data - the POST data * @param {function} callback - The function to call once the request is performed * @param {object} headers - a header object in the format {header: value} */ function post(url, data, callback, headers) { // create xmlhttprequest instance var xhr = new XMLHttpRequest(), formatted = []; if (typeof data === "object") { for (var k in data) { formatted.push(encodeURIComponent(k) + "=" + encodeURIComponent(data[k])); } formatted = formatted.join("&"); } else { formatted = data; } // init xhr.addEventListener("load", callback); xhr.open("POST", url, true); xhr.setRequestHeader("Content-Type", "application/x-www-form-urlencoded; charset=UTF-8"); // set headers for (var h in headers) { if (headers.hasOwnProperty(h)) xhr.setRequestHeader(h, headers[h]); } // send xhr.send(formatted); }<|fim▁end|>
<|file_name|>signatures.go<|end_file_name|><|fim▁begin|>// // Copyright 2021 The Sigstore Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software<|fim▁hole|> package mutate import ( v1 "github.com/google/go-containerregistry/pkg/v1" "github.com/google/go-containerregistry/pkg/v1/empty" "github.com/google/go-containerregistry/pkg/v1/mutate" "github.com/sigstore/cosign/pkg/oci" ) // AppendSignatures produces a new oci.Signatures with the provided signatures // appended to the provided base signatures. func AppendSignatures(base oci.Signatures, sigs ...oci.Signature) (oci.Signatures, error) { adds := make([]mutate.Addendum, 0, len(sigs)) for _, sig := range sigs { ann, err := sig.Annotations() if err != nil { return nil, err } adds = append(adds, mutate.Addendum{ Layer: sig, Annotations: ann, }) } img, err := mutate.Append(base, adds...) if err != nil { return nil, err } return &sigAppender{ Image: img, base: base, sigs: sigs, }, nil } // ReplaceSignatures produces a new oci.Signatures provided by the base signatures // replaced with the new oci.Signatures. func ReplaceSignatures(base oci.Signatures) (oci.Signatures, error) { sigs, err := base.Get() if err != nil { return nil, err } adds := make([]mutate.Addendum, 0, len(sigs)) for _, sig := range sigs { ann, err := sig.Annotations() if err != nil { return nil, err } adds = append(adds, mutate.Addendum{ Layer: sig, Annotations: ann, }) } img, err := mutate.Append(empty.Image, adds...) if err != nil { return nil, err } return &sigAppender{ Image: img, base: base, sigs: sigs, }, nil } type sigAppender struct { v1.Image base oci.Signatures sigs []oci.Signature } var _ oci.Signatures = (*sigAppender)(nil) // Get implements oci.Signatures func (sa *sigAppender) Get() ([]oci.Signature, error) { sl, err := sa.base.Get() if err != nil { return nil, err } return append(sl, sa.sigs...), nil }<|fim▁end|>
// distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License.
<|file_name|>backtrace.rs<|end_file_name|><|fim▁begin|>// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. /// Backtrace support built on libgcc with some extra OS-specific support /// /// Some methods of getting a backtrace: /// /// * The backtrace() functions on unix. It turns out this doesn't work very /// well for green threads on OSX, and the address to symbol portion of it /// suffers problems that are described below. /// /// * Using libunwind. This is more difficult than it sounds because libunwind /// isn't installed everywhere by default. It's also a bit of a hefty library, /// so possibly not the best option. When testing, libunwind was excellent at /// getting both accurate backtraces and accurate symbols across platforms. /// This route was not chosen in favor of the next option, however. /// /// * We're already using libgcc_s for exceptions in rust (triggering task /// unwinding and running destructors on the stack), and it turns out that it /// conveniently comes with a function that also gives us a backtrace. All of /// these functions look like _Unwind_*, but it's not quite the full /// repertoire of the libunwind API. Due to it already being in use, this was /// the chosen route of getting a backtrace. /// /// After choosing libgcc_s for backtraces, the sad part is that it will only /// give us a stack trace of instruction pointers. Thankfully these instruction /// pointers are accurate (they work for green and native threads), but it's /// then up to us again to figure out how to translate these addresses to /// symbols. As with before, we have a few options. Before, that, a little bit /// of an interlude about symbols. This is my very limited knowledge about /// symbol tables, and this information is likely slightly wrong, but the /// general idea should be correct. /// /// When talking about symbols, it's helpful to know a few things about where /// symbols are located. Some symbols are located in the dynamic symbol table /// of the executable which in theory means that they're available for dynamic /// linking and lookup. Other symbols end up only in the local symbol table of /// the file. This loosely corresponds to pub and priv functions in Rust. /// /// Armed with this knowledge, we know that our solution for address to symbol /// translation will need to consult both the local and dynamic symbol tables. /// With that in mind, here's our options of translating an address to /// a symbol. /// /// * Use dladdr(). The original backtrace()-based idea actually uses dladdr() /// behind the scenes to translate, and this is why backtrace() was not used. /// Conveniently, this method works fantastically on OSX. It appears dladdr() /// uses magic to consult the local symbol table, or we're putting everything /// in the dynamic symbol table anyway. Regardless, for OSX, this is the /// method used for translation. It's provided by the system and easy to do.o /// /// Sadly, all other systems have a dladdr() implementation that does not /// consult the local symbol table. This means that most functions are blank /// because they don't have symbols. 
This means that we need another solution. /// /// * Use unw_get_proc_name(). This is part of the libunwind api (not the /// libgcc_s version of the libunwind api), but involves taking a dependency /// to libunwind. We may pursue this route in the future if we bundle /// libunwind, but libunwind was unwieldy enough that it was not chosen at /// this time to provide this functionality. /// /// * Shell out to a utility like `readelf`. Crazy though it may sound, it's a /// semi-reasonable solution. The stdlib already knows how to spawn processes, /// so in theory it could invoke readelf, parse the output, and consult the /// local/dynamic symbol tables from there. This ended up not getting chosen /// due to the craziness of the idea plus the advent of the next option. /// /// * Use `libbacktrace`. It turns out that this is a small library bundled in /// the gcc repository which provides backtrace and symbol translation /// functionality. All we really need from it is the backtrace functionality, /// and we only really need this on everything that's not OSX, so this is the /// chosen route for now. /// /// In summary, the current situation uses libgcc_s to get a trace of stack /// pointers, and we use dladdr() or libbacktrace to translate these addresses /// to symbols. This is a bit of a hokey implementation as-is, but it works for /// all unix platforms we support right now, so it at least gets the job done. use prelude::v1::*; use os::unix::prelude::*; use ffi::{CStr, AsOsStr}; use old_io::IoResult; use libc; use mem; use str; use sync::{StaticMutex, MUTEX_INIT}; use sys_common::backtrace::*; /// As always - iOS on arm uses SjLj exceptions and /// _Unwind_Backtrace is even not available there. Still, /// backtraces could be extracted using a backtrace function, /// which thanks god is public /// /// As mentioned in a huge comment block above, backtrace doesn't /// play well with green threads, so while it is extremely nice /// and simple to use it should be used only on iOS devices as the /// only viable option. #[cfg(all(target_os = "ios", target_arch = "arm"))] #[inline(never)] pub fn write(w: &mut Writer) -> IoResult<()> { use result; extern {<|fim▁hole|> // while it doesn't requires lock for work as everything is // local, it still displays much nicer backtraces when a // couple of tasks panic simultaneously static LOCK: StaticMutex = MUTEX_INIT; let _g = unsafe { LOCK.lock() }; try!(writeln!(w, "stack backtrace:")); // 100 lines should be enough const SIZE: uint = 100; let mut buf: [*mut libc::c_void; SIZE] = unsafe {mem::zeroed()}; let cnt = unsafe { backtrace(buf.as_mut_ptr(), SIZE as libc::c_int) as uint}; // skipping the first one as it is write itself let iter = (1..cnt).map(|i| { print(w, i as int, buf[i], buf[i]) }); result::fold(iter, (), |_, _| ()) } #[cfg(not(all(target_os = "ios", target_arch = "arm")))] #[inline(never)] // if we know this is a function call, we can skip it when // tracing pub fn write(w: &mut Writer) -> IoResult<()> { use old_io::IoError; struct Context<'a> { idx: int, writer: &'a mut (Writer+'a), last_error: Option<IoError>, } // When using libbacktrace, we use some necessary global state, so we // need to prevent more than one thread from entering this block. This // is semi-reasonable in terms of printing anyway, and we know that all // I/O done here is blocking I/O, not green I/O, so we don't have to // worry about this being a native vs green mutex. 
static LOCK: StaticMutex = MUTEX_INIT; let _g = unsafe { LOCK.lock() }; try!(writeln!(w, "stack backtrace:")); let mut cx = Context { writer: w, last_error: None, idx: 0 }; return match unsafe { uw::_Unwind_Backtrace(trace_fn, &mut cx as *mut Context as *mut libc::c_void) } { uw::_URC_NO_REASON => { match cx.last_error { Some(err) => Err(err), None => Ok(()) } } _ => Ok(()), }; extern fn trace_fn(ctx: *mut uw::_Unwind_Context, arg: *mut libc::c_void) -> uw::_Unwind_Reason_Code { let cx: &mut Context = unsafe { mem::transmute(arg) }; let mut ip_before_insn = 0; let mut ip = unsafe { uw::_Unwind_GetIPInfo(ctx, &mut ip_before_insn) as *mut libc::c_void }; if !ip.is_null() && ip_before_insn == 0 { // this is a non-signaling frame, so `ip` refers to the address // after the calling instruction. account for that. ip = (ip as usize - 1) as *mut _; } // dladdr() on osx gets whiny when we use FindEnclosingFunction, and // it appears to work fine without it, so we only use // FindEnclosingFunction on non-osx platforms. In doing so, we get a // slightly more accurate stack trace in the process. // // This is often because panic involves the last instruction of a // function being "call std::rt::begin_unwind", with no ret // instructions after it. This means that the return instruction // pointer points *outside* of the calling function, and by // unwinding it we go back to the original function. let symaddr = if cfg!(target_os = "macos") || cfg!(target_os = "ios") { ip } else { unsafe { uw::_Unwind_FindEnclosingFunction(ip) } }; // Don't print out the first few frames (they're not user frames) cx.idx += 1; if cx.idx <= 0 { return uw::_URC_NO_REASON } // Don't print ginormous backtraces if cx.idx > 100 { match write!(cx.writer, " ... <frames omitted>\n") { Ok(()) => {} Err(e) => { cx.last_error = Some(e); } } return uw::_URC_FAILURE } // Once we hit an error, stop trying to print more frames if cx.last_error.is_some() { return uw::_URC_FAILURE } match print(cx.writer, cx.idx, ip, symaddr) { Ok(()) => {} Err(e) => { cx.last_error = Some(e); } } // keep going return uw::_URC_NO_REASON } } #[cfg(any(target_os = "macos", target_os = "ios"))] fn print(w: &mut Writer, idx: int, addr: *mut libc::c_void, _symaddr: *mut libc::c_void) -> IoResult<()> { use intrinsics; #[repr(C)] struct Dl_info { dli_fname: *const libc::c_char, dli_fbase: *mut libc::c_void, dli_sname: *const libc::c_char, dli_saddr: *mut libc::c_void, } extern { fn dladdr(addr: *const libc::c_void, info: *mut Dl_info) -> libc::c_int; } let mut info: Dl_info = unsafe { intrinsics::init() }; if unsafe { dladdr(addr, &mut info) == 0 } { output(w, idx,addr, None) } else { output(w, idx, addr, Some(unsafe { CStr::from_ptr(info.dli_sname).to_bytes() })) } } #[cfg(not(any(target_os = "macos", target_os = "ios")))] fn print(w: &mut Writer, idx: int, addr: *mut libc::c_void, symaddr: *mut libc::c_void) -> IoResult<()> { use env; use ptr; //////////////////////////////////////////////////////////////////////// // libbacktrace.h API //////////////////////////////////////////////////////////////////////// type backtrace_syminfo_callback = extern "C" fn(data: *mut libc::c_void, pc: libc::uintptr_t, symname: *const libc::c_char, symval: libc::uintptr_t, symsize: libc::uintptr_t); type backtrace_full_callback = extern "C" fn(data: *mut libc::c_void, pc: libc::uintptr_t, filename: *const libc::c_char, lineno: libc::c_int, function: *const libc::c_char) -> libc::c_int; type backtrace_error_callback = extern "C" fn(data: *mut libc::c_void, msg: *const 
libc::c_char, errnum: libc::c_int); enum backtrace_state {} #[link(name = "backtrace", kind = "static")] #[cfg(not(test))] extern {} extern { fn backtrace_create_state(filename: *const libc::c_char, threaded: libc::c_int, error: backtrace_error_callback, data: *mut libc::c_void) -> *mut backtrace_state; fn backtrace_syminfo(state: *mut backtrace_state, addr: libc::uintptr_t, cb: backtrace_syminfo_callback, error: backtrace_error_callback, data: *mut libc::c_void) -> libc::c_int; fn backtrace_pcinfo(state: *mut backtrace_state, addr: libc::uintptr_t, cb: backtrace_full_callback, error: backtrace_error_callback, data: *mut libc::c_void) -> libc::c_int; } //////////////////////////////////////////////////////////////////////// // helper callbacks //////////////////////////////////////////////////////////////////////// type FileLine = (*const libc::c_char, libc::c_int); extern fn error_cb(_data: *mut libc::c_void, _msg: *const libc::c_char, _errnum: libc::c_int) { // do nothing for now } extern fn syminfo_cb(data: *mut libc::c_void, _pc: libc::uintptr_t, symname: *const libc::c_char, _symval: libc::uintptr_t, _symsize: libc::uintptr_t) { let slot = data as *mut *const libc::c_char; unsafe { *slot = symname; } } extern fn pcinfo_cb(data: *mut libc::c_void, _pc: libc::uintptr_t, filename: *const libc::c_char, lineno: libc::c_int, _function: *const libc::c_char) -> libc::c_int { if !filename.is_null() { let slot = data as *mut &mut [FileLine]; let buffer = unsafe {ptr::read(slot)}; // if the buffer is not full, add file:line to the buffer // and adjust the buffer for next possible calls to pcinfo_cb. if !buffer.is_empty() { buffer[0] = (filename, lineno); unsafe { ptr::write(slot, &mut buffer[1..]); } } } 0 } // The libbacktrace API supports creating a state, but it does not // support destroying a state. I personally take this to mean that a // state is meant to be created and then live forever. // // I would love to register an at_exit() handler which cleans up this // state, but libbacktrace provides no way to do so. // // With these constraints, this function has a statically cached state // that is calculated the first time this is requested. Remember that // backtracing all happens serially (one global lock). // // An additionally oddity in this function is that we initialize the // filename via self_exe_name() to pass to libbacktrace. It turns out // that on Linux libbacktrace seamlessly gets the filename of the // current executable, but this fails on freebsd. by always providing // it, we make sure that libbacktrace never has a reason to not look up // the symbols. The libbacktrace API also states that the filename must // be in "permanent memory", so we copy it to a static and then use the // static as the pointer. // // FIXME: We also call self_exe_name() on DragonFly BSD. I haven't // tested if this is required or not. 
unsafe fn init_state() -> *mut backtrace_state { static mut STATE: *mut backtrace_state = 0 as *mut backtrace_state; static mut LAST_FILENAME: [libc::c_char; 256] = [0; 256]; if !STATE.is_null() { return STATE } let selfname = if cfg!(target_os = "freebsd") || cfg!(target_os = "dragonfly") || cfg!(target_os = "bitrig") || cfg!(target_os = "openbsd") { env::current_exe().ok() } else { None }; let filename = match selfname { Some(path) => { let bytes = path.as_os_str().as_bytes(); if bytes.len() < LAST_FILENAME.len() { let i = bytes.iter(); for (slot, val) in LAST_FILENAME.iter_mut().zip(i) { *slot = *val as libc::c_char; } LAST_FILENAME.as_ptr() } else { ptr::null() } } None => ptr::null(), }; STATE = backtrace_create_state(filename, 0, error_cb, ptr::null_mut()); return STATE } //////////////////////////////////////////////////////////////////////// // translation //////////////////////////////////////////////////////////////////////// // backtrace errors are currently swept under the rug, only I/O // errors are reported let state = unsafe { init_state() }; if state.is_null() { return output(w, idx, addr, None) } let mut data = ptr::null(); let data_addr = &mut data as *mut *const libc::c_char; let ret = unsafe { backtrace_syminfo(state, symaddr as libc::uintptr_t, syminfo_cb, error_cb, data_addr as *mut libc::c_void) }; if ret == 0 || data.is_null() { try!(output(w, idx, addr, None)); } else { try!(output(w, idx, addr, Some(unsafe { CStr::from_ptr(data).to_bytes() }))); } // pcinfo may return an arbitrary number of file:line pairs, // in the order of stack trace (i.e. inlined calls first). // in order to avoid allocation, we stack-allocate a fixed size of entries. const FILELINE_SIZE: usize = 32; let mut fileline_buf = [(ptr::null(), -1); FILELINE_SIZE]; let ret; let fileline_count; { let mut fileline_win: &mut [FileLine] = &mut fileline_buf; let fileline_addr = &mut fileline_win as *mut &mut [FileLine]; ret = unsafe { backtrace_pcinfo(state, addr as libc::uintptr_t, pcinfo_cb, error_cb, fileline_addr as *mut libc::c_void) }; fileline_count = FILELINE_SIZE - fileline_win.len(); } if ret == 0 { for (i, &(file, line)) in fileline_buf[..fileline_count].iter().enumerate() { if file.is_null() { continue; } // just to be sure let file = unsafe { CStr::from_ptr(file).to_bytes() }; try!(output_fileline(w, file, line, i == FILELINE_SIZE - 1)); } } Ok(()) } // Finally, after all that work above, we can emit a symbol. fn output(w: &mut Writer, idx: int, addr: *mut libc::c_void, s: Option<&[u8]>) -> IoResult<()> { try!(write!(w, " {:2}: {:2$?} - ", idx, addr, HEX_WIDTH)); match s.and_then(|s| str::from_utf8(s).ok()) { Some(string) => try!(demangle(w, string)), None => try!(write!(w, "<unknown>")), } w.write_all(&['\n' as u8]) } #[allow(dead_code)] fn output_fileline(w: &mut Writer, file: &[u8], line: libc::c_int, more: bool) -> IoResult<()> { let file = str::from_utf8(file).ok().unwrap_or("<unknown>"); // prior line: " ##: {:2$} - func" try!(write!(w, " {:3$}at {}:{}", "", file, line, HEX_WIDTH)); if more { try!(write!(w, " <... 
and possibly more>")); } w.write_all(&['\n' as u8]) } /// Unwind library interface used for backtraces /// /// Note that dead code is allowed as here are just bindings /// iOS doesn't use all of them it but adding more /// platform-specific configs pollutes the code too much #[allow(non_camel_case_types)] #[allow(non_snake_case)] #[allow(dead_code)] mod uw { pub use self::_Unwind_Reason_Code::*; use libc; #[repr(C)] pub enum _Unwind_Reason_Code { _URC_NO_REASON = 0, _URC_FOREIGN_EXCEPTION_CAUGHT = 1, _URC_FATAL_PHASE2_ERROR = 2, _URC_FATAL_PHASE1_ERROR = 3, _URC_NORMAL_STOP = 4, _URC_END_OF_STACK = 5, _URC_HANDLER_FOUND = 6, _URC_INSTALL_CONTEXT = 7, _URC_CONTINUE_UNWIND = 8, _URC_FAILURE = 9, // used only by ARM EABI } pub enum _Unwind_Context {} pub type _Unwind_Trace_Fn = extern fn(ctx: *mut _Unwind_Context, arg: *mut libc::c_void) -> _Unwind_Reason_Code; extern { // No native _Unwind_Backtrace on iOS #[cfg(not(all(target_os = "ios", target_arch = "arm")))] pub fn _Unwind_Backtrace(trace: _Unwind_Trace_Fn, trace_argument: *mut libc::c_void) -> _Unwind_Reason_Code; // available since GCC 4.2.0, should be fine for our purpose #[cfg(all(not(all(target_os = "android", target_arch = "arm")), not(all(target_os = "linux", target_arch = "arm"))))] pub fn _Unwind_GetIPInfo(ctx: *mut _Unwind_Context, ip_before_insn: *mut libc::c_int) -> libc::uintptr_t; #[cfg(all(not(target_os = "android"), not(all(target_os = "linux", target_arch = "arm"))))] pub fn _Unwind_FindEnclosingFunction(pc: *mut libc::c_void) -> *mut libc::c_void; } // On android, the function _Unwind_GetIP is a macro, and this is the // expansion of the macro. This is all copy/pasted directly from the // header file with the definition of _Unwind_GetIP. #[cfg(any(all(target_os = "android", target_arch = "arm"), all(target_os = "linux", target_arch = "arm")))] pub unsafe fn _Unwind_GetIP(ctx: *mut _Unwind_Context) -> libc::uintptr_t { #[repr(C)] enum _Unwind_VRS_Result { _UVRSR_OK = 0, _UVRSR_NOT_IMPLEMENTED = 1, _UVRSR_FAILED = 2, } #[repr(C)] enum _Unwind_VRS_RegClass { _UVRSC_CORE = 0, _UVRSC_VFP = 1, _UVRSC_FPA = 2, _UVRSC_WMMXD = 3, _UVRSC_WMMXC = 4, } #[repr(C)] enum _Unwind_VRS_DataRepresentation { _UVRSD_UINT32 = 0, _UVRSD_VFPX = 1, _UVRSD_FPAX = 2, _UVRSD_UINT64 = 3, _UVRSD_FLOAT = 4, _UVRSD_DOUBLE = 5, } type _Unwind_Word = libc::c_uint; extern { fn _Unwind_VRS_Get(ctx: *mut _Unwind_Context, klass: _Unwind_VRS_RegClass, word: _Unwind_Word, repr: _Unwind_VRS_DataRepresentation, data: *mut libc::c_void) -> _Unwind_VRS_Result; } let mut val: _Unwind_Word = 0; let ptr = &mut val as *mut _Unwind_Word; let _ = _Unwind_VRS_Get(ctx, _Unwind_VRS_RegClass::_UVRSC_CORE, 15, _Unwind_VRS_DataRepresentation::_UVRSD_UINT32, ptr as *mut libc::c_void); (val & !1) as libc::uintptr_t } // This function doesn't exist on Android or ARM/Linux, so make it same // to _Unwind_GetIP #[cfg(any(all(target_os = "android", target_arch = "arm"), all(target_os = "linux", target_arch = "arm")))] pub unsafe fn _Unwind_GetIPInfo(ctx: *mut _Unwind_Context, ip_before_insn: *mut libc::c_int) -> libc::uintptr_t { *ip_before_insn = 0; _Unwind_GetIP(ctx) } // This function also doesn't exist on Android or ARM/Linux, so make it // a no-op #[cfg(any(target_os = "android", all(target_os = "linux", target_arch = "arm")))] pub unsafe fn _Unwind_FindEnclosingFunction(pc: *mut libc::c_void) -> *mut libc::c_void { pc } }<|fim▁end|>
fn backtrace(buf: *mut *mut libc::c_void, sz: libc::c_int) -> libc::c_int; }
<|file_name|>utils.rs<|end_file_name|><|fim▁begin|>//! Utilities useful for various generations tasks. use std::ops::{Index, IndexMut}; use std::mem; use std::iter; use std::collections::HashMap; use std::collections::hash_map::Entry; use num::Float; use na; use na::{Pnt3, Dim, Cross, Orig}; use ncollide_utils::{HashablePartialEq, AsBytes}; use math::{Scalar, Point, Vect}; // FIXME: remove that in favor of `push_xy_circle` ? /// Pushes a discretized counterclockwise circle to a buffer. #[inline] pub fn push_circle<N: Scalar>(radius: N, nsubdiv: u32, dtheta: N, y: N, out: &mut Vec<Pnt3<N>>) { let mut curr_theta: N = na::zero(); for _ in 0 .. nsubdiv { out.push(Pnt3::new(curr_theta.cos() * radius, y.clone(), curr_theta.sin() * radius)); curr_theta = curr_theta + dtheta; } } /// Pushes a discretized counterclockwise circle to a buffer. /// The circle is contained on the plane spanned by the `x` and `y` axis. #[inline] pub fn push_xy_arc<N, P>(radius: N, nsubdiv: u32, dtheta: N, out: &mut Vec<P>) where N: Scalar, P: Dim + Orig + Index<usize, Output = N> + IndexMut<usize, Output = N> { assert!(na::dim::<P>() >= 2); let mut curr_theta: N = na::zero(); for _ in 0 .. nsubdiv { let mut pt = na::orig::<P>(); pt[0] = curr_theta.cos() * radius; pt[1] = curr_theta.sin() * radius; out.push(pt); curr_theta = curr_theta + dtheta; } } /// Creates the faces from two circles with the same discretization. #[inline] pub fn push_ring_indices(base_lower_circle: u32, base_upper_circle: u32, nsubdiv: u32, out: &mut Vec<Pnt3<u32>>) { push_open_ring_indices(base_lower_circle, base_upper_circle, nsubdiv, out); // adjust the last two triangles push_rectangle_indices(base_upper_circle, base_upper_circle + nsubdiv - 1, base_lower_circle, base_lower_circle + nsubdiv - 1, out); } /// Creates the faces from two circles with the same discretization. #[inline] pub fn push_open_ring_indices(base_lower_circle: u32, base_upper_circle: u32, nsubdiv: u32, out: &mut Vec<Pnt3<u32>>) { assert!(nsubdiv > 0); for i in 0 .. nsubdiv - 1 { let bli = base_lower_circle + i; let bui = base_upper_circle + i; push_rectangle_indices(bui + 1, bui, bli + 1, bli, out); } } /// Creates the faces from a circle and a point that is shared by all triangle. #[inline] pub fn push_degenerate_top_ring_indices(base_circle: u32, point: u32, nsubdiv: u32, out: &mut Vec<Pnt3<u32>>) { push_degenerate_open_top_ring_indices(base_circle, point, nsubdiv, out); out.push(Pnt3::new(base_circle + nsubdiv - 1, point, base_circle)); } /// Creates the faces from a circle and a point that is shared by all triangle. #[inline] pub fn push_degenerate_open_top_ring_indices(base_circle: u32, point: u32, nsubdiv: u32, out: &mut Vec<Pnt3<u32>>) { assert!(nsubdiv > 0); <|fim▁hole|> } } /// Pushes indices so that a circle is filled with triangles. Each triangle will have the /// `base_circle` point in common. /// Pushes `nsubdiv - 2` elements to `out`. #[inline] pub fn push_filled_circle_indices(base_circle: u32, nsubdiv: u32, out: &mut Vec<Pnt3<u32>>) { for i in base_circle + 1 .. base_circle + nsubdiv - 1 { out.push(Pnt3::new(base_circle, i, i + 1)); } } /// Given four corner points, pushes to two counterclockwise triangles to `out`. /// /// # Arguments: /// * `ul` - the up-left point. /// * `dl` - the down-left point. /// * `dr` - the down-left point. /// * `ur` - the up-left point. 
#[inline] pub fn push_rectangle_indices<T: Clone>(ul: T, ur: T, dl: T, dr: T, out: &mut Vec<Pnt3<T>>) { out.push(Pnt3::new(ul.clone(), dl, dr.clone())); out.push(Pnt3::new(dr , ur, ul)); } /// Reverses the clockwising of a set of faces. #[inline] pub fn reverse_clockwising(indices: &mut [Pnt3<u32>]) { for i in indices.iter_mut() { mem::swap(&mut i.x, &mut i.y); } } /// Duplicates the indices of each triangle on the given index buffer. /// /// For example: [ (0.0, 1.0, 2.0) ] becomes: [ (0.0, 0.0, 0.0), (1.0, 1.0, 1.0), (2.0, 2.0, 2.0)]. #[inline] pub fn split_index_buffer(indices: &[Pnt3<u32>]) -> Vec<Pnt3<Pnt3<u32>>> { let mut resi = Vec::new(); for vertex in indices.iter() { resi.push( Pnt3::new( Pnt3::new(vertex.x, vertex.x, vertex.x), Pnt3::new(vertex.y, vertex.y, vertex.y), Pnt3::new(vertex.z, vertex.z, vertex.z) ) ); } resi } /// Duplicates the indices of each triangle on the given index buffer, giving the same id to each /// identical vertex. #[inline] pub fn split_index_buffer_and_recover_topology<P: PartialEq + AsBytes + Clone>( indices: &[Pnt3<u32>], coords: &[P]) -> (Vec<Pnt3<Pnt3<u32>>>, Vec<P>) { let mut vtx_to_id = HashMap::new(); let mut new_coords = Vec::with_capacity(coords.len()); let mut out = Vec::with_capacity(indices.len()); fn resolve_coord_id<P: PartialEq + AsBytes + Clone>( coord: &P, vtx_to_id: &mut HashMap<HashablePartialEq<P>, u32>, new_coords: &mut Vec<P>) -> u32 { let key = unsafe { HashablePartialEq::new(coord.clone()) }; let id = match vtx_to_id.entry(key) { Entry::Occupied(entry) => entry.into_mut(), Entry::Vacant(entry) => entry.insert(new_coords.len() as u32) }; if *id == new_coords.len() as u32 { new_coords.push(coord.clone()); } *id } for t in indices.iter() { let va = resolve_coord_id(&coords[t.x as usize], &mut vtx_to_id, &mut new_coords); let oa = t.x; let vb = resolve_coord_id(&coords[t.y as usize], &mut vtx_to_id, &mut new_coords); let ob = t.y; let vc = resolve_coord_id(&coords[t.z as usize], &mut vtx_to_id, &mut new_coords); let oc = t.z; out.push( Pnt3::new( Pnt3::new(va, oa, oa), Pnt3::new(vb, ob, ob), Pnt3::new(vc, oc, oc) ) ); } new_coords.shrink_to_fit(); (out, new_coords) } /// Computes the normals of a set of vertices. #[inline] pub fn compute_normals<P>(coordinates: &[P], faces: &[Pnt3<u32>], normals: &mut Vec<P::Vect>) where P: Point, P::Vect: Vect + Cross<CrossProductType = <P as Point>::Vect> { let mut divisor: Vec<<P::Vect as Vect>::Scalar> = iter::repeat(na::zero()).take(coordinates.len()).collect(); // Shrink the output buffer if it is too big. if normals.len() > coordinates.len() { normals.truncate(coordinates.len()) } // Reinit all normals to zero. normals.clear(); normals.extend(iter::repeat(na::zero::<P::Vect>()).take(coordinates.len())); // Accumulate normals ... for f in faces.iter() { let edge1 = coordinates[f.y as usize] - coordinates[f.x as usize]; let edge2 = coordinates[f.z as usize] - coordinates[f.x as usize]; let cross = na::cross(&edge1, &edge2); let normal; if !na::is_zero(&cross) { normal = na::normalize(&cross) } else { normal = cross } normals[f.x as usize] = normals[f.x as usize] + normal; normals[f.y as usize] = normals[f.y as usize] + normal; normals[f.z as usize] = normals[f.z as usize] + normal; divisor[f.x as usize] = divisor[f.x as usize] + na::one(); divisor[f.y as usize] = divisor[f.y as usize] + na::one(); divisor[f.z as usize] = divisor[f.z as usize] + na::one(); } // ... and compute the mean for (n, divisor) in normals.iter_mut().zip(divisor.iter()) { *n = *n / *divisor } }<|fim▁end|>
for i in 0 .. nsubdiv - 1 { out.push(Pnt3::new(base_circle + i, point, base_circle + i + 1));
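// Illustrative usage sketch (not from the original file; it assumes this module's
// imports, and the values are arbitrary): building a triangulated disc from the
// helpers above.
//
//   let nsubdiv = 32u32;
//   let dtheta = 2.0 * ::std::f32::consts::PI / (nsubdiv as f32);
//   let mut coords: Vec<Pnt3<f32>> = Vec::new();
//   let mut faces: Vec<Pnt3<u32>> = Vec::new();
//   push_circle(1.0f32, nsubdiv, dtheta, 0.0, &mut coords); // 32 rim vertices at y = 0
//   push_filled_circle_indices(0, nsubdiv, &mut faces);     // nsubdiv - 2 = 30 fan triangles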
<|file_name|>home.client.controller.js<|end_file_name|><|fim▁begin|>'use strict'; angular.module('core').controller('HomeController', ['$scope', 'Authentication', function($scope, Authentication) { // This provides Authentication context. $scope.authentication = Authentication; $scope.alerts = [ { icon:'glyphicon-user', color:'btn-success', total:'20,408', description:'TOTAL CUSTOMERS' }, {<|fim▁hole|> total:'8,382', description:'UPCOMING EVENTS' }, { icon:'glyphicon-edit', color:'btn-success', total:'527', description:'NEW CUSTOMERS IN 24H' }, { icon:'glyphicon-record', color:'btn-info', total:'85,000', description:'EMAILS SENT' }, { icon:'glyphicon-eye-open', color:'btn-warning', total:'20,408', description:'FOLLOW UPS REQUIRED' }, { icon:'glyphicon-flag', color:'btn-danger', total:'348', description:'REFERRALS TO MODERATE' } ]; } ]);<|fim▁end|>
icon:'glyphicon-calendar', color:'btn-primary',
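// Illustrative template sketch (hypothetical markup, not part of this controller):
// the $scope.alerts array above would typically be rendered with ng-repeat, e.g.
//
//   <div class="btn {{alert.color}}" data-ng-repeat="alert in alerts">
//     <i class="glyphicon {{alert.icon}}"></i>
//     <strong>{{alert.total}}</strong> {{alert.description}}
//   </div>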
<|file_name|>HealPetPet.py<|end_file_name|><|fim▁begin|>from neolib.plots.Step import Step from neolib.NST import NST import time class HealPetPet(Step): _paths = { 'links': '//*[@id="content"]/table/tr/td[2]//a/@href', 'img': '//*[@id="content"]/table/tr/td[2]/div/img/@src', 'cert': '//area/@href', } _HEALS = { 'http://images.neopets.com/altador/misc/petpet_act_b_ffabe6bc57.gif': 0, 'http://images.neopets.com/altador/misc/petpet_act_a_2a605ae262.gif': 1, 'http://images.neopets.com/altador/misc/petpet_act_c_5f4438778c.gif': 2, 'http://images.neopets.com/altador/misc/petpet_act_d_42b934a33b.gif': 3, } def __init__(self, usr): super().__init__(usr, '', '', False) # Setup link self.link = ['http://www.neopets.com/altador/petpet.phtml?ppheal=1', 'http://www.neopets.com/altador/petpet.phtml?ppheal=1&sthv=%s'] # Setup checks self._checks = [''] def execute(self, last_pg=None): # Heal the PetPet 10 times to get the certificate check = '' for i in range(0, 11): if check: pg = self._usr.get_page(check) else: pg = self._usr.get_page(self.link[0]) f = open('test.html', 'w', encoding='utf-8') f.write(pg.content) f.close() if len(self._xpath('cert', pg)) > 0: print('Found certificate!') url = self._base_url + self._xpath('cert', pg)[0] pg = self._usr.get_page(url) f = open('test.html', 'w', encoding='utf-8') f.write(pg.content) f.close() print('Saved page') exit() links = self._xpath('links', pg) action = self._HEALS[self._xpath('img', pg)[0]] url = self._base_url + links[action] print('URL: ' + url) pg = self._usr.get_page(url)<|fim▁hole|> links = self._xpath('links', pg) check = self._base_url + links[4] f = open('test.html', 'w', encoding='utf-8') f.write(pg.content) f.close() if len(self._xpath('cert', pg)) > 0: print('Found certificate!') url = self._base_url + self._xpath('cert', pg)[0] pg = self._usr.get_page(url) f = open('test.html', 'w', encoding='utf-8') f.write(pg.content) f.close() print('Saved page') exit() # Wait till the next minute to check on the petpet wait = (60 - NST.sec) + 1 print('Waiting ' + str(wait) + ' seconds') time.sleep(wait)<|fim▁end|>
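# Illustrative sketch (hypothetical values, not part of the original script): how the
# _HEALS table above drives each loop iteration. The page shows one of four petpet
# state images, and the image URL selects which of the scraped links to follow:
#
#   img = self._xpath('img', pg)[0]       # e.g. '...petpet_act_a_2a605ae262.gif'
#   action = self._HEALS[img]             # -> 1
#   url = self._base_url + links[action]  # follow the matching heal link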
<|file_name|>orphan.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Orphan checker: every impl either implements a trait defined in this //! crate or pertains to a type defined in this crate. use middle::traits; use middle::ty; use syntax::ast::{Item, ItemImpl}; use syntax::ast; use syntax::ast_util; use syntax::codemap::Span; use syntax::visit; use util::ppaux::{Repr, UserString}; pub fn check(tcx: &ty::ctxt) { let mut orphan = OrphanChecker { tcx: tcx }; visit::walk_crate(&mut orphan, tcx.map.krate()); } struct OrphanChecker<'cx, 'tcx:'cx> { tcx: &'cx ty::ctxt<'tcx> } impl<'cx, 'tcx> OrphanChecker<'cx, 'tcx> { fn check_def_id(&self, span: Span, def_id: ast::DefId) { if def_id.krate != ast::LOCAL_CRATE { span_err!(self.tcx.sess, span, E0116, "cannot associate methods with a type outside the \ crate the type is defined in; define and implement \ a trait or new type instead"); } } } impl<'cx, 'tcx,'v> visit::Visitor<'v> for OrphanChecker<'cx, 'tcx> { fn visit_item(&mut self, item: &'v ast::Item) { let def_id = ast_util::local_def(item.id); match item.node { ast::ItemImpl(_, _, _, None, _, _) => { // For inherent impls, self type must be a nominal type // defined in this crate. debug!("coherence2::orphan check: inherent impl {}", item.repr(self.tcx)); let self_ty = ty::lookup_item_type(self.tcx, def_id).ty; match self_ty.sty { ty::ty_enum(def_id, _) | ty::ty_struct(def_id, _) => { self.check_def_id(item.span, def_id); } ty::ty_trait(ref data) => { self.check_def_id(item.span, data.principal_def_id()); } ty::ty_uniq(..) => { self.check_def_id(item.span, self.tcx.lang_items.owned_box() .unwrap()); } _ => { span_err!(self.tcx.sess, item.span, E0118, "no base type found for inherent implementation; \ implement a trait or new type instead"); } } } ast::ItemImpl(_, _, _, Some(_), _, _) => { // "Trait" impl debug!("coherence2::orphan check: trait impl {}", item.repr(self.tcx)); let trait_def_id = ty::impl_trait_ref(self.tcx, def_id).unwrap().def_id; match traits::orphan_check(self.tcx, def_id) { Ok(()) => { } Err(traits::OrphanCheckErr::NoLocalInputType) => { if !ty::has_attr(self.tcx, trait_def_id, "old_orphan_check") { let self_ty = ty::lookup_item_type(self.tcx, def_id).ty; span_err!( self.tcx.sess, item.span, E0117,<|fim▁hole|> "the type `{}` does not reference any \ types defined in this crate; \ only traits defined in the current crate can be \ implemented for arbitrary types", self_ty.user_string(self.tcx)); } } Err(traits::OrphanCheckErr::UncoveredTy(param_ty)) => { if !ty::has_attr(self.tcx, trait_def_id, "old_orphan_check") { self.tcx.sess.span_err( item.span, format!( "type parameter `{}` is not constrained by any local type; \ only traits defined in the current crate can be implemented \ for a type parameter", param_ty.user_string(self.tcx)).as_slice()); self.tcx.sess.span_note( item.span, format!("for a limited time, you can add \ `#![feature(old_orphan_check)]` to your crate \ to disable this rule").as_slice()); } } } } _ => { // Not an impl } } visit::walk_item(self, item); } }<|fim▁end|>
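// Illustrative examples of what this pass accepts and rejects (a sketch of the usual
// orphan rules; `ForeignTrait` and `ForeignType` stand for items from another crate):
//
//   impl LocalTrait for ForeignType {}    // ok: the trait is defined in this crate
//   impl ForeignTrait for LocalType {}    // ok: the self type is defined in this crate
//   impl ForeignTrait for ForeignType {}  // rejected (E0117): nothing local is referenced
//   impl<T> ForeignTrait for T {}         // rejected: type parameter `T` is uncovered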
<|file_name|>BaseMvpFragment.java<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.seongil.mvplife.fragment; import android.os.Bundle; import android.view.View; import com.seongil.mvplife.base.MvpPresenter; import com.seongil.mvplife.base.MvpView; import com.seongil.mvplife.delegate.MvpDelegateCallback; import com.seongil.mvplife.delegate.fragment.MvpFragmentDelegate; import com.seongil.mvplife.delegate.fragment.MvpFragmentDelegateImpl; /** * Abstract class for a fragment which holds a reference to the {@link MvpPresenter}, * and a {@link MvpFragmentDelegate} which handles the lifecycle of the fragment. * * @param <V> The type of {@link MvpView} * @param <P> The type of {@link MvpPresenter} * * @author seong-il, kim * @since 17. 1. 6 */ public abstract class BaseMvpFragment<V extends MvpView, P extends MvpPresenter<V>> extends CoreFragment implements MvpView, MvpDelegateCallback<V, P> { // ======================================================================== // Constants // ======================================================================== // ======================================================================== // Fields // ======================================================================== private MvpFragmentDelegate mFragmentDelegate; private P mPresenter; // ======================================================================== // Constructors // ======================================================================== // ======================================================================== // Getter & Setter // ======================================================================== // ======================================================================== // Methods for/from SuperClass/Interfaces<|fim▁hole|> @Override public P getPresenter() { return mPresenter; } @Override public void setPresenter(P presenter) { mPresenter = presenter; } @Override @SuppressWarnings("unchecked") public V getMvpView() { return (V) this; } @Override public void onViewCreated(View view, Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); getMvpDelegate().onViewCreated(view, savedInstanceState); } @Override public void onDestroyView() { getMvpDelegate().onDestroyView(); super.onDestroyView(); } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); getMvpDelegate().onCreate(savedInstanceState); } @Override public void onDestroy() { super.onDestroy(); getMvpDelegate().onDestroy(); } // ======================================================================== // Methods // ======================================================================== protected MvpFragmentDelegate getMvpDelegate() { if (mFragmentDelegate == null) { mFragmentDelegate = new MvpFragmentDelegateImpl<>(this); } return mFragmentDelegate; } // ======================================================================== // Inner and Anonymous Classes // 
======================================================================== }<|fim▁end|>
// ======================================================================== @Override public abstract P createPresenter();
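// Illustrative subclass sketch (hypothetical view and presenter types, not part of
// this class): a concrete fragment only needs to supply createPresenter(), which the
// MvpFragmentDelegate typically invokes while the fragment is being created.
//
//   public class LoginFragment extends BaseMvpFragment<LoginView, LoginPresenter>
//           implements LoginView {
//       @Override
//       public LoginPresenter createPresenter() {
//           return new LoginPresenter();
//       }
//   }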
<|file_name|>test_surface.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python import unittest import os import random import numpy as np from pymatgen.core.structure import Structure from pymatgen.core.lattice import Lattice from pymatgen.core.surface import Slab, SlabGenerator, generate_all_slabs, \ get_symmetrically_distinct_miller_indices from pymatgen.symmetry.groups import SpaceGroup from pymatgen.util.testing import PymatgenTest def get_path(path_str): cwd = os.path.abspath(os.path.dirname(__file__)) path = os.path.join(cwd, "..", "..", "..", "test_files", "surface_tests", path_str) return path class SlabTest(PymatgenTest): def setUp(self): zno1 = Structure.from_file(get_path("ZnO-wz.cif"), primitive=False) zno55 = SlabGenerator(zno1, [1, 0, 0], 5, 5, lll_reduce=False, center_slab=False).get_slab() self.zno1 = zno1 self.zno55 = zno55 self.h = Structure(Lattice.cubic(3), ["H"], [[0, 0, 0]]) self.libcc = Structure(Lattice.cubic(3.51004), ["Li", "Li"], [[0, 0, 0], [0.5, 0.5, 0.5]]) def test_init(self): zno_slab = Slab(self.zno55.lattice, self.zno55.species, self.zno55.frac_coords, self.zno55.miller_index, self.zno55.oriented_unit_cell, 0, self.zno55.scale_factor) m =self.zno55.lattice.matrix area = np.linalg.norm(np.cross(m[0], m[1])) self.assertAlmostEqual(zno_slab.surface_area, area) self.assertEqual(zno_slab.lattice.lengths_and_angles, self.zno55.lattice.lengths_and_angles) self.assertEqual(zno_slab.oriented_unit_cell.composition, self.zno1.composition) self.assertEqual(len(zno_slab), 8) def test_add_adsorbate_atom(self): zno_slab = Slab(self.zno55.lattice, self.zno55.species, self.zno55.frac_coords, self.zno55.miller_index, self.zno55.oriented_unit_cell, 0, self.zno55.scale_factor) zno_slab.add_adsorbate_atom([1], 'H', 1) self.assertEqual(len(zno_slab), 9) self.assertEqual(str(zno_slab[8].specie), 'H') self.assertAlmostEqual(zno_slab.get_distance(1, 8), 1.0) self.assertTrue(zno_slab[8].c > zno_slab[0].c) m = self.zno55.lattice.matrix area = np.linalg.norm(np.cross(m[0], m[1])) self.assertAlmostEqual(zno_slab.surface_area, area) self.assertEqual(zno_slab.lattice.lengths_and_angles, self.zno55.lattice.lengths_and_angles) def test_get_sorted_structure(self): species = [str(site.specie) for site in self.zno55.get_sorted_structure()] self.assertEqual(species, ["Zn2+"] * 4 + ["O2-"] * 4) def test_methods(self): #Test various structure methods self.zno55.get_primitive_structure() def test_as_from_dict(self): d = self.zno55.as_dict() obj = Slab.from_dict(d) self.assertEqual(obj.miller_index, (1, 0, 0)) def test_dipole_and_is_polar(self): self.assertArrayAlmostEqual(self.zno55.dipole, [0, 0, 0]) self.assertFalse(self.zno55.is_polar()) cscl = self.get_structure("CsCl") cscl.add_oxidation_state_by_element({"Cs": 1, "Cl": -1}) slab = SlabGenerator(cscl, [1, 0, 0], 5, 5, lll_reduce=False, center_slab=False).get_slab() self.assertArrayAlmostEqual(slab.dipole, [-4.209, 0, 0]) self.assertTrue(slab.is_polar()) class SlabGeneratorTest(PymatgenTest): def test_get_slab(self): s = self.get_structure("LiFePO4") gen = SlabGenerator(s, [0, 0, 1], 10, 10) s = gen.get_slab(0.25) self.assertAlmostEqual(s.lattice.abc[2], 20.820740000000001) fcc = Structure.from_spacegroup("Fm-3m", Lattice.cubic(3), ["Fe"], [[0, 0, 0]]) gen = SlabGenerator(fcc, [1, 1, 1], 10, 10) slab = gen.get_slab() gen = SlabGenerator(fcc, [1, 1, 1], 10, 10, primitive=False) slab_non_prim = gen.get_slab() self.assertEqual(len(slab), 6) self.assertEqual(len(slab_non_prim), len(slab) * 4) #Some randomized testing of cell vectors for i in 
range(1, 231): i = random.randint(1, 230) sg = SpaceGroup.from_int_number(i) if sg.crystal_system == "hexagonal" or (sg.crystal_system == \ "trigonal" and sg.symbol.endswith("H")): latt = Lattice.hexagonal(5, 10)<|fim▁hole|> miller = (0, 0, 0) while miller == (0, 0, 0): miller = (random.randint(0, 6), random.randint(0, 6), random.randint(0, 6)) gen = SlabGenerator(s, miller, 10, 10) a, b, c = gen.oriented_unit_cell.lattice.matrix self.assertAlmostEqual(np.dot(a, gen._normal), 0) self.assertAlmostEqual(np.dot(b, gen._normal), 0) def test_normal_search(self): fcc = Structure.from_spacegroup("Fm-3m", Lattice.cubic(3), ["Fe"], [[0, 0, 0]]) for miller in [(1, 0, 0), (1, 1, 0), (1, 1, 1), (2, 1, 1)]: gen = SlabGenerator(fcc, miller, 10, 10) gen_normal = SlabGenerator(fcc, miller, 10, 10, max_normal_search=max(miller)) slab = gen_normal.get_slab() self.assertAlmostEqual(slab.lattice.alpha, 90) self.assertAlmostEqual(slab.lattice.beta, 90) self.assertGreaterEqual(len(gen_normal.oriented_unit_cell), len(gen.oriented_unit_cell)) graphite = self.get_structure("Graphite") for miller in [(1, 0, 0), (1, 1, 0), (0, 0, 1), (2, 1, 1)]: gen = SlabGenerator(graphite, miller, 10, 10) gen_normal = SlabGenerator(graphite, miller, 10, 10, max_normal_search=max(miller)) self.assertGreaterEqual(len(gen_normal.oriented_unit_cell), len(gen.oriented_unit_cell)) sc = Structure(Lattice.hexagonal(3.32, 5.15), ["Sc", "Sc"], [[1/3, 2/3, 0.25], [2/3, 1/3, 0.75]]) gen = SlabGenerator(sc, (1, 1, 1), 10, 10, max_normal_search=1) self.assertAlmostEqual(gen.oriented_unit_cell.lattice.angles[1], 90) def test_get_slabs(self): gen = SlabGenerator(self.get_structure("CsCl"), [0, 0, 1], 10, 10) #Test orthogonality of some internal variables. a, b, c = gen.oriented_unit_cell.lattice.matrix self.assertAlmostEqual(np.dot(a, gen._normal), 0) self.assertAlmostEqual(np.dot(b, gen._normal), 0) self.assertEqual(len(gen.get_slabs()), 1) s = self.get_structure("LiFePO4") gen = SlabGenerator(s, [0, 0, 1], 10, 10) self.assertEqual(len(gen.get_slabs()), 5) self.assertEqual(len(gen.get_slabs(bonds={("P", "O"): 3})), 2) # There are no slabs in LFP that do not break either P-O or Fe-O # bonds for a miller index of [0, 0, 1]. self.assertEqual(len(gen.get_slabs( bonds={("P", "O"): 3, ("Fe", "O"): 3})), 0) #If we allow some broken bonds, there are a few slabs. self.assertEqual(len(gen.get_slabs( bonds={("P", "O"): 3, ("Fe", "O"): 3}, max_broken_bonds=2)), 2) # At this threshold, only the origin and center Li result in # clustering. All other sites are non-clustered. So the # of # slabs is the # of sites in the LiFePO4 unit cell - 2 + 1. self.assertEqual(len(gen.get_slabs(tol=1e-4)), 15) LiCoO2 = Structure.from_file(get_path("icsd_LiCoO2.cif"), primitive=False) gen = SlabGenerator(LiCoO2, [0, 0, 1], 10, 10) lco = gen.get_slabs(bonds={("Co", "O"): 3}) self.assertEqual(len(lco), 1) a, b, c = gen.oriented_unit_cell.lattice.matrix self.assertAlmostEqual(np.dot(a, gen._normal), 0) self.assertAlmostEqual(np.dot(b, gen._normal), 0) scc = Structure.from_spacegroup("Pm-3m", Lattice.cubic(3), ["Fe"], [[0, 0, 0]]) gen = SlabGenerator(scc, [0, 0, 1], 10, 10) slabs = gen.get_slabs() self.assertEqual(len(slabs), 1) gen = SlabGenerator(scc, [1, 1, 1], 10, 10, max_normal_search=1) slabs = gen.get_slabs() self.assertEqual(len(slabs), 1) def test_triclinic_TeI(self): # Test case for a triclinic structure of TeI. Only these three # Miller indices are used because it is easier to identify which # atoms should be in a surface together. 
The closeness of the sites # in other Miller indices can cause some ambiguity when choosing a # higher tolerance. numb_slabs = {(0, 0, 1): 5, (0, 1, 0): 3, (1, 0, 0): 7} TeI = Structure.from_file(get_path("icsd_TeI.cif"), primitive=False) for k, v in numb_slabs.items(): trclnc_TeI = SlabGenerator(TeI, k, 10, 10) TeI_slabs = trclnc_TeI.get_slabs() self.assertEqual(v, len(TeI_slabs)) def test_get_orthogonal_c_slab(self): TeI = Structure.from_file(get_path("icsd_TeI.cif"), primitive=False) trclnc_TeI = SlabGenerator(TeI, (0, 0, 1), 10, 10) TeI_slabs = trclnc_TeI.get_slabs() slab = TeI_slabs[0] norm_slab = slab.get_orthogonal_c_slab() self.assertAlmostEqual(norm_slab.lattice.angles[0], 90) self.assertAlmostEqual(norm_slab.lattice.angles[1], 90) class FuncTest(PymatgenTest): def setUp(self): self.cscl = self.get_structure("CsCl") self.lifepo4 = self.get_structure("LiFePO4") self.tei = Structure.from_file(get_path("icsd_TeI.cif"), primitive=False) self.LiCoO2 = Structure.from_file(get_path("icsd_LiCoO2.cif"), primitive=False) self.p1 = Structure(Lattice.from_parameters(3, 4, 5, 31, 43, 50), ["H", "He"], [[0, 0, 0], [0.1, 0.2, 0.3]]) self.graphite = self.get_structure("Graphite") def test_get_symmetrically_distinct_miller_indices(self): indices = get_symmetrically_distinct_miller_indices(self.cscl, 1) self.assertEqual(len(indices), 3) indices = get_symmetrically_distinct_miller_indices(self.cscl, 2) self.assertEqual(len(indices), 6) self.assertEqual(len(get_symmetrically_distinct_miller_indices( self.lifepo4, 1)), 7) # The TeI P-1 structure should have 13 unique millers (only inversion # symmetry eliminates pairs) indices = get_symmetrically_distinct_miller_indices(self.tei, 1) self.assertEqual(len(indices), 13) # P1 and P-1 should have the same # of miller indices since surfaces # always have inversion symmetry. indices = get_symmetrically_distinct_miller_indices(self.p1, 1) self.assertEqual(len(indices), 13) indices = get_symmetrically_distinct_miller_indices(self.graphite, 2) self.assertEqual(len(indices), 12) def test_generate_all_slabs(self): slabs = generate_all_slabs(self.cscl, 1, 10, 10) # Only three possible slabs, one each in (100), (110) and (111). self.assertEqual(len(slabs), 3) slabs = generate_all_slabs(self.cscl, 1, 10, 10, bonds={("Cs", "Cl"): 4}) # No slabs if we don't allow broken Cs-Cl self.assertEqual(len(slabs), 0) slabs = generate_all_slabs(self.cscl, 1, 10, 10, bonds={("Cs", "Cl"): 4}, max_broken_bonds=100) self.assertEqual(len(slabs), 3) slabs1 = generate_all_slabs(self.lifepo4, 1, 10, 10, tol=0.1, bonds={("P", "O"): 3}) self.assertEqual(len(slabs1), 4) slabs2 = generate_all_slabs(self.lifepo4, 1, 10, 10, bonds={("P", "O"): 3, ("Fe", "O"): 3}) self.assertEqual(len(slabs2), 0) # There should be only one possible stable surfaces, all of which are # in the (001) oriented unit cell slabs3 = generate_all_slabs(self.LiCoO2, 1, 10, 10, bonds={("Co", "O"): 3}) self.assertEqual(len(slabs3), 1) mill = (0, 0, 1) for s in slabs3: self.assertEqual(s.miller_index, mill) if __name__ == "__main__": unittest.main()<|fim▁end|>
else: #Cubic lattice is compatible with all other space groups. latt = Lattice.cubic(5) s = Structure.from_spacegroup(i, latt, ["H"], [[0, 0, 0]])
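# Illustrative usage sketch of the API under test (argument order as in the calls
# above: structure, miller index, min slab size, min vacuum size; values arbitrary):
#
#   fcc = Structure.from_spacegroup("Fm-3m", Lattice.cubic(3), ["Fe"], [[0, 0, 0]])
#   gen = SlabGenerator(fcc, (1, 1, 1), 10, 10)
#   slab = gen.get_slab()    # one termination of the (111) surface
#   slabs = gen.get_slabs()  # all distinct terminations at the default tolerance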
<|file_name|>lifecycle-core.js<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2005-2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ var LifecycleAPI = {}; var LifecycleUtils = {}; (function() { var configMap = {}; var eventMap = {}; var dataMap = {}; var lifecycleImpl = {}; var currentLifecycle; var constants = LifecycleAPI.constants = {}; //Sgort hand reference constants.API_ENDPOINT = 'lifecycle-api'; constants.API_LC_DEFINITION = 'lifecycle-definition-api'; constants.API_BASE = 'apiLCBase'; constants.API_CHANGE_STATE = 'apiChangeState'; constants.API_FETCH_STATE = 'apiFetchState'; constants.API_FETCH_HISTORY = 'apiFetchHistory'; constants.API_UPDATE_CHECKLIST = 'apiUpdateChecklist'; constants.UI_LIFECYCLE_SELECT_ID = '#lifecycle-selector'; constants.UI_LIFECYCLE_SELECT_BOX = 'ul.lifecycle-dropdown-menu li a'; constants.CONTAINER_SVG = 'svgContainer'; constants.CONTAINER_GRAPH = 'graphContainer'; constants.CONTAINER_LC_ACTION_AREA = 'lifecycleActionArea'; constants.CONTAINER_RENDERING_AREA = 'lifecycleRenderingArea'; constants.CONTAINER_CHECKLIST_AREA = 'lifecycleChecklistArea'; constants.CONTAINER_CHECKLIST_OVERLAY = 'lifecycleChecklistBlock'; constants.CONTAINER_LC_ACTION_OVERLAY = 'lifecycleActionOverlay'; constants.CONTAINER_HISTORY_AREA = 'lifecycleHistoryArea'; constants.CONTAINER_INFORMATION_AREA = 'lifecycleInformationArea'; constants.CONTAINER_TRANSITION_UI_AREA = 'lifecycleTransitionUIArea'; constants.CONTAINER_LC_NOTIFICATIONS_AREA = 'lifecycleNotificationsArea'; constants.CONTAINER_LC_GLOBAL_NOTIFICATIONS_AREA = 'lifecycleGlobalNotificationsArea'; constants.CONTAINER_LC_TRANSITION_INPUTS_FIELDS_AREA = 'lifecycleTransitionInputsFieldsArea'; constants.CONTAINER_LC_TRANSITION_INPUTS_ACTIONS_AREA = 'lifecycleTransitionInputsActionsArea'; constants.CONTAINER_LC_TRANSITION_INPUTS_FIELDS_FORM = 'lifecycleTRansitionInputsFieldsForm'; constants.TEMPLATE_NOTIFICATION_ERROR = 'lifecycleTemplateNotficationError'; constants.TEMPLATE_NOTIFICATION_INFO = 'lifecycleTemplateNotificationInfo'; constants.TEMPLATE_NOTIFICATION_WARN = 'lifecycleTemplateNotificationWarn'; constants.TEMPLATE_NOTIFICATION_SUCCESS = 'lifecycleTemplateNotificationSuccess'; constants.INPUT_TEXTAREA_LC_COMMENT = 'lifecycleCommentTextArea'; constants.EVENT_LC_LOAD = 'event.lc.loaded'; constants.EVENT_LC_UNLOAD = 'event.lc.unload'; constants.EVENT_FETCH_STATE_START = 'event.fetch.state.start'; constants.EVENT_FETCH_STATE_SUCCESS = 'event.fetch.state.success'; constants.EVENT_FETCH_STATE_FAILED = 'event.fetch.state.failed'; constants.EVENT_STATE_CHANGE = 'event.state.change'; constants.EVENT_ACTION_START = 'event.action.invoked'; constants.EVENT_ACTION_FAILED = 'event.action.failed'; constants.EVENT_ACTION_SUCCESS = 'event.action.success'; constants.EVENT_UPDATE_CHECKLIST_START = 'event.update.checklist.start'; constants.EVENT_UPDATE_CHECKLIST_SUCCESS = 'event.update.checklist.success'; constants.EVENT_UPDATE_CHECKLIST_FAILED = 
'event.update.checklist.failed'; constants.EVENT_FETCH_HISTORY_START = 'event.fetch.history.start'; constants.EVENT_FETCH_HISTORY_SUCCESS = 'event.fetch.history.success'; constants.EVENT_FETCH_HISTORY_FAILED = 'event.fetch.history.failed'; constants.HISTORY_ACTION_TRANSITION = 'transition'; constants.HISTORY_ACTION_CHECKITEM = 'check.item'; constants.NOTIFICATION_INFO = 'info'; constants.NOTIFICATION_ERROR = 'error'; constants.NOTIFICATION_WARN = 'warn'; constants.NOTIFICATION_SUCCESS = 'success'; constants.MSG_WARN_CANNOT_CHANGE_STATE = 'msgCannotChangeState'; constants.MSG_SUCCESS_STATE_CHANGE = 'msgStateChangedSuccess'; constants.MSG_ERROR_STATE_CHANGE = 'msgStateChangeError'; constants.MSG_SUCCESS_CHECKLIST_UPDATE = 'msgChecklistUpdateSuccess'; constants.MSG_ERROR_CHECKLIST_UPDATE = 'msgChecklistUpdateError'; constants.CONTAINER_DELETE_ACTION_AREA = 'deleteActionArea'; var id = function(name) { return '#' + name; }; var config = function(key) { return LifecycleAPI.configs(key); };<|fim▁hole|> var partial = function(name) { return '/themes/' + caramel.themer + '/partials/' + name + '.hbs'; }; var renderPartial = function(partialKey, containerKey, data, fn) { fn = fn || function() {}; var partialName = config(partialKey); var containerName = config(containerKey); if (!partialName) { throw 'A template name has not been specified for template key ' + partialKey; } if (!containerName) { throw 'A container name has not been specified for container key ' + containerKey; } var obj = {}; obj[partialName] = partial(partialName); caramel.partials(obj, function() { var template = Handlebars.partials[partialName](data); $(id(containerName)).html(template); fn(containerName); }); }; var processCheckItems = function(stateDetails, datamodel) { if (!stateDetails.hasOwnProperty('datamodel')) { stateDetails.datamodel = {}; } stateDetails.datamodel.checkItems = datamodel.item; for (var index = 0; index < datamodel.item.length; index++) { datamodel.item[index].checked = false; datamodel.item[index].index = index; } }; var processTransitionUI = function(stateDetails, datamodel) { if (!stateDetails.hasOwnProperty('datamodel')) { stateDetails.datamodel = {}; } var ui = datamodel.ui || []; var transitions; stateDetails.datamodel.transitionUIs = []; if (ui.length >= 0) { stateDetails.datamodel.transitionUIs = ui; } transitions = stateDetails.datamodel.transitionUIs; for (var index = 0; index < transitions.length; index++) { transition = transitions[index]; transition.action = transition.forEvent; delete transition.forEvent; } }; var processTransitionInputs = function(stateDetails, datamodel) { if (!stateDetails.hasOwnProperty('datamodel')) { stateDetails.datamodel = {}; } var transitions; var transition; var entry; var executions; var execution; var form; var forms; var map; forms = stateDetails.datamodel.transitionInputs = {}; executions = datamodel.execution || []; for (var index = 0; index < executions.length; index++) { execution = executions[index]; if (execution.transitionInputs) { form = execution.transitionInputs[0]; map = forms[execution.forEvent.toLowerCase()] = {}; map.action = execution.forEvent; map.inputs = form.input; } } }; var processDataModel = function(stateDetails, datamodel) { switch (datamodel.name) { case 'checkItems': processCheckItems(stateDetails, datamodel); break; case 'transitionUI': processTransitionUI(stateDetails, datamodel); break; case 'transitionExecution': processTransitionInputs(stateDetails, datamodel); break; default: break; } }; var triggerEvent = function(eventName, 
eventCb) { if (eventMap.hasOwnProperty(eventName)) { eventCb = eventCb || {}; eventCallbacks = eventMap[eventName]; console.log('emiting event::' + eventName + ' [active lifecycle: ' + LifecycleAPI.currentLifecycle() + ' ]'); for (var index = 0; index < eventCallbacks.length; index++) { eventCallbacks[index](eventCb); } } else { console.log('no event listeners for event :: ' + eventName); } }; /** * Converts the JSON definition returned by the lifecycles API * into a well structured JSOn object * @param {[type]} data [description] * @return {[type]} [description] */ LifecycleUtils.buildStateMapFromDefinition = function(data) { var definition = data.data.definition.configuration.lifecycle.scxml.state; var initialState = data.data.definition.configuration.lifecycle.scxml.initialstate; var stateMap = {}; var state; var stateDetails; var nodeCount = 0; var datamodels; var datamodel; var transition; stateMap.states = {}; stateMap.initialState = initialState ? initialState.toLowerCase() : initialState; for (var stateKey in definition) { stateDetails = definition[stateKey]; state = stateMap.states[stateKey] = {}; state.id = stateKey; state.label = stateDetails.id; state.transitions = stateDetails.transition || []; stateDetails.datamodel = stateDetails.datamodel ? stateDetails.datamodel : []; datamodels = stateDetails.datamodel.data || []; //Convert the target states to lower case for (var index = 0; index < state.transitions.length; index++) { transition = state.transitions[index]; transition.target = transition.target.toLowerCase(); } //Process the data model for (var dIndex = 0; dIndex < datamodels.length; dIndex++) { datamodel = datamodels[dIndex]; processDataModel(state, datamodel); } nodeCount++; } return stateMap; }; /** * Returns meta information on the current asset * @return {[type]} [description] */ LifecycleUtils.currentAsset = function() { return store.publisher.lifecycle; }; LifecycleUtils.config = function(key) { return LifecycleAPI.configs(key); }; LifecycleAPI.configs = function() { if ((arguments.length == 1) && (typeof arguments[0] == 'object')) { configMap = arguments[0] } else if ((arguments.length = 1) && (typeof arguments[0] == 'string')) { return configMap[arguments[0]]; } else { return configMap; } }; LifecycleAPI.event = function() { var eventName = arguments[0]; var eventCb = arguments[1]; var eventCallbacks; if (arguments.length === 1) { triggerEvent(eventName, eventCb); } else if ((arguments.length === 2) && (typeof eventCb === 'object')) { triggerEvent(eventName, eventCb); } else if ((arguments.length === 2) && (typeof eventCb === 'function')) { if (!eventMap.hasOwnProperty(eventName)) { eventMap[eventName] = []; } eventMap[eventName].push(eventCb); } }; LifecycleAPI.data = function() { var dataKey = arguments[0]; var data = arguments[1]; if (arguments.length == 1) { return dataMap[dataKey]; } else if (arguments.length == 2) { dataMap[dataKey] = data; } }; LifecycleAPI.lifecycle = function() { var name = arguments[0]; var impl = arguments[1]; if (arguments.length == 0) { var currentLC = LifecycleAPI.currentLifecycle(); return LifecycleAPI.lifecycle(currentLC); } else if ((arguments.length === 1) && (typeof name === 'string')) { var impl = LifecycleAPI.data(name); if (!impl) { impl = new LifecycleImpl({ name: name }); LifecycleAPI.data(name, impl); } return impl; } else if ((arguments.length === 2) && (typeof impl === 'object')) { //Allow method overiding } else { throw 'Invalid lifecycle name provided'; } }; LifecycleAPI.currentLifecycle = function() { if 
(arguments.length === 1) { currentLifecycle = arguments[0]; } else { return currentLifecycle; } }; LifecycleAPI.notify = function(msg, options) { options = options || {}; var global = options.global ? options.global : false; var container = constants.CONTAINER_LC_NOTIFICATIONS_AREA; var notificationType = options.type ? options.type : constants.NOTIFICATION_INFO; var partial = constants.TEMPLATE_NOTIFICATION_INFO; if (global) { container = constants.CONTAINER_LC_GLOBAL_NOTIFICATIONS_AREA; } switch (notificationType) { case constants.NOTIFICATION_WARN: partial = constants.TEMPLATE_NOTIFICATION_WARN; break; case constants.NOTIFICATION_ERROR: partial = constants.TEMPLATE_NOTIFICATION_ERROR; break; case constants.NOTIFICATION_SUCCESS: partial = constants.TEMPLATE_NOTIFICATION_SUCCESS; default: break; } //Clear existing content $(id(container)).html(''); renderPartial(partial, container, { msg: msg }, function(container) { $(id(container)).fadeIn(5000); }); }; function LifecycleImpl(options) { options = options || {}; this.lifecycleName = options.name ? options.name : null; this.currentState = null; this.previousState = null; this.rawAPIDefinition = null; this.stateMap = null; this.dagreD3GraphObject = null; this.renderingSite; this.history = []; } LifecycleImpl.prototype.load = function() { var promise; if (!this.rawAPIDefinition) { var that = this; //Fetch the lifecycle definition promise = $.ajax({ url: this.queryDefinition(), success: function(data) { that.rawAPIDefinition = data; that.processDefinition(); that.currentState = that.stateMap.initialState; LifecycleAPI.currentLifecycle(that.lifecycleName); //Obtain the asset current state from the code block,if not set it to the initial state LifecycleAPI.event(constants.EVENT_LC_LOAD, { lifecycle: that.lifecycleName }); that.fetchState(); }, error: function() { alert('Failed to load definition'); } }); } else { LifecycleAPI.currentLifecycle(this.lifecycleName); //If the definition is present then the lifecycle has already been loaded LifecycleAPI.event(constants.EVENT_LC_LOAD, { lifecycle: this.lifecycleName }); this.fetchState(); } }; LifecycleAPI.unloadActiveLifecycle = function() { LifecycleAPI.event(constants.EVENT_LC_UNLOAD); }; LifecycleImpl.prototype.resolveRenderingSite = function() { this.renderingSite = {}; this.renderingSite.svgContainer = LifecycleAPI.configs(constants.CONTAINER_SVG); this.renderingSite.graphContainer = LifecycleAPI.configs(constants.CONTAINER_GRAPH); }; LifecycleImpl.prototype.processDefinition = function() { this.stateMap = LifecycleUtils.buildStateMapFromDefinition(this.rawAPIDefinition); }; LifecycleImpl.prototype.render = function() { this.resolveRenderingSite(); this.renderInit(); this.fillGraphData(); this.style(); this.renderFinish(); }; LifecycleImpl.prototype.renderInit = function() { this.dagreD3GraphObject = new dagreD3.graphlib.Graph().setGraph({}); if (!this.renderingSite) { throw 'Unable to render lifecycle as renderingSite details has not been provided'; } }; LifecycleImpl.prototype.renderFinish = function() { var g = this.dagreD3GraphObject; var svgContainer = this.renderingSite.svgContainer; var graphContainer = this.renderingSite.graphContainer; d3.select(svgContainer).append(graphContainer); var svg = d3.select(svgContainer), inner = svg.select(graphContainer); // Set up zoom support var zoom = d3.behavior.zoom().on("zoom", function() { inner.attr("transform", "translate(" + d3.event.translate + ")" + "scale(" + d3.event.scale + ")"); }); svg.call(zoom); // Create the renderer var render = new 
dagreD3.render(); // Run the renderer. This is what draws the final graph. render(inner, g); // Center the graph var initialScale = 1.2; zoom.translate([($(svgContainer).width() - g.graph().width * initialScale) / 2, 20]).scale(initialScale).event(svg); svg.attr('height', g.graph().height * initialScale + 40); }; LifecycleImpl.prototype.fillGraphData = function() { var state; var transition; var source; var stateMap = this.stateMap; var g = this.dagreD3GraphObject; for (var key in stateMap.states) { state = stateMap.states[key]; g.setNode(key, { label: state.id.toUpperCase(), shape: 'rect', labelStyle: 'font-size: 12px;font-weight: lighter;fill: rgb(51, 51, 51);' }); } //Add the edges for (key in stateMap.states) { state = stateMap.states[key]; source = key; for (var index = 0; index < state.transitions.length; index++) { transition = state.transitions[index]; g.setEdge(source, transition.target, { label: transition.event.toUpperCase(), lineInterpolate: 'basis', labelStyle: 'font-size: 12px;font-weight: lighter;fill: rgb(255, 255, 255);' }); } } }; LifecycleImpl.prototype.style = function() { var g = this.dagreD3GraphObject; // Set some general styles g.nodes().forEach(function(v) { var node = g.node(v); node.rx = node.ry = 0; }); }; LifecycleImpl.prototype.queryDefinition = function() { var baseURL = LifecycleAPI.configs(constants.API_LC_DEFINITION); return caramel.context + baseURL + '/' + this.lifecycleName; }; LifecycleImpl.prototype.urlChangeState = function(data) { var apiBase = LifecycleUtils.config(constants.API_BASE); var apiChangeState = LifecycleUtils.config(constants.API_CHANGE_STATE); var asset = LifecycleUtils.currentAsset(); if ((!asset) || (!asset.id)) { throw 'Unable to locate details about asset'; } return caramel.url(apiBase + '/' + asset.id + apiChangeState + '?type=' + asset.type + '&lifecycle=' + this.lifecycleName + '&nextAction=' + data.nextAction); }; LifecycleImpl.prototype.urlFetchState = function() { var apiBase = LifecycleUtils.config(constants.API_BASE); var apiChangeState = LifecycleUtils.config(constants.API_FETCH_STATE); var asset = LifecycleUtils.currentAsset(); if ((!asset) || (!asset.id)) { throw 'Unable to locate details about asset'; } return caramel.url(apiBase + '/' + asset.id + apiChangeState + '?type=' + asset.type + '&lifecycle=' + this.lifecycleName); }; LifecycleImpl.prototype.urlUpdateChecklist = function() { var apiBase = LifecycleUtils.config(constants.API_BASE); var apiUpdateChecklist = LifecycleUtils.config(constants.API_UPDATE_CHECKLIST); var asset = LifecycleUtils.currentAsset(); if ((!asset) || (!asset.id)) { throw 'Unable to locate details about asset'; } return caramel.url(apiBase + '/' + asset.id + apiUpdateChecklist + '?type=' + asset.type + '&lifecycle=' + this.lifecycleName); }; LifecycleImpl.prototype.urlFetchHistory = function() { var apiBase = LifecycleUtils.config(constants.API_BASE); var apiFetchHistory = LifecycleUtils.config(constants.API_FETCH_HISTORY); var asset = LifecycleUtils.currentAsset(); if ((!asset) || (!asset.id)) { throw 'Unable to locate details about asset'; } return caramel.url(apiBase + '/' + asset.id + apiFetchHistory + '?type=' + asset.type + '&lifecycle=' + this.lifecycleName); }; LifecycleImpl.prototype.checklist = function() { var state = this.state(this.currentState); var datamodel; if (arguments.length === 1) { console.log('changing checklist state'); datamodel = (state.datamodel) ? 
state.datamodel : (state.datamodel = {}); datamodel.checkItems = arguments[0]; } else { datamodel = state.datamodel || {}; return datamodel.checkItems ? datamodel.checkItems : []; } }; /** * This method returns the available actions of a given lifecycle state * if required state is not provided it is assumed to be current state * Note : as this method only returns pre-set allowed actions for the current state, * allowed actions should be set calling setAllowedActions(actions) */ LifecycleImpl.prototype.actions = function() { //Assume that a state has not been provided var currentState = this.currentState; if ((arguments.length === 1) && (typeof arguments[0] === 'string')) { currentState = arguments[0]; } var state = this.stateMap.states[currentState] || {}; var transitions = state.transitions || []; var actions = []; var transition; for (var index = 0; index < transitions.length; index++) { transition = transitions[index]; if (currentState == this.currentState) { if (state.allowedActions && state.allowedActions[transition.event]){ actions.push(transition.event); } } else { actions.push(transition.event); } } return actions; }; LifecycleImpl.prototype.setAllowedActions = function(actions) { var currentState = this.currentState; var state = this.stateMap.states[currentState] || {}; state.allowedActions = actions; return state.allowedActions; }; LifecycleImpl.prototype.nextStateByAction = function(action, state) { //Get tinformation about the state var stateDetails = this.state(state); var transitions = stateDetails.transitions || []; var transition; var nextState; for (var index = 0; index < transitions.length; index++) { transition = transitions[index]; if (transition.event.toLowerCase() === action.toLowerCase()) { nextState = transition.target; return nextState; } } return nextState; }; LifecycleImpl.prototype.invokeAction = function() { var action = arguments[0]; var comment = arguments[1]; var optionalArguments = arguments[2]; var nextState; var data = {}; if (!action) { throw 'Attempt to invoke an action without providing the action'; return; } nextState = this.nextStateByAction(action, this.currentState); if (!nextState) { throw 'Unable to locate the next state for the given action::' + action; } data.nextState = nextState; //Check if the action is one of the available actions for the current state var availableActions = this.actions(this.currentState); if ((availableActions.indexOf(action, 0) <= -1)) { throw 'Attempt to invoke an action (' + action + ') which is not available for the current state : ' + this.currentState; } if (comment) { data.comment = comment; } if (optionalArguments) { data.arguments = optionalArguments; } if (arguments[0]){ data.nextAction = arguments[0]; } //Determine the next state LifecycleAPI.event(constants.EVENT_ACTION_START); var that = this; $.ajax({ url: this.urlChangeState(data), type: 'POST', data: JSON.stringify(data), contentType: 'application/json', success: function(data) { that.previousState = that.currentState; that.currentState = data.data.newState; var traversableStates = data.data.traversableStates || []; //If next states are not returned then lifecycle //actions are not permitted if (traversableStates.length === 0) { that.isLCActionsPermitted = false; } LifecycleAPI.event(constants.EVENT_ACTION_SUCCESS); LifecycleAPI.event(constants.EVENT_STATE_CHANGE); that.fetchState(); }, error: function (jqXHR, textStatus, errorThrown) { LifecycleAPI.event(constants.EVENT_ACTION_FAILED, jqXHR.responseJSON); } }); }; LifecycleImpl.prototype.updateChecklist 
= function(checklistItemIndex, checked) { var data = {}; var entry = {}; entry.index = checklistItemIndex; entry.checked = checked; data.checklist = []; data.checklist.push(entry); LifecycleAPI.event(constants.EVENT_UPDATE_CHECKLIST_START); var that = this; $.ajax({ type: 'POST', url: this.urlUpdateChecklist(), data: JSON.stringify(data), contentType: 'application/json', success: function() { //Update the internal check list items LifecycleAPI.event(constants.EVENT_UPDATE_CHECKLIST_SUCCESS); that.fetchState(); }, error: function() { LifecycleAPI.event(constants.EVENT_UPDATE_CHECKLIST_FAILED); } }); }; LifecycleImpl.prototype.fetchState = function() { LifecycleAPI.event(constants.EVENT_FETCH_STATE_START); var that = this; $.ajax({ url: this.urlFetchState(), success: function(data) { var data = data.data; that.previousState = that.currentState; if (!data.id) { LifecycleAPI.event(constants.EVENT_FETCH_STATE_FAILED); return; } that.currentState = data.id.toLowerCase(); that.isLCActionsPermitted = data.isLCActionsPermitted; that.isDeletable = data.isDeletable; for (var index = 0; index < data.checkItems.length; index++) { data.checkItems[index].index = index; } that.checklist(data.checkItems); that.setAllowedActions(data.approvedActions); LifecycleAPI.event(constants.EVENT_FETCH_STATE_SUCCESS); }, error: function() { LifecycleAPI.event(constants.EVENT_FETCH_STATE_FAILED); } }) }; LifecycleImpl.prototype.userPermited = function() { return this.isLCActionsPermitted; }; LifecycleImpl.prototype.processHistory = function(data) { console.log('### Processing history ###'); var entry; var historyEntry; this.history = []; for (var index = 0; index < data.length; index++) { entry = data[index]; historyEntry = {}; historyEntry.state = entry.state; historyEntry.timestamp = entry.timestamp; historyEntry.user = entry.user; historyEntry.actionType = constants.HISTORY_ACTION_CHECKITEM; historyEntry.comment = entry.comment; historyEntry.hasComment = false; if (historyEntry.comment) { historyEntry.hasComment = true; } historyEntry.dateOfTransition = entry.dateofTransition; //Check if it is a state change if (entry.targetState) { historyEntry.targetState = entry.targetState; historyEntry.actionType = constants.HISTORY_ACTION_TRANSITION; } this.history.push(historyEntry); } }; LifecycleImpl.prototype.fetchHistory = function() { LifecycleAPI.event(constants.EVENT_FETCH_HISTORY_START); var that = this; $.ajax({ url: this.urlFetchHistory(), success: function(data) { var data = data.data || []; // that.history = []; //Reset the history // for (var index = 0; index < data.length; index++) { // that.history.push(data[index]); // } that.processHistory(data); LifecycleAPI.event(constants.EVENT_FETCH_HISTORY_SUCCESS); }, error: function() { LifecycleAPI.event(constants.EVENT_FETCH_HISTORY_FAILED); } }) }; LifecycleImpl.prototype.nextStates = function() { //Assume that a state has not been provided var currentState = this.currentState; if ((arguments.length === 1) && (typeof arguments[0] === 'string')) { currentState = arguments[0]; } var state = this.stateMap.states[currentState] || {}; var transitions = state.transitions || []; var transition; var states = []; for (var index = 0; index < transitions.length; index++) { transition = transitions[index]; states.push(transition.target); } return states; }; LifecycleImpl.prototype.state = function(name) { return this.stateMap.states[name]; }; LifecycleImpl.prototype.stateNode = function(name) { return this.dagreD3GraphObject.node(name); }; LifecycleImpl.prototype.changeState = 
function(nextState) { this.currentState = nextState; LifecycleAPI.event(constants.EVENT_STATE_CHANGE); }; LifecycleImpl.prototype.transitionUIs = function() { var state = this.currentState; var action; var stateDetails; var transition; var transitionMappedToAction; var transitionUI; if (arguments.length === 1) { state = arguments[0]; } if (arguments.length === 2) { action = arguments[1]; } stateDetails = this.state(state) || {}; transitionUIs = (stateDetails.datamodel) ? stateDetails.datamodel.transitionUIs : []; if (!action) { return transitionUIs; } if (!transitionUIs) { return []; } //Find the transition UI for the provided action for (var index = 0; index < transitionUIs.length; index++) { transition = transitionUIs[index]; if (transition.action.toLowerCase() === action.toLowerCase()) { transitionMappedToAction = transition; } } return transitionMappedToAction; }; LifecycleImpl.prototype.transitionInputs = function(action) { var currentState = this.currentState; var state = this.state(currentState); var stateDataModel = state.datamodel || {}; var transitionInputs = stateDataModel.transitionInputs || {}; var targetAction = action.toLowerCase(); if (transitionInputs.hasOwnProperty(targetAction)) { return transitionInputs[targetAction]; } return null; // return { // "action": "Promote", // "inputs": [{ // "name": "id", // "type": "text" // }] // }; }; LifecycleImpl.prototype.highlightCurrentState = function() { var currentStateNode = this.stateNode(this.currentState); var previousStateNode; selectNode(currentStateNode.elem); if ((this.previousState) && (this.previousState !== this.currentState)) { previousStateNode = this.stateNode(this.previousState); unselectNode(previousStateNode.elem); } }; var selectNode = function(elem) { var rect = $(elem).find('rect'); rect.css('fill', '#3a9ecf'); rect.css('stroke', '#3a9ecf'); }; var unselectNode = function(elem) { var rect = $(elem).find('rect'); rect.css('fill', '#f9f9f9'); rect.css('stroke', '#f9f9f9'); }; }());<|fim▁end|>
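// Illustrative usage sketch (hypothetical consumer code; 'ServiceLifeCycle' is an
// arbitrary lifecycle name): a page registers a listener with the two-argument form
// of LifecycleAPI.event, then loads a lifecycle, which fires EVENT_LC_LOAD and the
// state-fetch events handled above.
//
//   LifecycleAPI.event(LifecycleAPI.constants.EVENT_STATE_CHANGE, function() {
//       LifecycleAPI.lifecycle().highlightCurrentState();
//   });
//   LifecycleAPI.lifecycle('ServiceLifeCycle').load();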
<|file_name|>ipset.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # Copyright (C) 2015 Red Hat, Inc. # # Authors: # Thomas Woerner <[email protected]> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # import os.path from firewall.core.prog import runProg from firewall.core.logger import log from firewall.functions import tempFile, readfile from firewall.config import COMMANDS from firewall.errors import FirewallError, INVALID_NAME, INVALID_TYPE IPSET_MAXNAMELEN = 32 IPSET_TYPES = [ # bitmap and set types are currently not supported # "bitmap:ip", # "bitmap:ip,mac", # "bitmap:port", # "list:set", "hash:ip", #"hash:ip,port", #"hash:ip,port,ip", #"hash:ip,port,net", #"hash:ip,mark", "hash:net", #"hash:net,net", #"hash:net,port", #"hash:net,port,net", #"hash:net,iface", "hash:mac", ] IPSET_CREATE_OPTIONS = { "family": "inet|inet6", "hashsize": "value", "maxelem": "value", "timeout": "value in secs", # "counters": None, # "comment": None, } class ipset: def __init__(self): self._command = COMMANDS["ipset"] def __run(self, args): # convert to string list _args = ["%s" % item for item in args] log.debug2("%s: %s %s", self.__class__, self._command, " ".join(_args)) (status, ret) = runProg(self._command, _args) if status != 0: raise ValueError("'%s %s' failed: %s" % (self._command, " ".join(_args), ret)) return ret def check_name(self, name): if len(name) > IPSET_MAXNAMELEN: raise FirewallError(INVALID_NAME, "ipset name '%s' is not valid" % name) def supported_types(self): ret = { } output = "" try: output = self.__run(["--help"]) except ValueError as e: log.debug1("ipset error: %s" % e) lines = output.splitlines() in_types = False for line in lines: #print(line) if in_types: splits = line.strip().split(None, 2) ret[splits[0]] = splits[2] if line.startswith("Supported set types:"): in_types = True return ret def check_type(self, type_name): if len(type_name) > IPSET_MAXNAMELEN or type_name not in IPSET_TYPES: raise FirewallError(INVALID_TYPE, "ipset type name '%s' is not valid" % type_name) def create(self, set_name, type_name, options=None): self.check_name(set_name) self.check_type(type_name) args = [ "create", set_name, type_name ] if options: for k,v in options.items(): args.append(k) if v != "": args.append(v) return self.__run(args) def destroy(self, set_name): self.check_name(set_name) return self.__run([ "destroy", set_name ]) def add(self, set_name, entry, options=None): args = [ "add", set_name, entry ] if options: args.append("%s" % " ".join(options)) return self.__run(args) def delete(self, set_name, entry, options=None): args = [ "del", set_name, entry ] if options: args.append("%s" % " ".join(options)) return self.__run(args) def test(self, set_name, entry, options=None): args = [ "test", set_name, entry ] if options: args.append("%s" % " ".join(options)) return self.__run(args) def list(self, set_name=None): args = [ "list" ] if set_name: args.append(set_name) return self.__run(args).split() def save(self, set_name=None): args = [ "save" ] if set_name: 
args.append(set_name) return self.__run(args) def restore(self, set_name, type_name, entries, create_options=None, entry_options=None): self.check_name(set_name) self.check_type(type_name) temp_file = tempFile() if ' ' in set_name: set_name = "'%s'" % set_name args = [ "create", set_name, type_name, "-exist" ] if create_options: for k,v in create_options.items(): args.append(k) if v != "": args.append(v) temp_file.write("%s\n" % " ".join(args)) for entry in entries: if ' ' in entry: entry = "'%s'" % entry if entry_options: temp_file.write("add %s %s %s\n" % (set_name, entry, " ".join(entry_options))) else: temp_file.write("add %s %s\n" % (set_name, entry)) temp_file.close() stat = os.stat(temp_file.name) log.debug2("%s: %s restore %s", self.__class__, self._command, "%s: %d" % (temp_file.name, stat.st_size)) args = [ "restore" ] (status, ret) = runProg(self._command, args, stdin=temp_file.name) if log.getDebugLogLevel() > 2: try: lines = readfile(temp_file.name) except: pass else: i = 1 for line in readfile(temp_file.name): log.debug3("%8d: %s" % (i, line), nofmt=1, nl=0) if not line.endswith("\n"): log.debug3("", nofmt=1) i += 1<|fim▁hole|> raise ValueError("'%s %s' failed: %s" % (self._command, " ".join(args), ret)) return ret def flush(self, set_name): args = [ "flush" ] if set_name: args.append(set_name) return self.__run(args) def rename(self, old_set_name, new_set_name): return self.__run([ "rename", old_set_name, new_set_name ]) def swap(self, set_name_1, set_name_2): return self.__run([ "swap", set_name_1, set_name_2 ]) def version(self): return self.__run([ "version" ]) def check_ipset_name(ipset): if len(ipset) > IPSET_MAXNAMELEN: return False return True<|fim▁end|>
os.unlink(temp_file.name) if status != 0:
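# Illustrative usage sketch (requires the ipset binary configured in COMMANDS;
# addresses are documentation values): the wrapper methods above map directly
# onto ipset subcommands.
#
#   s = ipset()
#   s.create("blocklist", "hash:ip", {"family": "inet", "timeout": "600"})
#   s.add("blocklist", "192.0.2.1")
#   s.restore("blocklist", "hash:ip", ["192.0.2.2", "192.0.2.3"])
#   s.destroy("blocklist")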
<|file_name|>intro.cpp<|end_file_name|><|fim▁begin|>/* ScummVM - Graphic Adventure Engine * * ScummVM is the legal property of its developers, whose names * are too numerous to list here. Please refer to the COPYRIGHT * file distributed with this source distribution. * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * */<|fim▁hole|> * This code is based on Labyrinth of Time code with assistance of * * Copyright (c) 1993 Terra Nova Development * Copyright (c) 2004 The Wyrmkeep Entertainment Co. * */ #include "lab/lab.h" #include "lab/anim.h" #include "lab/dispman.h" #include "lab/eventman.h" #include "lab/intro.h" #include "lab/music.h" #include "lab/resource.h" #include "lab/utils.h" namespace Lab { Intro::Intro(LabEngine *vm) : _vm(vm) { _quitIntro = false; _introDoBlack = false; _font = _vm->_resource->getFont("F:Map.fon"); } Intro::~Intro() { _vm->_graphics->freeFont(&_font); } void Intro::introEatMessages() { while (1) { IntuiMessage *msg = _vm->_event->getMsg(); if (_vm->shouldQuit()) { _quitIntro = true; return; } if (!msg) return; if ((msg->_msgClass == kMessageRightClick) || ((msg->_msgClass == kMessageRawKey) && (msg->_code == Common::KEYCODE_ESCAPE))) _quitIntro = true; } } void Intro::doPictText(const Common::String filename, bool isScreen) { Common::String path = Common::String("Lab:rooms/Intro/") + filename; uint timeDelay = (isScreen) ? 35 : 7; _vm->updateEvents(); if (_quitIntro) return; uint32 lastMillis = 0; bool drawNextText = true; bool doneFl = false; bool begin = true; Common::File *textFile = _vm->_resource->openDataFile(path); char *textBuffer = new char[textFile->size()]; textFile->read(textBuffer, textFile->size()); delete textFile; const char *curText = textBuffer; while (1) { if (drawNextText) { if (begin) begin = false; else if (isScreen) _vm->_graphics->fade(false); if (isScreen) { _vm->_graphics->rectFillScaled(10, 10, 310, 190, 7); curText += _vm->_graphics->flowText(_font, _vm->_isHiRes ? 
0 : -1, 5, 7, false, false, true, true, _vm->_utils->vgaRectScale(14, 11, 306, 189), curText); _vm->_graphics->fade(true); } else curText += _vm->_graphics->longDrawMessage(Common::String(curText), false); doneFl = (*curText == 0); drawNextText = false; introEatMessages(); if (_quitIntro) { if (isScreen) _vm->_graphics->fade(false); delete[] textBuffer; return; } lastMillis = _vm->_system->getMillis(); } IntuiMessage *msg = _vm->_event->getMsg(); if (_vm->shouldQuit()) { _quitIntro = true; return; } if (!msg) { _vm->updateEvents(); _vm->_anim->diffNextFrame(); uint32 elapsedSeconds = (_vm->_system->getMillis() - lastMillis) / 1000; if (elapsedSeconds > timeDelay) { if (doneFl) { if (isScreen) _vm->_graphics->fade(false); delete[] textBuffer; return; } else { drawNextText = true; } } _vm->waitTOF(); } else { uint32 msgClass = msg->_msgClass; uint16 code = msg->_code; if ((msgClass == kMessageRightClick) || ((msgClass == kMessageRawKey) && (code == Common::KEYCODE_ESCAPE))) { _quitIntro = true; if (isScreen) _vm->_graphics->fade(false); delete[] textBuffer; return; } else if ((msgClass == kMessageLeftClick) || (msgClass == kMessageRightClick)) { if (msgClass == kMessageLeftClick) { if (doneFl) { if (isScreen) _vm->_graphics->fade(false); delete[] textBuffer; return; } else drawNextText = true; } introEatMessages(); if (_quitIntro) { if (isScreen) _vm->_graphics->fade(false); delete[] textBuffer; return; } } if (doneFl) { if (isScreen) _vm->_graphics->fade(false); delete[] textBuffer; return; } else drawNextText = true; } } // while(1) } void Intro::musicDelay() { _vm->updateEvents(); if (_quitIntro) return; for (int i = 0; i < 20; i++) { _vm->updateEvents(); _vm->waitTOF(); _vm->waitTOF(); _vm->waitTOF(); } } void Intro::nReadPict(const Common::String filename, bool playOnce) { Common::String finalFileName = Common::String("P:Intro/") + filename; _vm->updateEvents(); introEatMessages(); if (_quitIntro) return; _vm->_anim->_doBlack = _introDoBlack; _vm->_anim->stopDiffEnd(); _vm->_graphics->readPict(finalFileName, playOnce); } void Intro::play() { uint16 palette[16] = { 0x0000, 0x0855, 0x0FF9, 0x0EE7, 0x0ED5, 0x0DB4, 0x0CA2, 0x0C91, 0x0B80, 0x0B80, 0x0B91, 0x0CA2, 0x0CB3, 0x0DC4, 0x0DD6, 0x0EE7 }; _vm->_anim->_doBlack = true; if (_vm->getPlatform() == Common::kPlatformDOS) { nReadPict("EA0"); nReadPict("EA1"); nReadPict("EA2"); nReadPict("EA3"); } else if (_vm->getPlatform() == Common::kPlatformWindows) { nReadPict("WYRMKEEP"); // Wait 4 seconds (400 x 10ms) for (int i = 0; i < 400; i++) { introEatMessages(); if (_quitIntro) break; _vm->_system->delayMillis(10); } } _vm->_graphics->blackAllScreen(); if (_vm->getPlatform() != Common::kPlatformAmiga) _vm->_music->changeMusic("Music:BackGrou", false, false); else _vm->_music->changeMusic("Music:BackGround", false, false); _vm->_anim->_noPalChange = true; if (_vm->getPlatform() == Common::kPlatformDOS) nReadPict("TNDcycle.pic"); else nReadPict("TNDcycle2.pic"); _vm->_anim->_noPalChange = false; _vm->_graphics->_fadePalette = palette; for (int i = 0; i < 16; i++) { palette[i] = ((_vm->_anim->_diffPalette[i * 3] >> 2) << 8) + ((_vm->_anim->_diffPalette[i * 3 + 1] >> 2) << 4) + (_vm->_anim->_diffPalette[i * 3 + 2] >> 2); } _vm->updateEvents(); if (!_quitIntro) _vm->_graphics->fade(true); for (int times = 0; times < 150; times++) { introEatMessages(); if (_quitIntro) break; _vm->updateEvents(); uint16 temp = palette[2]; for (int i = 2; i < 15; i++) palette[i] = palette[i + 1]; palette[15] = temp; _vm->_graphics->setAmigaPal(palette); _vm->waitTOF(); 
} if (!_quitIntro) { _vm->_graphics->fade(false); _vm->_graphics->blackAllScreen(); _vm->updateEvents(); } nReadPict("Title.A"); nReadPict("AB"); musicDelay(); nReadPict("BA"); nReadPict("AC"); musicDelay(); if (_vm->getPlatform() == Common::kPlatformWindows) musicDelay(); // more credits on this page now nReadPict("CA"); nReadPict("AD"); musicDelay(); if (_vm->getPlatform() == Common::kPlatformWindows) musicDelay(); // more credits on this page now nReadPict("DA"); musicDelay(); _vm->updateEvents(); _vm->_graphics->blackAllScreen(); _vm->updateEvents(); _vm->_anim->_noPalChange = true; nReadPict("Intro.1"); _vm->_anim->_noPalChange = false; for (int i = 0; i < 16; i++) { palette[i] = ((_vm->_anim->_diffPalette[i * 3] >> 2) << 8) + ((_vm->_anim->_diffPalette[i * 3 + 1] >> 2) << 4) + (_vm->_anim->_diffPalette[i * 3 + 2] >> 2); } doPictText("i.1", true); if (_vm->getPlatform() == Common::kPlatformWindows) { doPictText("i.2A", true); doPictText("i.2B", true); } _vm->_graphics->blackAllScreen(); _vm->updateEvents(); _introDoBlack = true; nReadPict("Station1"); doPictText("i.3"); nReadPict("Station2"); doPictText("i.4"); nReadPict("Stiles4"); doPictText("i.5"); nReadPict("Stiles3"); doPictText("i.6"); if (_vm->getPlatform() == Common::kPlatformWindows) nReadPict("Platform2"); else nReadPict("Platform"); doPictText("i.7"); nReadPict("Subway.1"); doPictText("i.8"); nReadPict("Subway.2"); doPictText("i.9"); doPictText("i.10"); doPictText("i.11"); if (!_quitIntro) for (int i = 0; i < 50; i++) { for (int idx = (8 * 3); idx < (255 * 3); idx++) _vm->_anim->_diffPalette[idx] = 255 - _vm->_anim->_diffPalette[idx]; _vm->updateEvents(); _vm->waitTOF(); _vm->_graphics->setPalette(_vm->_anim->_diffPalette, 256); _vm->waitTOF(); _vm->waitTOF(); } doPictText("i.12"); doPictText("i.13"); _introDoBlack = false; nReadPict("Daed0"); doPictText("i.14"); nReadPict("Daed1"); doPictText("i.15"); nReadPict("Daed2"); doPictText("i.16"); doPictText("i.17"); doPictText("i.18"); nReadPict("Daed3"); doPictText("i.19"); doPictText("i.20"); nReadPict("Daed4"); doPictText("i.21"); nReadPict("Daed5"); doPictText("i.22"); doPictText("i.23"); doPictText("i.24"); nReadPict("Daed6"); doPictText("i.25"); doPictText("i.26"); nReadPict("Daed7", false); doPictText("i.27"); doPictText("i.28"); _vm->_anim->stopDiffEnd(); nReadPict("Daed8"); doPictText("i.29"); doPictText("i.30"); nReadPict("Daed9"); doPictText("i.31"); doPictText("i.32"); doPictText("i.33"); nReadPict("Daed9a"); nReadPict("Daed10"); doPictText("i.34"); doPictText("i.35"); doPictText("i.36"); nReadPict("SubX"); if (_quitIntro) { _vm->_graphics->rectFill(0, 0, _vm->_graphics->_screenWidth - 1, _vm->_graphics->_screenHeight - 1, 0); _vm->_anim->_doBlack = true; } } } // End of namespace Lab<|fim▁end|>
/*
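# A minimal sketch of how one of the records above could be reassembled,
# assuming the layout shown in this dump: the prompt row carries the file
# name plus the prefix and suffix around <|fim▁hole|>, and the completion
# row that follows holds the text belonging in the hole.
def reassemble(prompt, completion):
    name, rest = prompt.split("<|end_file_name|>", 1)
    name = name.replace("<|file_name|>", "")
    body = rest.replace("<|fim▁begin|>", "")
    prefix, tail = body.split("<|fim▁hole|>", 1)
    suffix = tail.replace("<|fim▁end|>", "")
    return name, prefix + completion + suffix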
<|file_name|>explicit_null.rs<|end_file_name|><|fim▁begin|>use juniper::{ graphql_object, graphql_value, graphql_vars, EmptyMutation, EmptySubscription, GraphQLInputObject, Nullable, Variables, }; pub struct Context; impl juniper::Context for Context {} pub struct Query; #[derive(GraphQLInputObject)] struct ObjectInput { field: Nullable<i32>, } #[graphql_object(context = Context)] impl Query { fn is_explicit_null(arg: Nullable<i32>) -> bool { arg.is_explicit_null() } fn object_field_is_explicit_null(obj: ObjectInput) -> bool { obj.field.is_explicit_null() } } type Schema = juniper::RootNode<'static, Query, EmptyMutation<Context>, EmptySubscription<Context>>; #[tokio::test] async fn explicit_null() { let query = r#" query Foo($emptyObj: ObjectInput!, $literalNullObj: ObjectInput!) { literalOneIsExplicitNull: isExplicitNull(arg: 1) literalNullIsExplicitNull: isExplicitNull(arg: null) noArgIsExplicitNull: isExplicitNull literalOneFieldIsExplicitNull: objectFieldIsExplicitNull(obj: {field: 1}) literalNullFieldIsExplicitNull: objectFieldIsExplicitNull(obj: {field: null}) noFieldIsExplicitNull: objectFieldIsExplicitNull(obj: {}) emptyVariableObjectFieldIsExplicitNull: objectFieldIsExplicitNull(obj: $emptyObj) literalNullVariableObjectFieldIsExplicitNull: objectFieldIsExplicitNull(obj: $literalNullObj) } "#; let schema = &Schema::new( Query, EmptyMutation::<Context>::new(), EmptySubscription::<Context>::new(), ); let vars: Variables = graphql_vars! { "emptyObj": {}, "literalNullObj": {"field": null}, }; assert_eq!( juniper::execute(query, None, &schema, &vars, &Context).await, Ok(( graphql_value!({ "literalOneIsExplicitNull": false, "literalNullIsExplicitNull": true, "noArgIsExplicitNull": false, "literalOneFieldIsExplicitNull": false,<|fim▁hole|> "literalNullVariableObjectFieldIsExplicitNull": true, }), vec![], )), ); }<|fim▁end|>
"literalNullFieldIsExplicitNull": true, "noFieldIsExplicitNull": false, "emptyVariableObjectFieldIsExplicitNull": false,
<|file_name|>sync_client.js<|end_file_name|><|fim▁begin|>import SyncClient from 'sync-client'; const versions = [{<|fim▁hole|> version: 1, stores: { bookmarks: 'id, parentID', folders: 'id, parentID', }, }, { version: 2, stores: { bookmarks: 'id, parentID, *tags', folders: 'id, parentID', tags: 'id', }, }]; export default new SyncClient('BookmarksManager', versions); export { SyncClient };<|fim▁end|>
<|file_name|>DocValuesGroupByOptimizedIteratorTest.java<|end_file_name|><|fim▁begin|>/* * Licensed to Crate under one or more contributor license agreements. * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. Crate licenses this file * to you under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may * obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. * * However, if you have executed another commercial license agreement * with Crate these terms will supersede the license and you may use the * software solely pursuant to the terms of the relevant commercial * agreement. */ package io.crate.execution.engine.collect; import io.crate.breaker.RamAccounting; import io.crate.data.BatchIterator; import io.crate.data.Row; import io.crate.execution.engine.aggregation.impl.SumAggregation; import io.crate.expression.reference.doc.lucene.BytesRefColumnReference; import io.crate.expression.reference.doc.lucene.CollectorContext; import io.crate.expression.reference.doc.lucene.LongColumnReference; import io.crate.expression.reference.doc.lucene.LuceneCollectorExpression; import io.crate.metadata.Functions; import io.crate.metadata.Reference; import io.crate.metadata.ReferenceIdent; import io.crate.metadata.RelationName; import io.crate.metadata.RowGranularity; import io.crate.metadata.functions.Signature; import io.crate.test.integration.CrateDummyClusterServiceUnitTest; import io.crate.testing.TestingRowConsumer; import io.crate.types.DataTypes; import org.apache.lucene.document.Document; import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.ByteBuffersDirectory; import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.index.mapper.NumberFieldMapper; import org.junit.Before; import org.junit.Test; import java.io.IOException; import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import static io.crate.testing.TestingHelpers.createNodeContext; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.instanceOf; public class DocValuesGroupByOptimizedIteratorTest extends CrateDummyClusterServiceUnitTest { private Functions functions; private IndexSearcher indexSearcher; private List<Object[]> rows = List.of( new Object[]{"1", 1L, 1L}, new Object[]{"0", 0L, 2L}, new Object[]{"1", 1L, 3L}, new Object[]{"0", 0L, 4L} ); @Before public void setup() throws IOException { var nodeContext = createNodeContext(); functions = nodeContext.functions(); var indexWriter = new IndexWriter(new ByteBuffersDirectory(), new IndexWriterConfig()); for (var row : rows) { Document doc = new 
Document(); doc.add(new SortedSetDocValuesField("x", BytesRefs.toBytesRef(row[0]))); doc.add(new NumericDocValuesField("y", (Long) row[1])); doc.add(new NumericDocValuesField("z", (Long) row[2])); indexWriter.addDocument(doc); } indexWriter.commit(); indexSearcher = new IndexSearcher(DirectoryReader.open(indexWriter)); } <|fim▁hole|> @Test public void test_group_by_doc_values_optimized_iterator_for_single_numeric_key() throws Exception { SumAggregation<?> sumAggregation = (SumAggregation<?>) functions.getQualified( Signature.aggregate( SumAggregation.NAME, DataTypes.LONG.getTypeSignature(), DataTypes.LONG.getTypeSignature() ), List.of(DataTypes.LONG), DataTypes.LONG ); var aggregationField = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); aggregationField.setName("z"); var sumDocValuesAggregator = sumAggregation.getDocValueAggregator( List.of(DataTypes.LONG), List.of(aggregationField) ); var keyExpressions = List.of(new LongColumnReference("y")); var it = DocValuesGroupByOptimizedIterator.GroupByIterator.forSingleKey( List.of(sumDocValuesAggregator), indexSearcher, new Reference( new ReferenceIdent(RelationName.fromIndexName("test"), "y"), RowGranularity.DOC, DataTypes.LONG, null, null ), keyExpressions, RamAccounting.NO_ACCOUNTING, new MatchAllDocsQuery(), new CollectorContext() ); var rowConsumer = new TestingRowConsumer(); rowConsumer.accept(it, null); assertThat( rowConsumer.getResult(), containsInAnyOrder(new Object[]{0L, 6L}, new Object[]{1L, 4L})); } @Test public void test_group_by_doc_values_optimized_iterator_for_many_keys() throws Exception { SumAggregation<?> sumAggregation = (SumAggregation<?>) functions.getQualified( Signature.aggregate( SumAggregation.NAME, DataTypes.LONG.getTypeSignature(), DataTypes.LONG.getTypeSignature() ), List.of(DataTypes.LONG), DataTypes.LONG ); var aggregationField = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); aggregationField.setName("z"); var sumDocValuesAggregator = sumAggregation.getDocValueAggregator( List.of(DataTypes.LONG), List.of(aggregationField) ); var keyExpressions = List.of(new BytesRefColumnReference("x"), new LongColumnReference("y")); var keyRefs = List.of( new Reference( new ReferenceIdent(RelationName.fromIndexName("test"), "x"), RowGranularity.DOC, DataTypes.STRING, null, null ), new Reference( new ReferenceIdent(RelationName.fromIndexName("test"), "y"), RowGranularity.DOC, DataTypes.LONG, null, null ) ); var it = DocValuesGroupByOptimizedIterator.GroupByIterator.forManyKeys( List.of(sumDocValuesAggregator), indexSearcher, keyRefs, keyExpressions, RamAccounting.NO_ACCOUNTING, new MatchAllDocsQuery(), new CollectorContext() ); var rowConsumer = new TestingRowConsumer(); rowConsumer.accept(it, null); assertThat( rowConsumer.getResult(), containsInAnyOrder(new Object[]{"0", 0L, 6L}, new Object[]{"1", 1L, 4L}) ); } @Test public void test_optimized_iterator_stop_processing_on_kill() throws Exception { Throwable expectedException = stopOnInterrupting(it -> it.kill(new InterruptedException("killed"))); assertThat(expectedException, instanceOf(InterruptedException.class)); } @Test public void test_optimized_iterator_stop_processing_on_close() throws Exception { Throwable expectedException = stopOnInterrupting(BatchIterator::close); assertThat(expectedException, instanceOf(IllegalStateException.class)); } private Throwable stopOnInterrupting(Consumer<BatchIterator<Row>> interrupt) throws Exception { CountDownLatch waitForLoadNextBatch = new CountDownLatch(1); CountDownLatch 
pauseOnDocumentCollecting = new CountDownLatch(1); CountDownLatch batchLoadingCompleted = new CountDownLatch(1); BatchIterator<Row> it = createBatchIterator(() -> { waitForLoadNextBatch.countDown(); try { pauseOnDocumentCollecting.await(5, TimeUnit.SECONDS); } catch (InterruptedException e) { throw new RuntimeException(e); } }); AtomicReference<Throwable> exception = new AtomicReference<>(); Thread t = new Thread(() -> { try { it.loadNextBatch().whenComplete((r, e) -> { if (e != null) { exception.set(e.getCause()); } batchLoadingCompleted.countDown(); }); } catch (Exception e) { exception.set(e); } }); t.start(); waitForLoadNextBatch.await(5, TimeUnit.SECONDS); interrupt.accept(it); pauseOnDocumentCollecting.countDown(); batchLoadingCompleted.await(5, TimeUnit.SECONDS); return exception.get(); } private BatchIterator<Row> createBatchIterator(Runnable onNextReader) { return DocValuesGroupByOptimizedIterator.GroupByIterator.getIterator( List.of(), indexSearcher, List.of(new LuceneCollectorExpression<>() { @Override public void setNextReader(LeafReaderContext context) { onNextReader.run(); } @Override public Object value() { return null; } }), RamAccounting.NO_ACCOUNTING, (states, key) -> { }, (expressions) -> expressions.get(0).value(), (key, cells) -> cells[0] = key, new MatchAllDocsQuery(), new CollectorContext() ); } }<|fim▁end|>
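# The grouped sums the Java test above asserts, reproduced in Python over
# the same four rows from its setup(): summing z per key y gives {0: 6, 1: 4}.
from collections import defaultdict

rows = [("1", 1, 1), ("0", 0, 2), ("1", 1, 3), ("0", 0, 4)]
sums = defaultdict(int)
for x, y, z in rows:
    sums[y] += z
assert dict(sums) == {0: 6, 1: 4}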
<|file_name|>mypydoc.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: iso-8859-1 -*- # """\ # Modifies the pydoc contained in Python to use the member function filelink # for filelink generation, so it can be later overridden. # See also http://bugs.python.org/issue902061 """ # # Copyright (C) 2009 Rene Liebscher # # This program is free software; you can redistribute it and/or modify it under # the terms of the GNU Lesser General Public License as published by the Free # Software Foundation; either version 3 of the License, or (at your option) any # later version. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License along with # this program; if not, see <http://www.gnu.org/licenses/>. # __revision__ = "$Id: mypydoc.py,v 1.9 2009/10/07 20:52:24 rliebscher Exp $" import sys, inspect from string import join, split, strip import pydoc from pydoc import visiblename, pkgutil, getdoc, isdata class MyHTMLDoc(pydoc.HTMLDoc): """Formatter class for HTML documentation.""" def filelink(self, url, path): """Create link to source file.""" return '<a href="file:%s">%s</a>' % (url, path) def docmodule(self, object, name=None, mod=None, *ignored): """Produce HTML documentation for a module object.""" name = object.__name__ # ignore the passed-in name try: all = object.__all__ except AttributeError: all = None parts = split(name, '.') links = [] for i in range(len(parts)-1): links.append( '<a href="%s.html"><font color="#ffffff">%s</font></a>' % (join(parts[:i+1], '.'), parts[i])) linkedname = join(links + parts[-1:], '.') head = '<big><big><strong>%s</strong></big></big>' % linkedname try: path = inspect.getabsfile(object) url = path if sys.platform == 'win32': import nturl2path url = nturl2path.pathname2url(path) # modified filelink = self.filelink(url, path) # end modified except TypeError: filelink = '(built-in)' info = [] if hasattr(object, '__version__'): version = str(object.__version__) if version[:11] == '$' + 'Revision: ' and version[-1:] == '$': version = strip(version[11:-1]) info.append('version %s' % self.escape(version)) if hasattr(object, '__date__'): info.append(self.escape(str(object.__date__))) if info: head = head + ' (%s)' % join(info, ', ') docloc = self.getdocloc(object) if docloc is not None: docloc = '<br><a href="%(docloc)s">Module Docs</a>' % locals() else: docloc = '' result = self.heading( head, '#ffffff', '#7799ee', '<a href=".">index</a><br>' + filelink + docloc) modules = inspect.getmembers(object, inspect.ismodule) classes, cdict = [], {} for key, value in inspect.getmembers(object, inspect.isclass): # if __all__ exists, believe it. Otherwise use old heuristic. if (all is not None or (inspect.getmodule(value) or object) is object): if visiblename(key, all): classes.append((key, value)) cdict[key] = cdict[value] = '#' + key for key, value in classes: for base in value.__bases__: key, modname = base.__name__, base.__module__ module = sys.modules.get(modname) if modname != name and module and hasattr(module, key): if getattr(module, key) is base: if not key in cdict: cdict[key] = cdict[base] = modname + '.html#' + key funcs, fdict = [], {} for key, value in inspect.getmembers(object, inspect.isroutine): # if __all__ exists, believe it. Otherwise use old heuristic. 
if (all is not None or inspect.isbuiltin(value) or inspect.getmodule(value) is object): if visiblename(key, all): funcs.append((key, value)) fdict[key] = '#-' + key if inspect.isfunction(value): fdict[value] = fdict[key] data = [] for key, value in inspect.getmembers(object, isdata): if visiblename(key, all): data.append((key, value)) doc = self.markup(getdoc(object), self.preformat, fdict, cdict) doc = doc and '<tt>%s</tt>' % doc result = result + '<p>%s</p>\n' % doc if hasattr(object, '__path__'): modpkgs = [] for importer, modname, ispkg in pkgutil.iter_modules(object.__path__): modpkgs.append((modname, name, ispkg, 0)) modpkgs.sort() contents = self.multicolumn(modpkgs, self.modpkglink) result = result + self.bigsection( 'Package Contents', '#ffffff', '#aa55cc', contents) elif modules: contents = self.multicolumn( modules, lambda (key, value), s=self: s.modulelink(value)) result = result + self.bigsection( 'Modules', '#ffffff', '#aa55cc', contents) if classes: classlist = map(lambda (key, value): value, classes) contents = [ self.formattree(inspect.getclasstree(classlist, 1), name)] for key, value in classes: contents.append(self.document(value, key, name, fdict, cdict))<|fim▁hole|> result = result + self.bigsection( 'Classes', '#ffffff', '#ee77aa', join(contents)) if funcs: contents = [] for key, value in funcs: contents.append(self.document(value, key, name, fdict, cdict)) result = result + self.bigsection( 'Functions', '#ffffff', '#eeaa77', join(contents)) if data: contents = [] for key, value in data: contents.append(self.document(value, key)) result = result + self.bigsection( 'Data', '#ffffff', '#55aa55', join(contents, '<br>\n')) if hasattr(object, '__author__'): contents = self.markup(str(object.__author__), self.preformat) result = result + self.bigsection( 'Author', '#ffffff', '#7799ee', contents) if hasattr(object, '__credits__'): contents = self.markup(str(object.__credits__), self.preformat) result = result + self.bigsection( 'Credits', '#ffffff', '#7799ee', contents) return result # --------------------------------------- interactive interpreter interface pydoc.html = MyHTMLDoc() if __name__ == '__main__': pydoc.cli()<|fim▁end|>
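# A minimal illustration of the hook the patch above introduces: subclass
# MyHTMLDoc and override filelink(). The link target here is invented.
class RepoLinkDoc(MyHTMLDoc):
    def filelink(self, url, path):
        return '<a href="https://example.org/src/%s">%s</a>' % (url, path)

# pydoc.html = RepoLinkDoc()  # swap in the customized formatter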
<|file_name|>u32.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! Operations and constants for unsigned 32-bits integers (`u32` type) #![doc(primitive = "u32")] <|fim▁hole|>uint_module!(u32, i32, 32)<|fim▁end|>
<|file_name|>test_security.py<|end_file_name|><|fim▁begin|># Copyright 2012-2015 Mattias Fliesberg # # This file is part of opmuse. # # opmuse is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # opmuse is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with opmuse. If not, see <http://www.gnu.org/licenses/>. from . import setup_db, teardown_db from opmuse.security import User, hash_password class TestSecurity: def setup_method(self): setup_db(self) def teardown_method(self): teardown_db(self) def test_login(self): user = self.session.query(User).filter_by(login="admin").one()<|fim▁hole|> hashed = hash_password("admin", user.salt) assert hashed == user.password hashed = hash_password("wrong", user.salt) assert hashed != user.password<|fim▁end|>
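# A sketch of wrapping the behaviour asserted above into a helper; assumes
# the same hash_password(password, salt) signature the test imports.
from opmuse.security import hash_password

def verify_password(user, candidate):
    return hash_password(candidate, user.salt) == user.password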
<|file_name|>server_query.rs<|end_file_name|><|fim▁begin|>use async_graphql::{Context, FieldResult}; use chrono::prelude::*; use eyre::{ // eyre, Result, // Context as _, }; use printspool_json_store::{JsonRow, Record}; // use async_graphql::{ // // ID, // // Context, // FieldResult, // }; // use printspool_json_store::Record as _; use crate::{built_info, server::Server}; #[derive(Default)] pub struct ServerQuery; #[derive(async_graphql::InputObject, Default, Debug)] pub struct FeatureFlagsInput { filter: Option<Vec<String>>, } #[async_graphql::Object] impl ServerQuery { async fn server_version(&self) -> String { // See https://docs.rs/built/0.4.4/built/ let version_number = built_info::GIT_VERSION.unwrap_or("DEV"); let dirty_string = if built_info::GIT_DIRTY.unwrap_or(false) { " + Uncommitted Changes" } else { "" }; <|fim▁hole|> dirty_string = dirty_string, ) } /// Returns the current date time from the server. Useful for determining the connection /// latency. async fn ping(&self) -> DateTime<Utc> { Utc::now() } // TODO: Do we need pending updates still in the new architecture? // hasPendingUpdates #[instrument(skip(self, ctx))] async fn server_name<'ctx>( &self, ctx: &'ctx Context<'_>, ) -> FieldResult<Option<String>> { let db: &crate::Db = ctx.data()?; async move { let servers = sqlx::query_as!( JsonRow, r#" SELECT servers.props FROM servers WHERE (servers.props->'is_self')::boolean IS TRUE "#, ) .fetch_all(db) .await?; let servers = Server::from_rows(servers)?; let name = servers.into_iter().next().map(|server| server.name); Result::<_>::Ok(name) } // log the backtrace which is otherwise lost by FieldResult .await .map_err(|err| { warn!("{:?}", err); err.into() }) } #[instrument(skip(self))] async fn feature_flags( &self, #[graphql(default)] input: FeatureFlagsInput, ) -> FieldResult<Vec<String>> { let mut flags: Vec<String> = vec![ // Feature Flags Go Here! // "slicer".to_string(), ]; if std::env::var("ENABLE_SLICER") == Ok("1".to_string()) { flags.push("slicer".to_string()); } if let Some(filter) = input.filter { flags = flags .into_iter() .filter(|flag| filter.contains(flag)) .collect(); } Ok(flags) } }<|fim▁end|>
// eg. Teg 0.1.0 for linux/x86_64 format!( "Teg {version_number}{dirty_string}", version_number = version_number,
<|file_name|>createDB.py<|end_file_name|><|fim▁begin|>import sqlite3 import sys import os def menu(): sane = 1 while sane == 1: print "[ - ] Please enter absolute path to cred. database to be created: " in_path = raw_input() if os.path.exists(in_path): os.system('cls' if os.name == 'nt' else 'clear') print "[ - ] Invalid path, try again." else: sane = 0 return(in_path) def main(dbPath): createQ = "CREATE TABLE "+'"main"'+" ('pri_Index' INTEGER PRIMARY KEY AUTOINCREMENT, 'identifier' TEXT , 'clearTextP' TEXT , 'srcMD5' TEXT , 'srcSHA1' TEXT , 'srcBCRYPT' TEXT , 'rainTableMD5' TEXT , 'rainTableSHA1' TEXT , 'rainTableBCRYPT' TEXT)" try: db_conn = sqlite3.connect(dbPath) except: print "[ - ] Unable to create, check path and try again." sys.exit(1) cur = db_conn.cursor() cur.execute(createQ) print "[ - ] DB created at "+dbPath+"\nPress enter to exit." <|fim▁hole|> end = raw_input() try: main(menu()) except KeyboardInterrupt: print "[ - ] CTRL+C caught, exiting."<|fim▁end|>
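# How the table created above might be populated; the values and the helper
# name are placeholders, only the schema comes from createQ.
import sqlite3

def add_credential(db_path, identifier, clear_text):
    conn = sqlite3.connect(db_path)
    conn.execute(
        "INSERT INTO main (identifier, clearTextP) VALUES (?, ?)",
        (identifier, clear_text))
    conn.commit()
    conn.close()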
<|file_name|>batch_conversion.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python3 # -*- coding: utf-8 -*- # vim:fenc=utf-8 # # Copyright © 2016 Pi-Yueh Chuang <[email protected]> # # Distributed under terms of the MIT license. """convert the output file in a batch""" import os import os.path as op import sys import argparse if os.getenv("PyFR") is None: raise EnvironmentError("Environment variable PyFR is not set") else: PyFRPath = os.getenv("PyFR") if PyFRPath not in sys.path: sys.path.append(PyFRPath) try: import pyfr import pyfr.writers except ImportError as err: err.msg += "! Please check the path set in the environment variable PyFR." raise def parseArgs(args=sys.argv[1:]): """parse arguments Args: args: list of strings. Default is sys.argv[1:]. Returns: parser.parse_args(args) """ parser = argparse.ArgumentParser( description="2D Cavity Flow Post-Processor") parser.add_argument( "casePath", metavar="path", help="The path to a PyFR case folder", type=str) parser.add_argument( "-s", "--soln-dir", metavar="soln-dir", dest="solnDir", help="The directory (under casePath) containing *.pyfrs files. " + "(Default = solutions)", type=str, default="solutions") parser.add_argument( "-v", "--vtu-dir", metavar="vtu-dir", dest="vtuDir", help="The directory (under casePath) where *.vtu files will be written. " + "If the folder does not exist, the script will create it. " "(Default = vtu)", type=str, default="vtu") parser.add_argument( "-m", "--mesh", metavar="mesh", dest="mesh", help="The mesh file required. " + "The default is to use the first-found .pyfrm file in the case " + "directory. If multiple .pyfrm files exist in the case directory, " "it is suggested to set this argument.", type=str, default=None) parser.add_argument( "-o", "--overwrite", dest="overwrite", help="Whether to overwrite the output files if they already exist.", action="store_true") parser.add_argument( "-d", "--degree", dest="degree", help="The level of mesh.
If the solver uses higher-order " + "polynomials, then it may be necessary to set a larger degree.", type=int, default=0) return parser.parse_args(args) def setup_dirs(args): """set up path to directories necessary Args: args: parsed arguments generated by parser.parse_args()<|fim▁hole|> Returns: argparse.Namespace object with full paths """ # set up the path to case directory args.casePath = os.path.abspath(args.casePath) # set up and check the path to the solution directory args.solnDir = args.casePath + "/" + args.solnDir if not op.isdir(args.solnDir): raise RuntimeError( "The path " + args.solnDir + " does not exist.") # set up the path for .pyfrm file if args.mesh is not None: args.mesh = args.casePath + "/" + args.mesh if not op.isfile(args.mesh): raise RuntimeError( "The input mesh file " + args.mesh + " does not exist.") else: for f in os.listdir(args.casePath): if f.endswith(".pyfrm"): args.mesh = args.casePath + "/" + f if args.mesh is None: raise RuntimeError( "Could not find any .pyfrm file in the case folder " + args.casePath) # set up and create the directory for .vtu files, if it does not exist args.vtuDir = args.casePath + "/" + args.vtuDir if not op.isdir(args.vtuDir): os.mkdir(args.vtuDir) return args def get_pyfrs_list(pyfrsDirPath): """get list of file names that end with .pyfrs in pyfrsDirPath Args: pyfrsDirPath: path to the folder of .pyfrs files Returns: a list of file names """ fileList = [f for f in os.listdir(pyfrsDirPath) if op.splitext(f)[1] == ".pyfrs"] if len(fileList) == 0: raise RuntimeError( "No .pyfrs file was found in the path " + pyfrsDirPath) return fileList def generate_vtu(vtuPath, pyfrsPath, pyfrsList, mesh, overwrite, degree): """generate .vtu files, if they do not exist Args: vtuPath: the path to folder of .vtu files pyfrsPath: the path to .pyfrs files pyfrsList: the list of .pyfrs files to be converted mesh: the .pyfrm file overwrite: whether to overwrite the .vtu file if it already exists """ vtuList = [op.splitext(f)[0]+".vtu" for f in pyfrsList] for i, o in zip(pyfrsList, vtuList): ifile = op.join(pyfrsPath, i) ofile = op.join(vtuPath, o) if op.isfile(ofile) and not overwrite: print("Warning: " + "the vtu file " + o + " exists " + "and won't be overwritten because overwrite=False") else: output_vtu(mesh, ifile, ofile, d=degree) def output_vtu(mesh, iFile, oFile, g=True, p="double", d=0): """convert a single .pyfrs file to .vtu file using PyFR's converter Args: mesh: mesh file (must end with .pyfrm) iFile: input file name (must end with .pyfrs) oFile: output file name (must end with .vtu) g: whether to export gradients p: precision, either "single" or "double" d: degree of the element (set this according to the order of the polynomial) """ writerArgs = argparse.Namespace( meshf=mesh, solnf=iFile, outf=oFile, precision=p, gradients=g, divisor=d) writer = pyfr.writers.get_writer_by_extn(".vtu", writerArgs) print("Converting " + iFile + " to " + oFile) writer.write_out() def get_pyfrs_files(pyfrsDirPath): pass if __name__ == "__main__": args = parseArgs() args = setup_dirs(args) pyfrsList = get_pyfrs_list(args.solnDir) generate_vtu( args.vtuDir, args.solnDir, pyfrsList, args.mesh, args.overwrite, args.degree)<|fim▁end|>
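# A sketch of driving the converter above from another script, using the
# CLI flags it defines; the case layout and paths are placeholders.
import subprocess

subprocess.run(
    ["python3", "batch_conversion.py", "/cases/cavity",
     "--soln-dir", "solutions", "--vtu-dir", "vtu", "--degree", "2"],
    check=True)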
<|file_name|>api_resource.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- <|fim▁hole|> Provides sane defaults and the logic needed to augment these settings with the internal ``class Meta`` used on ``Resource`` subclasses. """ allowed_methods = ['get', 'post', 'put', 'delete', 'patch'] list_allowed_methods = None detail_allowed_methods = None # limit = getattr(settings, 'API_LIMIT_PER_PAGE', 20) urlconf_namespace = None default_format = 'application/json' filtering = {} ordering = [] object_class = None queryset = None fields = [] excludes = [] include_resource_uri = True include_absolute_url = False always_return_data = False api_name = None resource_name = None resp_message = 'Good!' resp_script = None resp_success = True resp_template = 'adminpanel/ap-test.html' resp_type = 'tpl' resp_render_data = None make_function = None def __new__(cls, meta=None): overrides = {} # Handle overrides. if meta: for override_name in dir(meta): # No internals please. if not override_name.startswith('_'): overrides[override_name] = getattr(meta, override_name) allowed_methods = overrides.get('allowed_methods', ['get', 'post', 'put', 'delete', 'patch']) if overrides.get('list_allowed_methods', None) is None: overrides['list_allowed_methods'] = allowed_methods if overrides.get('detail_allowed_methods', None) is None: overrides['detail_allowed_methods'] = allowed_methods return object.__new__(type('ResourceOptions', (cls,), overrides))<|fim▁end|>
class ResourceOptions(object): """ A configuration class for ``Resource``.
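# How the completion above is consumed: attributes of an inner Meta class
# override the defaults, and list/detail methods fall back to
# allowed_methods; the concrete values here are invented.
class Meta:
    resource_name = 'entry'
    allowed_methods = ['get']

opts = ResourceOptions(Meta)
assert opts.resource_name == 'entry'
assert opts.list_allowed_methods == ['get']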
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>DD_SAVE_DIR = '/home/pi/' DD_TIME_FORMAT = '%H:%S-%d-%m-%Y' MOUNT_DIR = '/mnt' FC_SAVE_DIR = '/home/pi/' FC_TIME_FORMAT = '%S:%H-%d-%m-%Y' POP_INDEX = 1 DEBUG = 0 PLANT_LOAD_DIR = '/home/pi/pruebas/'<|fim▁hole|><|fim▁end|>
PLANT_SAVE_DIR = '/'
<|file_name|>fov.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 Jeffery Olson // // Licensed under the 3-Clause BSD License, see LICENSE.txt // at the top-level of this repository. // This file may not be copied, modified, or distributed // except according to those terms. use std::vec::Vec; use std::default::Default; use std::collections::HashSet; use uuid::Uuid; use world::{Payloadable, World, RelativeCoord, TraversalDirection}; use world::TraversalDirection::*; use zone::{Zone, Tile}; pub use self::FovType::*; #[deriving(Clone, Encodable, Decodable, Copy, Show, PartialEq)] pub enum FovType { Blocking, Transparent, Void } impl Default for FovType { fn default() -> FovType { FovType::Void } } impl FovType { pub fn allow_los(&self) -> bool { match *self { Transparent | Void => true, Blocking => false } } } pub trait FovItem { fn get_fov(&self) -> FovType; } pub fn compute<TWorldPayload, TZonePayload, TPayload: Send + Payloadable + FovItem>( world: &World<TWorldPayload, TZonePayload, TPayload>, focus: RelativeCoord, radius: uint, start_ang: &mut [f64], end_ang: &mut [f64]) -> Vec<RelativeCoord> { let mut visible_tiles: HashSet<RelativeCoord> = HashSet::new(); let mut pending_zones = vec!((focus.zone_id, (focus.lx, focus.ly), (focus.gx, focus.gy), radius, Uuid::nil(), NoDirection)); let octants = [ ((1, 1), true), // 0 - SE-vert ((1, 1), false), // 1 - SE-horiz ((1, -1), true), // 2 - NE-vert ((1, -1), false), // 3 - NE-horiz ((-1, 1), true), // 4 - SW-vert ((-1, 1), false), // 5 - SW-horiz ((-1, -1), true), // 6 - NW-vert ((-1, -1), false) // 7 - NW-horiz ]; while pending_zones.len() > 0 { let before_len = visible_tiles.len(); let (curr_zid, curr_focus, curr_offset, max_radius, from_pid, from_dir) = pending_zones.pop().expect("fov::compute .. popping zone, shouldn't happen"); // When processing a connected zone, we only do the half of the screen // that we'll see based on the direction into which we arrived at the portal let mut octants_slice = match from_dir { // originating zone.. 
process all quads NoDirection => octants.iter().filter(|_| { true }), // North - NW and NE quads North => octants.iter().filter(|o| { let &((_,y),_) = *o; y == -1 }), // South - SW and SE quads South => octants.iter().filter(|o| { let &((_,y),_) = *o; y == 1 }), // West - NW and SW quads West => octants.iter().filter(|o| { let &((x,_),_) = *o; x == -1 }), // East - NE and SE quads East => octants.iter().filter(|o| { let &((x,_),_) = *o; x == 1 }), }; let zone = world.get_zone(&curr_zid); // always insert focus pos, then check for portal at starting pos if from_pid == Uuid::nil() { visible_tiles.insert(RelativeCoord::new(curr_zid, curr_focus, curr_offset)); let curr_tile = zone.get_tile(curr_focus); match curr_tile.portal_id { Some(pid) => { pending_zones.push(build_pending_zone_entry( world, zone.id, pid, curr_offset, max_radius)); }, None => {} } } let mut in_fov: HashSet<int> = HashSet::new(); for o in octants_slice { let (quadrant, is_vert) = *o; let (tiles, zones) = compute_octant( world, zone, curr_focus, curr_offset, max_radius, from_pid, &mut in_fov, start_ang, end_ang, quadrant, is_vert, from_dir); for t in tiles.into_iter() { visible_tiles.insert(t); } for z in zones.into_iter() { pending_zones.push(z); } } let found = visible_tiles.len() - before_len; } debug!("num of visible_tiles: {}", visible_tiles.len()); visible_tiles.into_iter().collect() } fn min(a: int, b: int) -> int { if a < b { a } else { b } } fn max(a: int, b: int) -> int { if a > b { a } else { b } } type ComputeOctantPendingZones = (Uuid, (uint, uint), (int, int), uint, Uuid, TraversalDirection); fn compute_octant<TWorldPayload, TZonePayload, TTilePayload: Send + Payloadable + FovItem>( world: &World<TWorldPayload, TZonePayload, TTilePayload>, zone: &Zone<TZonePayload, TTilePayload>, position: (uint, uint), offset: (int, int), max_radius: uint, from_pid: Uuid, in_fov: &mut HashSet<int>, start_angle: &mut [f64], end_angle: &mut [f64], dn: (int, int), is_vert: bool, from_dir: TraversalDirection) -> (Vec<RelativeCoord>, Vec<ComputeOctantPendingZones>) { let mut visible_tiles = HashSet::new(); let mut pending_zones = HashSet::new(); let stub_tile = Tile::stub(); let padding = 34 as int; let (raw_px, raw_py) = position; let (raw_px, raw_py) = (raw_px as int, raw_py as int); let (in_ox, in_oy) = offset; let wsize = zone.size; let wsize_sq = wsize * wsize; let (position_x, position_y) = match from_dir { NoDirection => (raw_px, raw_py), _ => { let (px, py) = (raw_px - in_ox, raw_py - in_oy); (px, py) } }; let (dx, dy) = dn; { let mut iteration = 1 as int; let mut done = false; let mut total_obstacles = 0; let mut obstacles_in_last_line = 0; let mut min_angle = 0.0; // do while there are unblocked slopes left and the algo is within // the map's boundaries // scan progressive lines/columns from the focal-point outwards // branch:0 initial x,y values + initial bounds check for outer let mut x = if is_vert { 0 as int } else { (position_x as int + dx) as int }; // branch:0 let mut y = if is_vert { (position_y as int + dy) as int } else { 0 as int }; // branch:0 if is_vert { if y < -padding || y >= (wsize as int)+padding { debug!("vdt: starting y:{} < 0 || y >= wsize", y); done = true; } } else { if x < -padding || x >= (wsize as int)+padding { debug!("hdt: starting x:{} < 0 || x >= wsize", x); done = true; } } while !done { // process cells in the line let slopes_per_cell = 1.0 / (iteration as f64 + 1.0); let half_slopes = slopes_per_cell * 0.5; let mut processed_cell = (min_angle / slopes_per_cell) as int; done = true; //
branch:1 calculate min/max inner bounds + set inner let (mini, maxi) = if is_vert { let minx = max(-padding, position_x as int - iteration); let maxx = min((wsize as int+padding) - 1, position_x as int + iteration); (minx, maxx) } else { let miny = max(-padding, position_y as int - iteration); let maxy = min((wsize as int+padding) - 1, position_y as int + iteration); (miny, maxy) }; // branch:1 let mut inner = if is_vert { x = (position_x as int + (processed_cell * dx)) as int; x } else { y = (position_y as int + (processed_cell * dy)) as int; y }; while inner >= mini && inner <= maxi { let c = x + (y * wsize as int); let in_bounds = x >= 0 && y >= 0 && zone.coords_in_bounds((x as uint, y as uint)); let c_tile = if in_bounds { zone.tile_at_idx(c as uint) } else { &stub_tile }; let is_void = match c_tile.payload.get_fov() { Void => true, _ => false }; let mut allow_los = c_tile.payload.get_fov().allow_los(); let mut visible = true; let start_slope = processed_cell as f64 * slopes_per_cell; let center_slope = start_slope + half_slopes; let end_slope = start_slope + slopes_per_cell; if obstacles_in_last_line > 0 && !in_fov.contains(&c) { let mut idx = 0; while in_bounds && visible && idx < obstacles_in_last_line { if allow_los { if center_slope > start_angle[idx] && center_slope < end_angle[idx] { visible = false; } } else if (start_slope >= start_angle[idx]) && (end_slope <= end_angle[idx]) { visible = false; } // branch:2 zy vals + n - dn bounds checks let zy = if is_vert { x + ((y-dy) * wsize as int) } else { x-dx + (y* wsize as int) }; // branch:2 let n_minus_dn_bounds_check = if is_vert { (x - dx >= 0) && (x - dx < wsize as int) } else { (y - dy >= 0) && (y - dy < wsize as int) }; let zy_tile_trans = if zy >= 0 && zy < wsize_sq as int { let t = zone.tile_at_idx(zy as uint); t.payload.get_fov().allow_los() } else { true }; let zyx = (x-dx) + ((y-dy) * wsize as int); let zyx_tile_trans = if zyx >= 0 && zyx < wsize_sq as int { let t = zone.tile_at_idx(zyx as uint); t.payload.get_fov().allow_los() } else { true }; if visible && (!in_fov.contains(&zy) || !zy_tile_trans) && (n_minus_dn_bounds_check && ((!in_fov.contains(&zyx)) || (!zyx_tile_trans))) { visible = false; } idx += 1; } } if is_void { visible = false; done = false; } let mut non_blocking_axis = true; match from_dir { North => { if y == raw_py && x != raw_px { non_blocking_axis = false; visible = true; done = true; allow_los = false; } else if y > raw_py { visible = false; done = false; } }, South => { if y == raw_py && x != raw_px { non_blocking_axis = false; visible = true; done = true; allow_los = false; } else if y < raw_py { visible = false; done = false; } }, East => {<|fim▁hole|> if x == raw_px && y != raw_py { non_blocking_axis = false; visible = true; done = true; allow_los = false; } else if x < raw_px { visible = false; done = false; } }, West => { if x == raw_px && y != raw_py { non_blocking_axis = false; visible = true; done = true; allow_los = false; } else if x > raw_px { visible = false; done = false; } }, _ => {} } if visible { let (gx, gy) = offset; let (ox, oy) = (x - raw_px, y - raw_py); let this_gx = (ox+gx, oy+gy); let found_already = in_fov.contains(&c); if non_blocking_axis { in_fov.insert(c); } let add_this_tile = match c_tile.portal_id { Some(pid) => { if pid != from_pid && !found_already { let iter = if iteration < 0 { 0 } else { iteration }; let remaining_radius = max_radius as int - iter; let remaining_radius = if remaining_radius < 0 { 0 as uint } else { remaining_radius as uint }; let remaining_radius 
= if from_pid == Uuid::nil() { max_radius } else { remaining_radius }; let pz = build_pending_zone_entry( world, zone.id, pid, this_gx, remaining_radius ); pending_zones.insert(pz); false } else { true } }, None => true }; if non_blocking_axis && add_this_tile { visible_tiles.insert(RelativeCoord::new( zone.id, (x as uint, y as uint), this_gx)); } done = false; if !allow_los { // update angle state.. if min_angle >= start_slope { min_angle = end_slope; } else { start_angle[total_obstacles] = start_slope; end_angle[total_obstacles] = end_slope; total_obstacles += 1; } } } processed_cell += 1; // branch:3 update x||y and inner if is_vert { x += dx; inner = x; } else { y += dy; inner = y; } } if iteration == max_radius as int { debug!("vdt: iteration == max_radius"); done = true; } iteration += 1; obstacles_in_last_line = total_obstacles; // branch:4 update x||y + done if is_vert { y += dy; if y < -padding || y >= (wsize as int)+padding { done = true; } } else { x += dx; if x < -padding || x >= (wsize as int)+padding { done = true; } } if min_angle == 1.0 { done = true; } } } (visible_tiles.into_iter().collect(), pending_zones.into_iter().collect()) } fn build_pending_zone_entry<TWorldPayload, TZonePayload, TPayload: Send + Payloadable + FovItem>( world: &World<TWorldPayload, TZonePayload, TPayload>, zid: Uuid, pid: Uuid, this_gx: (int, int), remaining_radius: uint) -> (Uuid, (uint, uint), (int, int), uint, Uuid, TraversalDirection) { let portal = world.get_portal(pid); let (ozid, from_dir) = portal.info_from(zid); let other_zone = world.get_zone(&ozid); let oc = other_zone.get_portal_coords(&pid); (ozid, *oc, this_gx, remaining_radius, pid, from_dir) }<|fim▁end|>
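# The per-cell slope bookkeeping from compute_octant above, restated in
# Python: each scan line at distance `iteration` is divided into equal
# slope intervals, and each cell gets a start, center and end slope.
def cell_slopes(iteration, processed_cell):
    slopes_per_cell = 1.0 / (iteration + 1.0)
    start = processed_cell * slopes_per_cell
    return start, start + 0.5 * slopes_per_cell, start + slopes_per_cell

assert cell_slopes(1, 0) == (0.0, 0.25, 0.5)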
<|file_name|>daterange-picker-test.js<|end_file_name|><|fim▁begin|>import { moduleForComponent, test } from 'ember-qunit';<|fim▁hole|>moduleForComponent('daterange-picker', 'DaterangePickerComponent', { // specify the other units that are required for this test // needs: ['component:foo', 'helper:bar'] }); test('it renders', function() { expect(2); // creates the component instance var component = this.subject(); equal(component._state, 'preRender'); // appends the component to the page this.append(); equal(component._state, 'inDOM'); });<|fim▁end|>
<|file_name|>RunFlask.py<|end_file_name|><|fim▁begin|>#We don't use sagenb.notebook.run_notebook because we want the server in the same python environment as our app so we have access to the Notebook and Worksheet objects. ######### # Flask # ######### import os, random from guru.globals import GURU_PORT, GURU_NOTEBOOK_DIR import sagenb.notebook.notebook as notebook from sagenb.misc.misc import find_next_available_port, DOT_SAGENB import flask_server.base as flask_base def startServer(notebook_to_use=None, open_browser=False, debug_mode=False): #Setup the notebook. if notebook_to_use is None: #We assume the notebook is empty; load it from the default location. notebook_directory = os.path.join(DOT_SAGENB, "sage_notebook.sagenb") notebook_to_use = notebook.load_notebook(notebook_directory) notebook_to_use.user_manager().add_user('admin', 'admin','[email protected]',force=True) notebook_to_use.save() #Write out changes to disk. <|fim▁hole|> notebook_directory = notebook_to_use._dir #Setup the flask app. opts={} opts['startup_token'] = '{0:x}'.format(random.randint(0, 2**128)) startup_token = opts['startup_token'] flask_base.notebook = notebook_to_use #create_app will now use notebook_to_use instead of the provided location. flask_app = flask_base.create_app(interface="localhost", port=8081,secure=False, **opts) sagenb_pid = os.path.join(notebook_directory, "sagenb.pid") with open(sagenb_pid, 'w') as pidfile: pidfile.write(str(os.getpid())) #Quiet werkzeug's per-request logging; raise the level to see page requests. import logging logger=logging.getLogger('werkzeug') logger.setLevel(logging.WARNING) #logger.setLevel(logging.INFO) # to see page requests #logger.setLevel(logging.DEBUG) logger.addHandler(logging.StreamHandler()) port = find_next_available_port('localhost', GURU_PORT) notebook_to_use.port = port #MAKE THIS HAPPEN IN flask_base: g.username = session['username'] = 'admin' if open_browser: from sagenb.misc.misc import open_page open_page('localhost', port, False, '/?startup_token=%s' % startup_token) try: if debug_mode: flask_app.run(host='localhost', port=port, threaded=True, ssl_context=None, debug=True, use_reloader=False) else: flask_app.run(host='localhost', port=port, threaded=True, ssl_context=None, debug=False) finally: #save_notebook(flask_base.notebook) os.unlink(sagenb_pid)
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#=============================================================================== # Copyright (C) 2014-2019 Anton Vorobyov # # This file is part of Phobos. # # Phobos is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Phobos is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the<|fim▁hole|>#=============================================================================== from .cached_property import cachedproperty from .eve_normalize import EveNormalizer from .resource_browser import ResourceBrowser from .translator import Translator<|fim▁end|>
# GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with Phobos. If not, see <http://www.gnu.org/licenses/>.
<|file_name|>import.go<|end_file_name|><|fim▁begin|>// Copyright 2017 Pilosa Corp. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and<|fim▁hole|>import ( "context" "io" "github.com/pilosa/pilosa" "github.com/pilosa/pilosa/ctl" "github.com/spf13/cobra" ) var Importer *ctl.ImportCommand // newImportCommand runs the Pilosa import subcommand for ingesting bulk data. func newImportCommand(stdin io.Reader, stdout, stderr io.Writer) *cobra.Command { Importer = ctl.NewImportCommand(stdin, stdout, stderr) importCmd := &cobra.Command{ Use: "import", Short: "Bulk load data into pilosa.", Long: `Bulk imports one or more CSV files to a host's index and field. The data of the CSV file are grouped by shard for the most efficient import. The format of the CSV file is: ROWID,COLUMNID,[TIME] The file should contain no headers. The TIME column is optional and can be omitted. If it is present then its format should be YYYY-MM-DDTHH:MM. `, RunE: func(cmd *cobra.Command, args []string) error { Importer.Paths = args return Importer.Run(context.Background()) }, } flags := importCmd.Flags() flags.StringVarP(&Importer.Host, "host", "", "localhost:10101", "host:port of Pilosa.") flags.StringVarP(&Importer.Index, "index", "i", "", "Pilosa index to import into.") flags.StringVarP(&Importer.Field, "field", "f", "", "Field to import into.") flags.BoolVar(&Importer.IndexOptions.Keys, "index-keys", false, "Specify keys=true when creating an index") flags.BoolVar(&Importer.FieldOptions.Keys, "field-keys", false, "Specify keys=true when creating a field") flags.StringVar(&Importer.FieldOptions.Type, "field-type", "", "Specify the field type when creating a field. One of: set, int, time, bool, mutex") flags.Int64Var(&Importer.FieldOptions.Min, "field-min", 0, "Specify the minimum for an int field on creation") flags.Int64Var(&Importer.FieldOptions.Max, "field-max", 0, "Specify the maximum for an int field on creation") flags.StringVar(&Importer.FieldOptions.CacheType, "field-cache-type", pilosa.CacheTypeRanked, "Specify the cache type for a set field on creation. One of: none, lru, ranked") flags.Uint32Var(&Importer.FieldOptions.CacheSize, "field-cache-size", 50000, "Specify the cache size for a set field on creation") flags.Var(&Importer.FieldOptions.TimeQuantum, "field-time-quantum", "Specify the time quantum for a time field on creation. One of: D, DH, H, M, MD, MDH, Y, YM, YMD, YMDH") flags.IntVarP(&Importer.BufferSize, "buffer-size", "s", 10000000, "Number of bits to buffer/sort before importing.") flags.BoolVarP(&Importer.Sort, "sort", "", false, "Enables sorting before import.") flags.BoolVarP(&Importer.CreateSchema, "create", "e", false, "Create the schema if it does not exist before import.") flags.BoolVarP(&Importer.Clear, "clear", "", false, "Clear the data provided in the import.") ctl.SetTLSConfig(flags, &Importer.TLS.CertificatePath, &Importer.TLS.CertificateKeyPath, &Importer.TLS.SkipVerify) return importCmd }<|fim▁end|>
// limitations under the License. package cmd
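# The ROWID,COLUMNID[,TIME] layout the import help text above describes,
# written with Python's csv module (no header row, TIME in
# YYYY-MM-DDTHH:MM format); the values are placeholders.
import csv

with open("bits.csv", "w", newline="") as f:
    w = csv.writer(f)
    w.writerow([1, 10, "2017-03-01T10:30"])
    w.writerow([1, 11, "2017-03-01T10:31"])
    w.writerow([2, 10, "2017-03-02T09:00"])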
<|file_name|>DateTimeAuthenticationRequestRiskCalculatorTests.java<|end_file_name|><|fim▁begin|>package org.apereo.cas.impl.calcs; import org.apereo.cas.api.AuthenticationRiskEvaluator; import org.apereo.cas.api.AuthenticationRiskScore; import org.apereo.cas.authentication.Authentication; import org.apereo.cas.authentication.CoreAuthenticationTestUtils; import org.apereo.cas.config.CasCoreAuthenticationConfiguration; import org.apereo.cas.config.CasCoreAuthenticationHandlersConfiguration; import org.apereo.cas.config.CasCoreAuthenticationMetadataConfiguration; import org.apereo.cas.config.CasCoreAuthenticationPolicyConfiguration; import org.apereo.cas.config.CasCoreAuthenticationPrincipalConfiguration; import org.apereo.cas.config.CasCoreAuthenticationSupportConfiguration; import org.apereo.cas.config.CasCoreConfiguration; import org.apereo.cas.config.CasCoreHttpConfiguration; import org.apereo.cas.config.CasCoreServicesConfiguration; import org.apereo.cas.config.CasCoreTicketsConfiguration; import org.apereo.cas.config.CasCoreUtilConfiguration; import org.apereo.cas.config.CasCoreWebConfiguration; import org.apereo.cas.config.CasDefaultServiceTicketIdGeneratorsConfiguration; import org.apereo.cas.config.CasPersonDirectoryConfiguration; import org.apereo.cas.config.ElectronicFenceConfiguration; import org.apereo.cas.config.support.CasWebApplicationServiceFactoryConfiguration; import org.apereo.cas.impl.mock.MockTicketGrantingTicketCreatedEventProducer; import org.apereo.cas.logout.config.CasCoreLogoutConfiguration; import org.apereo.cas.services.RegisteredService; import org.apereo.cas.services.RegisteredServiceTestUtils; import org.apereo.cas.support.events.config.CasCoreEventsConfiguration; import org.apereo.cas.support.events.dao.CasEventRepository; import org.apereo.cas.support.geo.config.GoogleMapsGeoCodingConfiguration; import org.apereo.cas.web.config.CasCookieConfiguration; import org.apereo.cas.web.flow.config.CasCoreWebflowConfiguration; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.cloud.autoconfigure.RefreshAutoConfiguration; import org.springframework.mock.web.MockHttpServletRequest; import org.springframework.scheduling.annotation.EnableScheduling; import org.springframework.test.annotation.DirtiesContext; import org.springframework.test.context.TestPropertySource; import org.springframework.test.context.junit4.SpringRunner; import static org.junit.Assert.*; /** * This is {@link DateTimeAuthenticationRequestRiskCalculatorTests}. 
* * @author Misagh Moayyed * @since 5.1.0 */ @RunWith(SpringRunner.class) @SpringBootTest(classes = {RefreshAutoConfiguration.class, ElectronicFenceConfiguration.class, CasWebApplicationServiceFactoryConfiguration.class, CasDefaultServiceTicketIdGeneratorsConfiguration.class, CasCoreAuthenticationPrincipalConfiguration.class, CasCoreAuthenticationPolicyConfiguration.class, CasCoreAuthenticationMetadataConfiguration.class, CasCoreAuthenticationSupportConfiguration.class, CasCoreAuthenticationHandlersConfiguration.class, CasCoreAuthenticationConfiguration.class, CasCoreHttpConfiguration.class, CasPersonDirectoryConfiguration.class, CasCoreServicesConfiguration.class, GoogleMapsGeoCodingConfiguration.class, CasCoreWebConfiguration.class, CasCoreWebflowConfiguration.class, CasCoreConfiguration.class, CasCoreTicketsConfiguration.class, CasCoreLogoutConfiguration.class, CasCookieConfiguration.class, CasCoreUtilConfiguration.class, CasCoreEventsConfiguration.class}) @TestPropertySource(properties = "cas.authn.adaptive.risk.dateTime.enabled=true") @DirtiesContext @EnableScheduling public class DateTimeAuthenticationRequestRiskCalculatorTests { @Autowired @Qualifier("casEventRepository") private CasEventRepository casEventRepository; @Autowired @Qualifier("authenticationRiskEvaluator") private AuthenticationRiskEvaluator authenticationRiskEvaluator; @Before public void prepTest() { MockTicketGrantingTicketCreatedEventProducer.createEvents(this.casEventRepository); } @Test public void verifyTestWhenNoAuthnEventsFoundForUser() { final Authentication authentication = CoreAuthenticationTestUtils.getAuthentication("datetimeperson"); final RegisteredService service = RegisteredServiceTestUtils.getRegisteredService("test"); final MockHttpServletRequest request = new MockHttpServletRequest(); final AuthenticationRiskScore score = authenticationRiskEvaluator.eval(authentication, service, request); assertTrue(score.isHighestRisk()); } @Test public void verifyTestWhenAuthnEventsFoundForUser() { final Authentication authentication = CoreAuthenticationTestUtils.getAuthentication("casuser"); final RegisteredService service = RegisteredServiceTestUtils.getRegisteredService("test"); final MockHttpServletRequest request = new MockHttpServletRequest(); final AuthenticationRiskScore score = authenticationRiskEvaluator.eval(authentication, service, request);<|fim▁hole|> assertTrue(score.isLowestRisk()); } }<|fim▁end|>
<|file_name|>AlertMessageView_spec.js<|end_file_name|><|fim▁begin|>define( [ 'views/AlertMessageView',<|fim▁hole|> ], function (AlertMessageView, Mediator) { describe('AlertMessageView', function () { var resultsCollection, view, mediator; beforeEach(function () { resultsCollection = new Backbone.Collection(); mediator = new Mediator(); }); it('should initially be hidden', function () { view = new AlertMessageView().render(); expect(view.$el.find('.alert')).toHaveClass('hidden'); }); it('should become visible when a new message arrives', function () { view = new AlertMessageView(); view.setMediator(mediator); mediator.trigger('app:alert', {title: 'x', content: 'y'}); expect(view.$el.find('.alert')).not.toHaveClass('hidden'); }); it('should close it when a user clicks the X', function () { view = new AlertMessageView().render(); view.setMediator(mediator); view.removeAlertMessage(); expect(view.$el.find('.alert')).toHaveClass('hidden'); }); }); });<|fim▁end|>
'lib/Mediator'
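# The publish/subscribe contract the spec above exercises (trigger fans out
# to every handler registered for an event), reduced to a Python sketch.
class Mediator:
    def __init__(self):
        self.handlers = {}

    def on(self, event, fn):
        self.handlers.setdefault(event, []).append(fn)

    def trigger(self, event, *args):
        for fn in self.handlers.get(event, []):
            fn(*args)

m = Mediator()
seen = []
m.on("app:alert", seen.append)
m.trigger("app:alert", {"title": "x", "content": "y"})
assert seen == [{"title": "x", "content": "y"}]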
<|file_name|>fig4.py<|end_file_name|><|fim▁begin|>#### # Figure 4 # needs: # - data/*.npz produced by run.py #### import glob import sys sys.path.append('..') from lib.mppaper import * import lib.mpsetup as mpsetup import lib.immune as immune files = sorted((immune.parse_value(f, 'epsilon'), f) for f in glob.glob("data/*.npz")) import run sigma = run.sigma #### left figure #### epsilons = [] similarities = [] similaritiesQ = [] similaritiesPtilde = [] for epsilon, f in files: npz = np.load(f) P1 = npz['P1'] P2 = npz['P2'] Ptilde1 = npz['Ptilde1'] Ptilde2 = npz['Ptilde2'] Q1 = npz['Q1'] Q2 = npz['Q2'] epsilons.append(epsilon) similarities.append(immune.similarity(P1, P2)) similaritiesQ.append(immune.similarity(Q1, Q2)) similaritiesPtilde.append(immune.similarity(Ptilde1, Ptilde2)) fig = plt.figure() ax = fig.add_subplot(121) ax.axhline(1.0, color=almostblack) ax.plot(epsilons, similarities, label='$P_r^\star$', **linedotstyle) ax.plot(epsilons, similaritiesQ, label='$Q_a$', **linedotstyle) ax.plot(epsilons, similaritiesPtilde, label=r'$\tilde P_a$', **linedotstyle)<|fim▁hole|>ax.set_xlabel('noise $\epsilon$') ax.set_ylabel('Similarity') ax.set_xlim(0.0, 0.5) ax.set_ylim(0.0, 1.05) ax.legend(ncol=1, loc='center right') ax.xaxis.labelpad = axis_labelpad ax.yaxis.labelpad = axis_labelpad mpsetup.despine(ax) fig.tight_layout(pad=tight_layout_pad) fig.subplots_adjust(top=0.85) #### right figures #### epsilon_illustration = 0.2 epsilon, f = [tup for tup in files if tup[0] == epsilon_illustration][0] npz = np.load(f) P1 = npz['P1'] P2 = npz['P2'] Qbase = npz['Qbase'] Q1 = npz['Q1'] Q2 = npz['Q2'] x = npz['x'] axQ = fig.add_subplot(222) for i, Q in enumerate([Q1, Q2]): axQ.plot(x/sigma, Q, lw=0.5 * linewidth, label='ind. %g' % (i+1)) axQ.set_xlim(0, 10) axQ.set_ylabel(r'$Q_a$') axP = fig.add_subplot(224, sharex=axQ) for i, p in enumerate([P1, P2]): axP.plot(x/sigma, p, label='ind. %g' % (i+1), **linedotstyle) axP.locator_params(axis='x', nbins=5, tight=True) axP.set_xlim(0, 20) axP.set_ylabel(r'$P_r^\star$') axP.legend(ncol=2, handletextpad=0.1, loc='upper right', bbox_to_anchor=(1.05, 1.20)) for a in [axQ, axP]: a.set_ylim(ymin=0.0) mpsetup.despine(a) a.set_yticks([]) a.xaxis.labelpad = axis_labelpad a.yaxis.labelpad = axis_labelpad axP.set_xlabel('$x \; / \; \sigma$') plt.setp(axQ.get_xticklabels(), visible=False) #### finish figure #### fig.tight_layout(pad=tight_layout_pad, h_pad=1.0) fig.savefig('fig4.svg') plt.show()<|fim▁end|>
<|file_name|>CycleResource.java<|end_file_name|><|fim▁begin|>package com.douwe.notes.resource.impl; import com.douwe.notes.entities.Cycle; import com.douwe.notes.resource.ICycleResource; import com.douwe.notes.service.ICycleService; import com.douwe.notes.service.IInsfrastructureService; import com.douwe.notes.service.ServiceException; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; import javax.ejb.EJB; import javax.ws.rs.Path; /** * * @author Vincent Douwe <[email protected]> */ @Path("/cycles") public class CycleResource implements ICycleResource{ @EJB private IInsfrastructureService infranstructureService; @EJB private ICycleService cycleService; public Cycle createCycle(Cycle cycle) { try { return cycleService.saveOrUpdateCycle(cycle); } catch (ServiceException ex) { Logger.getLogger(CycleResource.class.getName()).log(Level.SEVERE, null, ex); return null; } } public List<Cycle> getAllCycle() { try { return cycleService.getAllCycles(); } catch (ServiceException ex) { Logger.getLogger(CycleResource.class.getName()).log(Level.SEVERE, null, ex); return null; } } public Cycle getCycle(long id) { try { return cycleService.findCycleById(id); } catch (ServiceException ex) { Logger.getLogger(CycleResource.class.getName()).log(Level.SEVERE, null, ex); return null; } } public Cycle updateCycle(long id, Cycle cycle) { try { Cycle c = cycleService.findCycleById(id); if(c != null){ c.setNom(cycle.getNom()); return cycleService.saveOrUpdateCycle(c); } return null; } catch (ServiceException ex) { Logger.getLogger(CycleResource.class.getName()).log(Level.SEVERE, null, ex); return null; } } public void deleteCycle(long id) { try { cycleService.deleteCycle(id); } catch (ServiceException ex) { Logger.getLogger(CycleResource.class.getName()).log(Level.SEVERE, null, ex); } } public IInsfrastructureService getInfranstructureService() { return infranstructureService; } public void setInfranstructureService(IInsfrastructureService infranstructureService) { this.infranstructureService = infranstructureService; } public ICycleService getCycleService() { return cycleService; } public void setCycleService(ICycleService cycleService) { this.cycleService = cycleService; } <|fim▁hole|> }<|fim▁end|>
<|file_name|>ssf_reader.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python -*- coding:utf-8 -*- __Author__ = "Riyaz Ahmad Bhat" __Email__ = "[email protected]" import re from collections import namedtuple from sanity_checker import SanityChecker class DefaultList(list): """Equivalent of Default dictionaries for Indexing Errors.""" def __init__(self, default=None): self.default = default list.__init__(self) def __getitem__(self, index): try: return list.__getitem__(self, index) except IndexError: return self.default class SSFReader (SanityChecker): def __init__ (self, sentence):<|fim▁hole|> super(SSFReader, self).__init__() self.id_ = int() self.nodeList = list() self.chunk_word = dict() self.sentence = sentence self.modifierModified = dict() self.node = namedtuple('node', ('id', 'head', 'children', 'pos', 'poslcat', 'af', 'vpos', 'name','drel','parent', 'chunkId', 'chunkType', 'mtype', 'troot', 'coref', 'stype','voicetype', 'posn')) self.features = namedtuple('features', ('lemma','cat','gen','num','per','case','vib','tam')) def getAnnotations (self): children_ = list() for line in self.sentence.split("\n"): nodeInfo = line.decode("utf-8").split("\t") if nodeInfo[0].isdigit(): assert len(nodeInfo) == 4 # no need to process trash! FIXME attributeValue_pairs = self.FSPairs(nodeInfo[3][4:-1]) attributes = self.updateFSValues(attributeValue_pairs) h = attributes.get #NOTE h -> head node attributes elif nodeInfo[0].replace(".",'',1).isdigit(): assert (len(nodeInfo) == 4) and (nodeInfo[1] and nodeInfo[2] != '') # FIXME self.id_ += 1 pos_ = nodeInfo[2].encode("utf-8").decode("ascii",'ignore').encode("ascii") wordForm_ = nodeInfo[1] attributeValue_pairs = self.FSPairs(nodeInfo[3][4:-1]) if attributeValue_pairs['name'] == h('head_'):# NOTE head word of the chunk self.nodeList.append(self.node(str(self.id_),wordForm_,children_,pos_,h('poslcat_'), self.features(h('lemma_') if h('lemma_') else wordForm_ ,h('cat_'),h('gen_'), h('num_'), h('per_'),h('case_'),h('vib_'),h('tam_')),h('vpos_'),h('head_'),h('drel_'), h('parent_'),h('chunkId_'),":".join(('head',h('chunkId_'))),h('mtype_'),h('troot_'), h('coref_'),h('stype_'),h('voicetype_'),h('posn_'))) self.modifierModified[h('chunkId_')] = h('parent_') self.chunk_word[h('chunkId_')] = h('head_') else: attributes = self.updateFSValues(attributeValue_pairs) c = attributes.get #NOTE c -> child node attributes children_.append(self.node(str(self.id_),wordForm_,[],pos_,c('poslcat_'),self.features(c('lemma_') \ if c('lemma_') else wordForm_ ,c('cat_'),c('gen_'),c('num_'),c('per_'),c('case_'),c('vib_'), c('tam_')),c('vpos_'),c('name_'),"_","_",None,":".join(('child',h('chunkId_'))),c('mtype_'), c('troot_'),c('coref_'),None, None, c('posn_'))) else: children_ = list() return self def FSPairs (self, FS) : feats = dict() for feat in FS.split(): if "=" not in feat:continue feat = re.sub("af='+","af='",feat.replace("dmrel=",'drel=')) assert len(feat.split("=")) == 2 attribute,value = feat.split("=") feats[attribute] = value return feats def morphFeatures (self, AF): "LEMMA,CAT,GEN,NUM,PER,CASE,VIB,TAM" assert len(AF[:-1].split(",")) == 8 # no need to process trash! 
FIXME lemma_,cat_,gen_,num_,per_,case_,vib_,tam_ = AF.split(",") if len(lemma_) > 1: lemma_ = lemma_.strip("'") return lemma_.strip("'"),cat_,gen_,num_,per_,case_,vib_,tam_.strip("'") def updateFSValues (self, attributeValue_pairs): attributes = dict(zip(['head_','poslcat_','af_','vpos_','name_','drel_','parent_','mtype_','troot_','chunkId_',\ 'coref_','stype_','voicetype_','posn_'], [None] * 14)) attributes.update(dict(zip(['lemma_','cat_','gen_','num_','per_','case_','vib_','tam_'], [''] * 8))) for key,value in attributeValue_pairs.items(): if key == "af": attributes['lemma_'],attributes['cat_'],attributes['gen_'],attributes['num_'],\ attributes['per_'],attributes['case_'],attributes['vib_'],attributes['tam_'] = \ self.morphFeatures (value) elif key == "drel": assert len(value.split(":")) == 2 # no need to process trash! FIXME attributes['drel_'], attributes['parent_'] = re.sub("'|\"",'',value).split(":") assert attributes['drel_'] and attributes['parent_'] != "" # no need to process trash! FIXME else: variable = str(key) + "_" if variable == "name_": attributes['chunkId_'] = re.sub("'|\"",'',value) attributes[variable] = re.sub("'|\"",'',value) return attributes<|fim▁end|>
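# A sketch of how SSFReader above is typically driven. The two-chunk SSF
# fragment below is illustrative (made-up tokens and features, not the
# original corpus); the tab-separated columns and the fs attributes
# (af, head, name, drel) follow what getAnnotations()/FSPairs() expect.
if __name__ == "__main__":
	ssf = "\n".join([
		"1\t((\tNP\t<fs af='raama,n,m,sg,3,d,0,0' head=raama name=NP drel=k1:VGF>",
		"1.1\traama\tNNP\t<fs af='raama,n,m,sg,3,d,0,0' name=raama>",
		"\t))",
		"2\t((\tVGF\t<fs af='A,v,m,sg,3,,0,0' head=AyA name=VGF>",
		"2.1\tAyA\tVM\t<fs af='A,v,m,sg,3,,0,0' name=AyA>",
		"\t))",
	])
	reader = SSFReader(ssf).getAnnotations()
	for node_ in reader.nodeList:
		# prints e.g.: 1 raama k1 VGF / 2 AyA None None
		print node_.id, node_.head, node_.drel, node_.parent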
<|file_name|>life_test.go<|end_file_name|><|fim▁begin|>// Copyright 2017 Aleksandr Demeshko. All rights reserved. // conway project // life_test.go package conway import ( "fmt" // "testing" ) <|fim▁hole|> Cell{0, 0}: 0, }, popNumber: 0, } p.Next() fmt.Println(p) // Output: {map[] 1} } func Example_twoCells() { var p = Population{cells: map[Cell]int{ Cell{0, 0}: 0, Cell{0, 1}: 0, }, popNumber: 0, } p.Next() fmt.Println(p) // Output: {map[] 1} } func Example_blinker() { var p = Population{cells: map[Cell]int{ Cell{0, 0}: 0, Cell{0, 1}: 0, Cell{0, -1}: 0, }, popNumber: 0, } p.SaveToFile("blinker0.log") p.Next() p.SaveToFile("blinker1.log") fmt.Println(len(p.cells)) // Output: 3 }<|fim▁end|>
func Example_singleCell() { var p = Population{cells: map[Cell]int{
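// A sketch of the conway package the examples above exercise. The real
// life.go is not part of this record, so everything beyond the identifiers
// the tests pin down (Cell, Population, cells, popNumber, Next) is a
// plausible guess, not the project's actual code.
// SaveToFile (used by Example_blinker) is omitted from this sketch.
package conway

// Cell addresses one live cell on an unbounded grid.
type Cell struct {
	X, Y int
}

// Population is a sparse set of live cells plus a generation counter; the
// map value is unused, matching the `map[Cell]int` literals in the tests.
type Population struct {
	cells     map[Cell]int
	popNumber int
}

// Next advances one generation by Conway's rules: a live cell survives with
// two or three live neighbours, and a dead cell is born with exactly three.
func (p *Population) Next() {
	neighbours := make(map[Cell]int)
	for c := range p.cells {
		for dx := -1; dx <= 1; dx++ {
			for dy := -1; dy <= 1; dy++ {
				if dx != 0 || dy != 0 {
					neighbours[Cell{c.X + dx, c.Y + dy}]++
				}
			}
		}
	}
	next := make(map[Cell]int)
	for c, n := range neighbours {
		if _, alive := p.cells[c]; n == 3 || (n == 2 && alive) {
			next[c] = 0
		}
	}
	p.cells = next
	p.popNumber++
}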
<|file_name|>app.py<|end_file_name|><|fim▁begin|>#encoding:utf-8 subreddit = 'jacksepticeye' t_channel = '@r_jacksepticeye' def send_post(submission, r2t):<|fim▁hole|><|fim▁end|>
return r2t.send_simple(submission)
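# Handlers in this layout are usually this small; when a channel needs
# filtering, the check goes in front of the send_simple() call. The variant
# below is illustrative: is_self is a standard PRAW submission attribute,
# while "return False to skip" is assumed from the send_post contract above
# rather than spelled out in this file.
def send_post_filtered(submission, r2t):
    # Skip text-only self posts; forward everything else unchanged.
    if submission.is_self:
        return False
    return r2t.send_simple(submission)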
<|file_name|>chops.rs<|end_file_name|><|fim▁begin|>#[macro_use] extern crate holmes; use holmes::simple::*; extern crate tiamat; extern crate bap; use bap::high::bitvector::BitVector; #[test] pub fn chop_2() {<|fim▁hole|> single(&|holmes, core| { tiamat::uaf(vec!["./samples/chops/2.so".to_string()], 20, false)(holmes, core)?; assert_eq!(query!(holmes, use_after_free_flow([_]))?.len(), 1); let res = query!(holmes, use_after_free {source = source, alias_set = alias_set})?; assert!(res.len() > 0); Ok(()) }) } #[test] pub fn chop_14() { single(&|holmes, core| { tiamat::uaf(vec!["./samples/chops/14.so".to_string()], 30, true)(holmes, core)?; assert_eq!(query!(holmes, use_after_free_flow([_]))?.len(), 1); let res = query!(holmes, use_after_free {source = source, alias_set = alias_set})?; assert!(res.len() > 0); Ok(()) }) } #[test] pub fn chop_4() { single(&|holmes, core| { tiamat::uaf(vec!["./samples/chops/4.so".to_string()], 48, false)(holmes, core)?; assert_eq!(query!(holmes, use_after_free_flow([_]))?.len(), 1); let res = query!(holmes, use_after_free {source = source, alias_set = alias_set})?; assert!(res.len() > 0); Ok(()) }) } #[test] pub fn chop_7() { single(&|holmes, core| { tiamat::uaf(vec!["./samples/chops/7.so".to_string()], 31, false)(holmes, core)?; assert!(query!(holmes, use_after_free_flow([_]))?.len() > 1); let res = query!(holmes, use_after_free {source = source, alias_set = alias_set})?; assert!(res.len() > 0); for row in res { assert!((&row[0] == &BitVector::from_u64(0x578b, 64).to_value()) || (&row[0] == &BitVector::from_u64(0x84f3, 64).to_value()) || (&row[0] == &BitVector::from_u64(0x8545, 64).to_value())); } Ok(()) }) }<|fim▁end|>
<|file_name|>rpcprotocol.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2010 Satoshi Nakamoto // Copyright (c) 2009-2013 The Bitcoin developers // Distributed under the MIT/X11 software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. #include "rpcprotocol.h" #include "util.h" #include <stdint.h> #include <boost/algorithm/string.hpp> #include <boost/asio.hpp> #include <boost/asio/ssl.hpp> #include <boost/bind.hpp> #include <boost/filesystem.hpp> #include <boost/foreach.hpp> #include <boost/iostreams/concepts.hpp> #include <boost/iostreams/stream.hpp> #include <boost/lexical_cast.hpp> #include <boost/shared_ptr.hpp> #include "json/json_spirit_writer_template.h" using namespace std; using namespace boost; using namespace boost::asio; using namespace json_spirit; // // HTTP protocol // // This ain't Apache. We're just using HTTP header for the length field // and to be compatible with other JSON-RPC implementations. // string HTTPPost(const string& strMsg, const map<string,string>& mapRequestHeaders) { ostringstream s; s << "POST / HTTP/1.1\r\n" << "User-Agent: bitcurrency-json-rpc/" << FormatFullVersion() << "\r\n" << "Host: 127.0.0.1\r\n" << "Content-Type: application/json\r\n" << "Content-Length: " << strMsg.size() << "\r\n" << "Connection: close\r\n" << "Accept: application/json\r\n"; BOOST_FOREACH(const PAIRTYPE(string, string)& item, mapRequestHeaders) s << item.first << ": " << item.second << "\r\n"; s << "\r\n" << strMsg; return s.str(); } static string rfc1123Time() { return DateTimeStrFormat("%a, %d %b %Y %H:%M:%S +0000", GetTime()); } string HTTPReply(int nStatus, const string& strMsg, bool keepalive) { if (nStatus == HTTP_UNAUTHORIZED) return strprintf("HTTP/1.0 401 Authorization Required\r\n" "Date: %s\r\n" "Server: bitcurrency-json-rpc/%s\r\n" "WWW-Authenticate: Basic realm=\"jsonrpc\"\r\n" "Content-Type: text/html\r\n" "Content-Length: 296\r\n" "\r\n" "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\"\r\n" "\"http://www.w3.org/TR/1999/REC-html401-19991224/loose.dtd\">\r\n" "<HTML>\r\n" "<HEAD>\r\n" "<TITLE>Error</TITLE>\r\n" "<META HTTP-EQUIV='Content-Type' CONTENT='text/html; charset=ISO-8859-1'>\r\n" "</HEAD>\r\n"<|fim▁hole|> const char *cStatus; if (nStatus == HTTP_OK) cStatus = "OK"; else if (nStatus == HTTP_BAD_REQUEST) cStatus = "Bad Request"; else if (nStatus == HTTP_FORBIDDEN) cStatus = "Forbidden"; else if (nStatus == HTTP_NOT_FOUND) cStatus = "Not Found"; else if (nStatus == HTTP_INTERNAL_SERVER_ERROR) cStatus = "Internal Server Error"; else cStatus = ""; return strprintf( "HTTP/1.1 %d %s\r\n" "Date: %s\r\n" "Connection: %s\r\n" "Content-Length: %"PRIszu"\r\n" "Content-Type: application/json\r\n" "Server: bitcurrency-json-rpc/%s\r\n" "\r\n" "%s", nStatus, cStatus, rfc1123Time(), keepalive ? 
"keep-alive" : "close", strMsg.size(), FormatFullVersion(), strMsg); } bool ReadHTTPRequestLine(std::basic_istream<char>& stream, int &proto, string& http_method, string& http_uri) { string str; getline(stream, str); // HTTP request line is space-delimited vector<string> vWords; boost::split(vWords, str, boost::is_any_of(" ")); if (vWords.size() < 2) return false; // HTTP methods permitted: GET, POST http_method = vWords[0]; if (http_method != "GET" && http_method != "POST") return false; // HTTP URI must be an absolute path, relative to current host http_uri = vWords[1]; if (http_uri.size() == 0 || http_uri[0] != '/') return false; // parse proto, if present string strProto = ""; if (vWords.size() > 2) strProto = vWords[2]; proto = 0; const char *ver = strstr(strProto.c_str(), "HTTP/1."); if (ver != NULL) proto = atoi(ver+7); return true; } int ReadHTTPStatus(std::basic_istream<char>& stream, int &proto) { string str; getline(stream, str); vector<string> vWords; boost::split(vWords, str, boost::is_any_of(" ")); if (vWords.size() < 2) return HTTP_INTERNAL_SERVER_ERROR; proto = 0; const char *ver = strstr(str.c_str(), "HTTP/1."); if (ver != NULL) proto = atoi(ver+7); return atoi(vWords[1].c_str()); } int ReadHTTPHeaders(std::basic_istream<char>& stream, map<string, string>& mapHeadersRet) { int nLen = 0; while (true) { string str; std::getline(stream, str); if (str.empty() || str == "\r") break; string::size_type nColon = str.find(":"); if (nColon != string::npos) { string strHeader = str.substr(0, nColon); boost::trim(strHeader); boost::to_lower(strHeader); string strValue = str.substr(nColon+1); boost::trim(strValue); mapHeadersRet[strHeader] = strValue; if (strHeader == "content-length") nLen = atoi(strValue.c_str()); } } return nLen; } int ReadHTTPMessage(std::basic_istream<char>& stream, map<string, string>& mapHeadersRet, string& strMessageRet, int nProto) { mapHeadersRet.clear(); strMessageRet = ""; // Read header int nLen = ReadHTTPHeaders(stream, mapHeadersRet); if (nLen < 0 || nLen > (int)MAX_SIZE) return HTTP_INTERNAL_SERVER_ERROR; // Read message if (nLen > 0) { vector<char> vch(nLen); stream.read(&vch[0], nLen); strMessageRet = string(vch.begin(), vch.end()); } string sConHdr = mapHeadersRet["connection"]; if ((sConHdr != "close") && (sConHdr != "keep-alive")) { if (nProto >= 1) mapHeadersRet["connection"] = "keep-alive"; else mapHeadersRet["connection"] = "close"; } return HTTP_OK; } // // JSON-RPC protocol. Bitcoin speaks version 1.0 for maximum compatibility, // but uses JSON-RPC 1.1/2.0 standards for parts of the 1.0 standard that were // unspecified (HTTP errors and contents of 'error'). 
// // 1.0 spec: http://json-rpc.org/wiki/specification // 1.2 spec: http://groups.google.com/group/json-rpc/web/json-rpc-over-http // http://www.codeproject.com/KB/recipes/JSON_Spirit.aspx // string JSONRPCRequest(const string& strMethod, const Array& params, const Value& id) { Object request; request.push_back(Pair("method", strMethod)); request.push_back(Pair("params", params)); request.push_back(Pair("id", id)); return write_string(Value(request), false) + "\n"; } Object JSONRPCReplyObj(const Value& result, const Value& error, const Value& id) { Object reply; if (error.type() != null_type) reply.push_back(Pair("result", Value::null)); else reply.push_back(Pair("result", result)); reply.push_back(Pair("error", error)); reply.push_back(Pair("id", id)); return reply; } string JSONRPCReply(const Value& result, const Value& error, const Value& id) { Object reply = JSONRPCReplyObj(result, error, id); return write_string(Value(reply), false) + "\n"; } Object JSONRPCError(int code, const string& message) { Object error; error.push_back(Pair("code", code)); error.push_back(Pair("message", message)); return error; }<|fim▁end|>
"<BODY><H1>401 Unauthorized.</H1></BODY>\r\n" "</HTML>\r\n", rfc1123Time(), FormatFullVersion());
<|file_name|>departures_gui.cpp<|end_file_name|><|fim▁begin|>/* $Id$ */ /* * This file is part of OpenTTD. * OpenTTD is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2. * OpenTTD is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with OpenTTD. If not, see <http://www.gnu.org/licenses/>. */ /** @file departures_gui.cpp Scheduled departures from a station. */ #include "stdafx.h" #include "debug.h" #include "gui.h" #include "textbuf_gui.h" #include "strings_func.h" #include "window_func.h" #include "vehicle_func.h" #include "string_func.h" #include "window_gui.h" #include "timetable.h" #include "vehiclelist.h" #include "company_base.h" #include "date_func.h" #include "departures_gui.h" #include "station_base.h" #include "vehicle_gui_base.h" #include "vehicle_base.h" #include "vehicle_gui.h" #include "order_base.h" #include "settings_type.h" #include "core/smallvec_type.hpp" #include "date_type.h" #include "company_type.h" #include "departures_func.h" #include "cargotype.h" #include "table/sprites.h" #include "table/strings.h" static const NWidgetPart _nested_departures_list[] = { NWidget(NWID_HORIZONTAL), NWidget(WWT_CLOSEBOX, COLOUR_GREY), NWidget(WWT_CAPTION, COLOUR_GREY, WID_DB_CAPTION), SetDataTip(STR_DEPARTURES_CAPTION, STR_TOOLTIP_WINDOW_TITLE_DRAG_THIS), NWidget(WWT_SHADEBOX, COLOUR_GREY), NWidget(WWT_STICKYBOX, COLOUR_GREY), EndContainer(), NWidget(NWID_HORIZONTAL), NWidget(WWT_MATRIX, COLOUR_GREY, WID_DB_LIST), SetMinimalSize(0, 0), SetFill(1, 0), SetResize(1, 1), NWidget(NWID_VSCROLLBAR, COLOUR_GREY, WID_DB_SCROLLBAR), EndContainer(), NWidget(NWID_HORIZONTAL), NWidget(WWT_PANEL, COLOUR_GREY), SetMinimalSize(0, 12), SetResize(1, 0), SetFill(1, 1), EndContainer(), NWidget(WWT_TEXTBTN, COLOUR_GREY, WID_DB_SHOW_DEPS), SetMinimalSize(6, 12), SetFill(0, 1), SetDataTip(STR_DEPARTURES_DEPARTURES, STR_DEPARTURES_DEPARTURES_TOOLTIP), NWidget(WWT_TEXTBTN, COLOUR_GREY, WID_DB_SHOW_ARRS), SetMinimalSize(6, 12), SetFill(0, 1), SetDataTip(STR_DEPARTURES_ARRIVALS, STR_DEPARTURES_ARRIVALS_TOOLTIP), NWidget(WWT_TEXTBTN, COLOUR_GREY, WID_DB_SHOW_VIA), SetMinimalSize(11, 12), SetFill(0, 1), SetDataTip(STR_DEPARTURES_VIA_BUTTON, STR_DEPARTURES_VIA_TOOLTIP), NWidget(WWT_TEXTBTN, COLOUR_GREY, WID_DB_SHOW_TRAINS), SetMinimalSize(14, 12), SetFill(0, 1), SetDataTip(STR_TRAIN, STR_STATION_VIEW_SCHEDULED_TRAINS_TOOLTIP), NWidget(WWT_TEXTBTN, COLOUR_GREY, WID_DB_SHOW_ROADVEHS), SetMinimalSize(14, 12), SetFill(0, 1), SetDataTip(STR_LORRY, STR_STATION_VIEW_SCHEDULED_ROAD_VEHICLES_TOOLTIP), NWidget(WWT_TEXTBTN, COLOUR_GREY, WID_DB_SHOW_SHIPS), SetMinimalSize(14, 12), SetFill(0, 1), SetDataTip(STR_SHIP, STR_STATION_VIEW_SCHEDULED_SHIPS_TOOLTIP), NWidget(WWT_TEXTBTN, COLOUR_GREY, WID_DB_SHOW_PLANES), SetMinimalSize(14, 12), SetFill(0, 1), SetDataTip(STR_PLANE, STR_STATION_VIEW_SCHEDULED_AIRCRAFT_TOOLTIP), NWidget(WWT_RESIZEBOX, COLOUR_GREY), EndContainer(), }; static WindowDesc _departures_desc( WDP_AUTO, 260, 246, WC_DEPARTURES_BOARD, WC_NONE, 0, _nested_departures_list, lengthof(_nested_departures_list) ); static uint cached_date_width = 0; ///< The cached maximum width required to display a date. 
static uint cached_status_width = 0;         ///< The cached maximum width required to show the status field.
static uint cached_date_arrow_width = 0;     ///< The cached width of the red/green arrows that may be displayed alongside times.
static bool cached_date_display_method;      ///< Whether the above cached values refer to original (d,m,y) dates or the 24h clock.
static bool cached_arr_dep_display_method;   ///< Whether to show departures and arrivals on a single line.

template<bool Twaypoint = false>
struct DeparturesWindow : public Window {
protected:
	StationID station;         ///< The station whose departures we're showing.
	DepartureList *departures; ///< The current list of departures from this station.
	DepartureList *arrivals;   ///< The current list of arrivals from this station.
	uint entry_height;         ///< The height of an entry in the departures list.
	uint tick_count;           ///< The number of ticks that have elapsed since the window was created. Used for scrolling text.
	int calc_tick_countdown;   ///< The number of ticks to wait until recomputing the departure list. Signed in case it goes below zero.
	bool show_types[4];        ///< The vehicle types to show in the departure list.
	bool departure_types[3];   ///< The types of departure to show in the departure list.
	uint min_width;            ///< The minimum width of this window.
	Scrollbar *vscroll;

	virtual uint GetMinWidth() const;
	static void RecomputeDateWidth();
	virtual void DrawDeparturesListItems(const Rect &r) const;
	void DeleteDeparturesList(DepartureList* list);

public:
	DeparturesWindow(const WindowDesc *desc, WindowNumber window_number) :
		Window(),
		station(window_number),
		departures(new DepartureList()),
		arrivals(new DepartureList()),
		entry_height(1 + FONT_HEIGHT_NORMAL + 1 + (_settings_client.gui.departure_larger_font ? FONT_HEIGHT_NORMAL : FONT_HEIGHT_SMALL) + 1 + 1),
		tick_count(0),
		calc_tick_countdown(0),
		min_width(400)
	{
		this->CreateNestedTree(desc);
		this->vscroll = this->GetScrollbar(WID_DB_SCROLLBAR);
		this->FinishInitNested(desc, window_number);

		/* By default, only show departures. 
*/ departure_types[0] = true; departure_types[1] = false; departure_types[2] = false; this->LowerWidget(WID_DB_SHOW_DEPS); this->RaiseWidget(WID_DB_SHOW_ARRS); this->RaiseWidget(WID_DB_SHOW_VIA); for (uint i = 0; i < 4; ++i) { show_types[i] = true; this->LowerWidget(WID_DB_SHOW_TRAINS + i); } if (Twaypoint) { this->GetWidget<NWidgetCore>(WID_DB_CAPTION)->SetDataTip(STR_DEPARTURES_CAPTION_WAYPOINT, STR_TOOLTIP_WINDOW_TITLE_DRAG_THIS); for (uint i = 0; i < 4; ++i) { this->DisableWidget(WID_DB_SHOW_TRAINS + i); } this->DisableWidget(WID_DB_SHOW_ARRS); this->DisableWidget(WID_DB_SHOW_DEPS); this->DisableWidget(WID_DB_SHOW_VIA); departure_types[2] = true; this->LowerWidget(WID_DB_SHOW_VIA); } } virtual ~DeparturesWindow() { this->DeleteDeparturesList(departures); this->DeleteDeparturesList(this->arrivals); } virtual void UpdateWidgetSize(int widget, Dimension *size, const Dimension &padding, Dimension *fill, Dimension *resize) { switch (widget) { case WID_DB_LIST: resize->height = DeparturesWindow::entry_height; size->height = 2 * resize->height; break; } } virtual void SetStringParameters(int widget) const { if (widget == WID_DB_CAPTION) { const Station *st = Station::Get(this->station); SetDParam(0, st->index); } } virtual void OnClick(Point pt, int widget, int click_count) { switch (widget) { case WID_DB_SHOW_TRAINS: // Show trains to this station case WID_DB_SHOW_ROADVEHS: // Show road vehicles to this station case WID_DB_SHOW_SHIPS: // Show ships to this station case WID_DB_SHOW_PLANES: // Show aircraft to this station this->show_types[widget - WID_DB_SHOW_TRAINS] = !this->show_types[widget - WID_DB_SHOW_TRAINS]; if (this->show_types[widget - WID_DB_SHOW_TRAINS]) { this->LowerWidget(widget); } else { this->RaiseWidget(widget); } /* We need to recompute the departures list. */ this->calc_tick_countdown = 0; /* We need to redraw the button that was pressed. */ this->SetWidgetDirty(widget); break; case WID_DB_SHOW_DEPS: case WID_DB_SHOW_ARRS: if (_settings_client.gui.departure_show_both) break; /* FALL THROUGH */ case WID_DB_SHOW_VIA: this->departure_types[widget - WID_DB_SHOW_DEPS] = !this->departure_types[widget - WID_DB_SHOW_DEPS]; if (this->departure_types[widget - WID_DB_SHOW_DEPS]) { this->LowerWidget(widget); } else { this->RaiseWidget(widget); } if (!this->departure_types[0]) { this->RaiseWidget(WID_DB_SHOW_VIA); this->DisableWidget(WID_DB_SHOW_VIA); } else { this->EnableWidget(WID_DB_SHOW_VIA); if (this->departure_types[2]) { this->LowerWidget(WID_DB_SHOW_VIA); } } /* We need to recompute the departures list. */ this->calc_tick_countdown = 0; /* We need to redraw the button that was pressed. */ this->SetWidgetDirty(widget); break; case WID_DB_LIST: // Matrix to show departures /* We need to find the departure corresponding to where the user clicked. */ uint32 id_v = (pt.y - this->GetWidget<NWidgetBase>(WID_DB_LIST)->pos_y) / this->entry_height; if (id_v >= this->vscroll->GetCapacity()) return; // click out of bounds id_v += this->vscroll->GetPosition(); if (id_v >= (this->departures->Length() + this->arrivals->Length())) return; // click out of list bound uint departure = 0; uint arrival = 0; /* Draw each departure. 
*/
				for (uint i = 0; i <= id_v; ++i) {
					const Departure *d;

					if (arrival == this->arrivals->Length()) {
						d = (*(this->departures))[departure++];
					} else if (departure == this->departures->Length()) {
						d = (*(this->arrivals))[arrival++];
					} else {
						d = (*(this->departures))[departure];
						const Departure *a = (*(this->arrivals))[arrival];

						if (a->scheduled_date < d->scheduled_date) {
							d = a;
							arrival++;
						} else {
							departure++;
						}
					}

					if (i == id_v) {
						ShowVehicleViewWindow(d->vehicle);
						break;
					}
				}

				break;
			}
		}
	}

	virtual void OnTick()
	{
		if (_pause_mode == PM_UNPAUSED) {
			this->tick_count += 1;
			this->calc_tick_countdown -= 1;
		}

		/* Recompute the minimum date display width if the cached one is no longer valid. */
		if (cached_date_width == 0 || _settings_client.gui.time_in_minutes != cached_date_display_method || _settings_client.gui.departure_show_both != cached_arr_dep_display_method) {
			this->RecomputeDateWidth();
		}

		/* We need to redraw the scrolling text in its new position. */
		this->SetWidgetDirty(WID_DB_LIST);

		/* Recompute the list of departures if we're due to. */
		if (this->calc_tick_countdown <= 0) {
			this->calc_tick_countdown = _settings_client.gui.departure_calc_frequency;
			this->DeleteDeparturesList(this->departures);
			this->DeleteDeparturesList(this->arrivals);

			// arr/dep swap: the D_ARRIVAL/D_DEPARTURE arguments below are intentionally exchanged (the original calls are kept commented out).
//			this->departures = (this->departure_types[0] ? MakeDepartureList(this->station, this->show_types, D_DEPARTURE, Twaypoint || this->departure_types[2]) : new DepartureList());
//			this->arrivals = (this->departure_types[1] && !_settings_client.gui.departure_show_both ? MakeDepartureList(this->station, this->show_types, D_ARRIVAL ) : new DepartureList());
			this->departures = (this->departure_types[0] ? MakeDepartureList(this->station, this->show_types, D_ARRIVAL, Twaypoint || this->departure_types[2]) : new DepartureList());
			this->arrivals = (this->departure_types[1] && !_settings_client.gui.departure_show_both ? MakeDepartureList(this->station, this->show_types, D_DEPARTURE ) : new DepartureList());

			this->SetWidgetDirty(WID_DB_LIST);
		}

		uint new_width = this->GetMinWidth();

		if (new_width != this->min_width) {
			NWidgetCore *n = this->GetWidget<NWidgetCore>(WID_DB_LIST);
			n->SetMinimalSize(new_width, 0);
			this->ReInit();
			this->min_width = new_width;
		}

		uint new_height = 1 + FONT_HEIGHT_NORMAL + 1 + (_settings_client.gui.departure_larger_font ? FONT_HEIGHT_NORMAL : FONT_HEIGHT_SMALL) + 1 + 1;

		if (new_height != this->entry_height) {
			this->entry_height = new_height;
			this->SetWidgetDirty(WID_DB_LIST);
			this->ReInit();
		}
	}

	virtual void OnPaint()
	{
		if (Twaypoint || _settings_client.gui.departure_show_both) {
			this->DisableWidget(WID_DB_SHOW_ARRS);
			this->DisableWidget(WID_DB_SHOW_DEPS);
		} else {
			this->EnableWidget(WID_DB_SHOW_ARRS);
			this->EnableWidget(WID_DB_SHOW_DEPS);
		}

		this->vscroll->SetCount(min(_settings_client.gui.max_departures, this->departures->Length() + this->arrivals->Length()));
		this->DrawWidgets();
	}

	virtual void DrawWidget(const Rect &r, int widget) const
	{
		switch (widget) {
			case WID_DB_LIST:
				this->DrawDeparturesListItems(r);
				break;
		}
	}

	virtual void OnResize()
	{
		this->vscroll->SetCapacityFromWidget(this, WID_DB_LIST);
		this->GetWidget<NWidgetCore>(WID_DB_LIST)->widget_data = (this->vscroll->GetCapacity() << MAT_ROW_START) + (1 << MAT_COL_START);
	}
};

/**
 * Shows a window of scheduled departures for a station.
 * @param station the station to show a departures window for
 */
void ShowStationDepartures(StationID station)
{
	AllocateWindowDescFront<DeparturesWindow<> >(&_departures_desc, station);
}

/**
 * Shows a window of scheduled departures for a waypoint.
 * @param waypoint the waypoint to show a departures window for
 */
void ShowWaypointDepartures(StationID waypoint)
{
	AllocateWindowDescFront<DeparturesWindow<true> >(&_departures_desc, waypoint);
}

template<bool Twaypoint>
void DeparturesWindow<Twaypoint>::RecomputeDateWidth()
{
	cached_date_width = 0;
	cached_status_width = 0;
	cached_date_display_method = _settings_client.gui.time_in_minutes;
	cached_arr_dep_display_method = _settings_client.gui.departure_show_both;

	cached_status_width = max((GetStringBoundingBox(STR_DEPARTURES_ON_TIME)).width, cached_status_width);
	cached_status_width = max((GetStringBoundingBox(STR_DEPARTURES_DELAYED)).width, cached_status_width);
	cached_status_width = max((GetStringBoundingBox(STR_DEPARTURES_CANCELLED)).width, cached_status_width);

	uint interval = cached_date_display_method ? _settings_client.gui.ticks_per_minute : DAY_TICKS;
	uint count = cached_date_display_method ? 24*60 : 365;

	for (uint i = 0; i < count; ++i) {
		SetDParam(0, INT_MAX - (i*interval));
		SetDParam(1, INT_MAX - (i*interval));
		cached_date_width = max(GetStringBoundingBox(cached_arr_dep_display_method ? STR_DEPARTURES_TIME_BOTH : STR_DEPARTURES_TIME_DEP).width, cached_date_width);
		cached_status_width = max((GetStringBoundingBox(STR_DEPARTURES_EXPECTED)).width, cached_status_width);
	}

	SetDParam(0, 0);
	cached_date_arrow_width = GetStringBoundingBox(STR_DEPARTURES_TIME_DEP).width - GetStringBoundingBox(STR_DEPARTURES_TIME).width;

	if (!_settings_client.gui.departure_show_both) {
		cached_date_width -= cached_date_arrow_width;
	}
}

template<bool Twaypoint>
uint DeparturesWindow<Twaypoint>::GetMinWidth() const
{
	uint result = 0;

	/* Time */
	result = cached_date_width;

	/* Vehicle type icon */
	result += _settings_client.gui.departure_show_vehicle_type ? (GetStringBoundingBox(STR_DEPARTURES_TYPE_PLANE)).width : 0;

	/* Status */
	result += cached_status_width;

	/* Find the maximum company name width. */
	int toc_width = 0;
	/* Find the maximum group name width. */
	int group_width = 0;
	/* Find the maximum vehicle name width. */
	int veh_width = 0;

	if (_settings_client.gui.departure_show_vehicle || _settings_client.gui.departure_show_company || _settings_client.gui.departure_show_group) {
		for (uint i = 0; i < 4; ++i) {
			VehicleList vehicles;

			/* MAX_COMPANIES is probably the wrong thing to put here, but it works. GenerateVehicleSortList doesn't check the company when the type of list is VL_STATION_LIST (r20801). */
			if (!GenerateVehicleSortList(&vehicles, VehicleListIdentifier(VL_STATION_LIST, (VehicleType)(VEH_TRAIN + i), MAX_COMPANIES, station).Pack())) {
				/* Something went wrong: panic!
*/ continue; } for (const Vehicle **v = vehicles.Begin(); v != vehicles.End(); v++) { SetDParam(0, (uint64)((*v)->index)); int width = (GetStringBoundingBox(STR_DEPARTURES_VEH)).width; if (_settings_client.gui.departure_show_vehicle && width > veh_width) veh_width = width; if ((*v)->group_id != INVALID_GROUP && (*v)->group_id != DEFAULT_GROUP) { SetDParam(0, (uint64)((*v)->group_id)); width = (GetStringBoundingBox(STR_DEPARTURES_GROUP)).width; if (_settings_client.gui.departure_show_group && width > group_width) group_width = width; } SetDParam(0, (uint64)((*v)->owner)); width = (GetStringBoundingBox(STR_DEPARTURES_TOC)).width; if (_settings_client.gui.departure_show_company && width > toc_width) toc_width = width; } } } result += toc_width + veh_width + group_width; return result + 140; } /** * Deletes this window's departure list. */ template<bool Twaypoint> void DeparturesWindow<Twaypoint>::DeleteDeparturesList(DepartureList *list) { /* SmallVector uses free rather than delete on its contents (which doesn't invoke the destructor), so we need to delete each departure manually. */ for (uint i = 0; i < list->Length(); ++i) { Departure **d = list->Get(i); delete *d; /* Make sure a double free doesn't happen. */ *d = NULL; } list->Reset(); delete list; list = NULL; } /** * Draws a list of departures. */<|fim▁hole|>{ int left = r.left + WD_MATRIX_LEFT; int right = r.right - WD_MATRIX_RIGHT; bool rtl = _current_text_dir == TD_RTL; bool ltr = !rtl; int text_offset = WD_FRAMERECT_RIGHT; int text_left = left + (rtl ? 0 : text_offset); int text_right = right - (rtl ? text_offset : 0); int y = r.top + 1; uint max_departures = min(this->vscroll->GetPosition() + this->vscroll->GetCapacity(), this->departures->Length() + this->arrivals->Length()); if (max_departures > _settings_client.gui.max_departures) { max_departures = _settings_client.gui.max_departures; } byte small_font_size = _settings_client.gui.departure_larger_font ? FONT_HEIGHT_NORMAL : FONT_HEIGHT_SMALL; /* Draw the black background. */ GfxFillRect(r.left + 1, r.top, r.right - 1, r.bottom, PC_BLACK); /* Nothing selected? Then display the information text. */ bool none_selected[2] = {true, true}; for (uint i = 0; i < 4; ++i) { if (this->show_types[i]) { none_selected[0] = false; break; } } for (uint i = 0; i < 2; ++i) { if (this->departure_types[i]) { none_selected[1] = false; break; } } if (none_selected[0] || none_selected[1]) { DrawString(text_left, text_right, y + 1, STR_DEPARTURES_NONE_SELECTED); return; } /* No scheduled departures? Then display the information text. */ if (max_departures == 0) { DrawString(text_left, text_right, y + 1, STR_DEPARTURES_EMPTY); return; } /* Find the maximum possible width of the departure time and "Expt <time>" fields. */ int time_width = cached_date_width; if (!_settings_client.gui.departure_show_both) { time_width += (departure_types[0] && departure_types[1] ? cached_date_arrow_width : 0); } /* Vehicle type icon */ int type_width = _settings_client.gui.departure_show_vehicle_type ? (GetStringBoundingBox(STR_DEPARTURES_TYPE_PLANE)).width : 0; /* Find the maximum width of the status field */ int status_width = cached_status_width; /* Find the width of the "Calling at:" field. */ int calling_at_width = (GetStringBoundingBox(_settings_client.gui.departure_larger_font ? STR_DEPARTURES_CALLING_AT_LARGE : STR_DEPARTURES_CALLING_AT)).width; /* Find the maximum company name width. */ int toc_width = 0; /* Find the maximum group name width. */ int group_width = 0; /* Find the maximum vehicle name width. 
*/ int veh_width = 0; if (_settings_client.gui.departure_show_vehicle || _settings_client.gui.departure_show_company || _settings_client.gui.departure_show_group) { for (uint i = 0; i < 4; ++i) { VehicleList vehicles; /* MAX_COMPANIES is probably the wrong thing to put here, but it works. GenerateVehicleSortList doesn't check the company when the type of list is VL_STATION_LIST (r20801). */ if (!GenerateVehicleSortList(&vehicles, VehicleListIdentifier(VL_STATION_LIST, (VehicleType)(VEH_TRAIN + i), MAX_COMPANIES, station).Pack())) { /* Something went wrong: panic! */ continue; } for (const Vehicle **v = vehicles.Begin(); v != vehicles.End(); v++) { SetDParam(0, (uint64)((*v)->index)); int width = (GetStringBoundingBox(STR_DEPARTURES_VEH)).width; if (_settings_client.gui.departure_show_vehicle && width > veh_width) veh_width = width; if ((*v)->group_id != INVALID_GROUP && (*v)->group_id != DEFAULT_GROUP) { SetDParam(0, (uint64)((*v)->group_id)); width = (GetStringBoundingBox(STR_DEPARTURES_GROUP)).width; if (_settings_client.gui.departure_show_group && width > group_width) group_width = width; } SetDParam(0, (uint64)((*v)->owner)); width = (GetStringBoundingBox(STR_DEPARTURES_TOC)).width; if (_settings_client.gui.departure_show_company && width > toc_width) toc_width = width; } } } uint departure = 0; uint arrival = 0; /* Draw each departure. */ for (uint i = 0; i < max_departures; ++i) { const Departure *d; if (arrival == this->arrivals->Length()) { d = (*(this->departures))[departure++]; } else if (departure == this->departures->Length()) { d = (*(this->arrivals))[arrival++]; } else { d = (*(this->departures))[departure]; const Departure *a = (*(this->arrivals))[arrival]; if (a->scheduled_date < d->scheduled_date) { d = a; arrival++; } else { departure++; } } if (i < this->vscroll->GetPosition()) { continue; } /* If for some reason the departure is too far in the future or is at a negative time, skip it. */ if ((d->scheduled_date / DAY_TICKS) > (_date + _settings_client.gui.max_departure_time) || d->scheduled_date < 0) { continue; } if (d->terminus == INVALID_STATION) continue; StringID time_str = (departure_types[0] && departure_types[1]) ? (d->type == D_DEPARTURE ? STR_DEPARTURES_TIME_DEP : STR_DEPARTURES_TIME_ARR) : STR_DEPARTURES_TIME; if (_settings_client.gui.departure_show_both) time_str = STR_DEPARTURES_TIME_BOTH; /* Time */ SetDParam(0, d->scheduled_date); SetDParam(1, d->scheduled_date - d->order->wait_time); ltr ? DrawString( text_left, text_left + time_width, y + 1, time_str) : DrawString(text_right - time_width, text_right, y + 1, time_str); /* Vehicle type icon, with thanks to sph */ if (_settings_client.gui.departure_show_vehicle_type) { StringID type = STR_DEPARTURES_TYPE_TRAIN; int offset = (_settings_client.gui.departure_show_vehicle_color ? 1 : 0); switch (d->vehicle->type) { case VEH_TRAIN: type = STR_DEPARTURES_TYPE_TRAIN; break; case VEH_ROAD: type = IsCargoInClass(d->vehicle->cargo_type, CC_PASSENGERS) ? STR_DEPARTURES_TYPE_BUS : STR_DEPARTURES_TYPE_LORRY; break; case VEH_SHIP: type = STR_DEPARTURES_TYPE_SHIP; break; case VEH_AIRCRAFT: type = STR_DEPARTURES_TYPE_PLANE; break; default: break; } type += offset; DrawString(text_left + time_width + 3, text_left + time_width + type_width + 3, y, type); } /* The icons to show with the destination and via stations. 
*/ StringID icon = STR_DEPARTURES_STATION_NONE; StringID icon_via = STR_DEPARTURES_STATION_NONE; if (_settings_client.gui.departure_destination_type) { Station *t = Station::Get(d->terminus.station); if (t->facilities & FACIL_DOCK && t->facilities & FACIL_AIRPORT && d->vehicle->type != VEH_SHIP && d->vehicle->type != VEH_AIRCRAFT) { icon = STR_DEPARTURES_STATION_PORTAIRPORT; } else if (t->facilities & FACIL_DOCK && d->vehicle->type != VEH_SHIP) { icon = STR_DEPARTURES_STATION_PORT; } else if (t->facilities & FACIL_AIRPORT && d->vehicle->type != VEH_AIRCRAFT) { icon = STR_DEPARTURES_STATION_AIRPORT; } } if (_settings_client.gui.departure_destination_type && d->via != INVALID_STATION) { Station *t = Station::Get(d->via); if (t->facilities & FACIL_DOCK && t->facilities & FACIL_AIRPORT && d->vehicle->type != VEH_SHIP && d->vehicle->type != VEH_AIRCRAFT) { icon_via = STR_DEPARTURES_STATION_PORTAIRPORT; } else if (t->facilities & FACIL_DOCK && d->vehicle->type != VEH_SHIP) { icon_via = STR_DEPARTURES_STATION_PORT; } else if (t->facilities & FACIL_AIRPORT && d->vehicle->type != VEH_AIRCRAFT) { icon_via = STR_DEPARTURES_STATION_AIRPORT; } } /* Destination */ if (d->via == INVALID_STATION) { /* Only show the terminus. */ SetDParam(0, d->terminus.station); SetDParam(1, icon); ltr ? DrawString( text_left + time_width + type_width + 6, text_right - status_width - (toc_width + veh_width + group_width + 2) - 2, y + 1, STR_DEPARTURES_TERMINUS) : DrawString(text_left + status_width + (toc_width + veh_width + group_width + 2) + 2, text_right - time_width - type_width - 6, y + 1, STR_DEPARTURES_TERMINUS); } else { /* Show the terminus and the via station. */ SetDParam(0, d->terminus.station); SetDParam(1, icon); SetDParam(2, d->via); SetDParam(3, icon_via); int text_width = (GetStringBoundingBox(STR_DEPARTURES_TERMINUS_VIA_STATION)).width; if (text_width < text_right - status_width - (toc_width + veh_width + group_width + 2) - 2 - (text_left + time_width + type_width + 6)) { /* They will both fit, so show them both. */ SetDParam(0, d->terminus.station); SetDParam(1, icon); SetDParam(2, d->via); SetDParam(3, icon_via); ltr ? DrawString( text_left + time_width + type_width + 6, text_right - status_width - (toc_width + veh_width + group_width + 2) - 2, y + 1, STR_DEPARTURES_TERMINUS_VIA_STATION) : DrawString(text_left + status_width + (toc_width + veh_width + group_width + 2) + 2, text_right - time_width - type_width - 6, y + 1, STR_DEPARTURES_TERMINUS_VIA_STATION); } else { /* They won't both fit, so switch between showing the terminus and the via station approximately every 4 seconds. */ if (this->tick_count & (1 << 7)) { SetDParam(0, d->via); SetDParam(1, icon_via); ltr ? DrawString( text_left + time_width + type_width + 6, text_right - status_width - (toc_width + veh_width + group_width + 2) - 2, y + 1, STR_DEPARTURES_VIA) : DrawString(text_left + status_width + (toc_width + veh_width + group_width + 2) + 2, text_right - time_width - type_width - 6, y + 1, STR_DEPARTURES_VIA); } else { SetDParam(0, d->terminus.station); SetDParam(1, icon); ltr ? DrawString( text_left + time_width + type_width + 6, text_right - status_width - (toc_width + veh_width + group_width + 2) - 2, y + 1, STR_DEPARTURES_TERMINUS_VIA) : DrawString(text_left + status_width + (toc_width + veh_width + group_width + 2) + 2, text_right - time_width - type_width - 6, y + 1, STR_DEPARTURES_TERMINUS_VIA); } } } /* Status */ { int status_left = ltr ? 
text_right - status_width - 2 - (toc_width + veh_width + group_width + 2) : text_left + (toc_width + veh_width + group_width + 2) + 7; int status_right = ltr ? text_right - (toc_width + veh_width + group_width + 2) + 2 : text_left + status_width + 2 + (toc_width + veh_width + group_width + 7); if (d->status == D_ARRIVED) { /* The vehicle has arrived. */ DrawString(status_left, status_right, y + 1, STR_DEPARTURES_ARRIVED); } else if(d->status == D_CANCELLED) { /* The vehicle has been cancelled. */ DrawString(status_left, status_right, y + 1, STR_DEPARTURES_CANCELLED); } else{ if (d->lateness <= DAY_TICKS && d->scheduled_date > ((_date * DAY_TICKS) + _date_fract)) { /* We have no evidence that the vehicle is late, so assume it is on time. */ DrawString(status_left, status_right, y + 1, STR_DEPARTURES_ON_TIME); } else { if ((d->scheduled_date + d->lateness) < ((_date * DAY_TICKS) + _date_fract)) { /* The vehicle was expected to have arrived by now, even if we knew it was going to be late. */ /* We assume that the train stays at least a day at a station so it won't accidentally be marked as delayed for a fraction of a day. */ DrawString(status_left, status_right, y + 1, STR_DEPARTURES_DELAYED); } else { /* The vehicle is expected to be late and is not yet due to arrive. */ SetDParam(0, d->scheduled_date + d->lateness); DrawString(status_left, status_right, y + 1, STR_DEPARTURES_EXPECTED); } } } } /* Vehicle name */ if (_settings_client.gui.departure_show_vehicle) { SetDParam(0, (uint64)(d->vehicle->index)); ltr ? DrawString(text_right - (toc_width + veh_width + group_width + 2), text_right - toc_width - group_width - 2, y + 1, STR_DEPARTURES_VEH) : DrawString( text_left + toc_width + group_width + 2, text_left + (toc_width + veh_width + group_width + 2), y + 1, STR_DEPARTURES_VEH); } /* Group name */ if (_settings_client.gui.departure_show_group && d->vehicle->group_id != INVALID_GROUP && d->vehicle->group_id != DEFAULT_GROUP) { SetDParam(0, (uint64)(d->vehicle->group_id)); ltr ? DrawString(text_right - (toc_width + group_width + 2), text_right - toc_width - 2, y + 1, STR_DEPARTURES_GROUP) : DrawString( text_left + toc_width + 2, text_left + (toc_width + group_width + 2), y + 1, STR_DEPARTURES_GROUP); } /* Operating company */ if (_settings_client.gui.departure_show_company) { SetDParam(0, (uint64)(d->vehicle->owner)); ltr ? DrawString(text_right - toc_width, text_right, y + 1, STR_DEPARTURES_TOC, TC_FROMSTRING, SA_RIGHT) : DrawString( text_left, text_left + toc_width, y + 1, STR_DEPARTURES_TOC, TC_FROMSTRING, SA_LEFT); } int bottom_y = y + this->entry_height - small_font_size - (_settings_client.gui.departure_larger_font ? 1 : 3); /* Calling at */ ltr ? DrawString( text_left, text_left + calling_at_width, bottom_y, _settings_client.gui.departure_larger_font ? STR_DEPARTURES_CALLING_AT_LARGE : STR_DEPARTURES_CALLING_AT) : DrawString(text_right - calling_at_width, text_right, bottom_y, _settings_client.gui.departure_larger_font ? STR_DEPARTURES_CALLING_AT_LARGE : STR_DEPARTURES_CALLING_AT); /* List of stations */ /* RTL languages can be handled in the language file, e.g. 
by having the following: */ /* STR_DEPARTURES_CALLING_AT_STATION :{STATION}, {RAW_STRING} */ /* STR_DEPARTURES_CALLING_AT_LAST_STATION :{STATION} & {RAW_STRING}*/ char buffer[512], scratch[512]; if (d->calling_at.Length() != 0) { SetDParam(0, (uint64)(*d->calling_at.Get(0)).station); GetString(scratch, STR_DEPARTURES_CALLING_AT_FIRST_STATION, lastof(scratch)); StationID continuesTo = INVALID_STATION; if (d->calling_at.Get(0)->station == d->terminus.station && d->calling_at.Length() > 1) { continuesTo = d->calling_at.Get(d->calling_at.Length() - 1)->station; } else if (d->calling_at.Length() > 1) { /* There's more than one stop. */ uint i; /* For all but the last station, write out ", <station>". */ for (i = 1; i < d->calling_at.Length() - 1; ++i) { StationID s = d->calling_at.Get(i)->station; if (s == d->terminus.station) { continuesTo = d->calling_at.Get(d->calling_at.Length() - 1)->station; break; } SetDParam(0, (uint64)scratch); SetDParam(1, (uint64)s); GetString(buffer, STR_DEPARTURES_CALLING_AT_STATION, lastof(buffer)); strncpy(scratch, buffer, sizeof(scratch)); } /* Finally, finish off with " and <station>". */ SetDParam(0, (uint64)scratch); SetDParam(1, (uint64)d->calling_at.Get(i)->station); GetString(buffer, STR_DEPARTURES_CALLING_AT_LAST_STATION, lastof(buffer)); strncpy(scratch, buffer, sizeof(scratch)); } SetDParam(0, (uint64)scratch); StringID string; if (continuesTo == INVALID_STATION) { string = _settings_client.gui.departure_larger_font ? STR_DEPARTURES_CALLING_AT_LIST_LARGE : STR_DEPARTURES_CALLING_AT_LIST; } else { SetDParam(1, continuesTo); string = _settings_client.gui.departure_larger_font ? STR_DEPARTURES_CALLING_AT_LIST_SMART_TERMINUS_LARGE : STR_DEPARTURES_CALLING_AT_LIST_SMART_TERMINUS; } GetString(buffer, string, lastof(buffer)); } else { buffer[0] = 0; //SetDParam(0, d->terminus); //GetString(scratch, STR_DEPARTURES_CALLING_AT_FIRST_STATION, lastof(scratch)); } int list_width = (GetStringBoundingBox(buffer, _settings_client.gui.departure_larger_font ? FS_NORMAL : FS_SMALL)).width; /* Draw the whole list if it will fit. Otherwise scroll it. */ if (list_width < text_right - (text_left + calling_at_width + 2)) { ltr ? DrawString(text_left + calling_at_width + 2, text_right, bottom_y, buffer) : DrawString( text_left, text_right - calling_at_width - 2, bottom_y, buffer); } else { DrawPixelInfo tmp_dpi; if (ltr ? !FillDrawPixelInfo(&tmp_dpi, text_left + calling_at_width + 2, bottom_y, text_right - (text_left + calling_at_width + 2), small_font_size + 3) : !FillDrawPixelInfo(&tmp_dpi, text_left , bottom_y, text_right - (text_left + calling_at_width + 2), small_font_size + 3)) { y += this->entry_height; continue; } DrawPixelInfo *old_dpi = _cur_dpi; _cur_dpi = &tmp_dpi; /* The scrolling text starts out of view at the right of the screen and finishes when it is out of view at the left of the screen. */ int pos = ltr ? text_right - (this->tick_count % (list_width + text_right - text_left)) : text_left + (this->tick_count % (list_width + text_right - text_left)); ltr ? DrawString( pos, INT16_MAX, 0, buffer, TC_FROMSTRING, SA_LEFT | SA_FORCE) : DrawString(-INT16_MAX, pos, 0, buffer, TC_FROMSTRING, SA_RIGHT | SA_FORCE); _cur_dpi = old_dpi; } y += this->entry_height; } }<|fim▁end|>
template<bool Twaypoint> void DeparturesWindow<Twaypoint>::DrawDeparturesListItems(const Rect &r) const
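// Both OnClick and DrawDeparturesListItems above walk the departure and
// arrival lists with the same two-pointer merge, always taking whichever
// head has the earlier scheduled_date. Extracted on its own (with plain
// std::vector standing in for DepartureList), the pattern looks like this:
#include <vector>

template <typename T>
std::vector<T> MergeBySchedule(const std::vector<T> &deps, const std::vector<T> &arrs)
{
	std::vector<T> merged;
	size_t d = 0, a = 0;
	while (d < deps.size() || a < arrs.size()) {
		if (a == arrs.size()) {
			merged.push_back(deps[d++]);
		} else if (d == deps.size()) {
			merged.push_back(arrs[a++]);
		} else if (arrs[a].scheduled_date < deps[d].scheduled_date) {
			merged.push_back(arrs[a++]); // the arrival is due first
		} else {
			merged.push_back(deps[d++]);
		}
	}
	return merged;
}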
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. //<|fim▁hole|>// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! # The Unicode Library //! //! Unicode-intensive functions for `char` and `str` types. //! //! This crate provides a collection of Unicode-related functionality, //! including decompositions, conversions, etc., and provides traits //! implementing these functions for the `char` and `str` types. //! //! The functionality included here is only that which is necessary to //! provide for basic string-related manipulations. This crate does not //! (yet) aim to provide a full set of Unicode tables. #![crate_name = "unicode"] #![unstable(feature = "unicode")] #![feature(staged_api)] #![staged_api] #![crate_type = "rlib"] #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/nightly/", html_playground_url = "http://play.rust-lang.org/")] #![feature(no_std)] #![no_std] #![feature(core)] extern crate core; // regex module pub use tables::regex; mod normalize; mod tables; mod u_char; mod u_str; // re-export char so that std et al see it correctly /// Character manipulation (`char` type, Unicode Scalar Value) /// /// This module provides the `CharExt` trait, as well as its /// implementation for the primitive `char` type, in order to allow /// basic character manipulation. /// /// A `char` actually represents a /// *[Unicode Scalar Value](http://www.unicode.org/glossary/#unicode_scalar_value)*, /// as it can contain any Unicode code point except high-surrogate and /// low-surrogate code points. /// /// As such, only values in the ranges \[0x0,0xD7FF\] and \[0xE000,0x10FFFF\] /// (inclusive) are allowed. A `char` can always be safely cast to a `u32`; /// however the converse is not always true due to the above range limits /// and, as such, should be performed via the `from_u32` function. #[stable(feature = "rust1", since = "1.0.0")] #[doc(primitive = "char")] pub mod char { pub use core::char::{MAX, from_u32, from_digit}; pub use normalize::{decompose_canonical, decompose_compatible, compose}; pub use tables::normalization::canonical_combining_class; pub use tables::UNICODE_VERSION; pub use u_char::CharExt; } pub mod str { pub use u_str::{UnicodeStr, Words, Graphemes, GraphemeIndices}; pub use u_str::{utf8_char_width, is_utf16, Utf16Items, Utf16Item}; pub use u_str::{utf16_items, Utf16Encoder}; }<|fim▁end|>
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
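// The char module docs above pin down exactly which u32 values are Unicode
// scalar values: [0x0, 0xD7FF] and [0xE000, 0x10FFFF]. A small demonstration
// of why the docs steer u32 -> char conversion through from_u32 (illustrative
// and std-based, unlike the no_std crate above):
fn demo_scalar_values() {
    use std::char::from_u32;

    assert_eq!(from_u32(0x61), Some('a'));              // inside the first range
    assert_eq!(from_u32(0xD800), None);                 // a surrogate: not a char
    assert_eq!(from_u32(0x10FFFF), Some('\u{10FFFF}')); // the maximum scalar value
    assert_eq!(from_u32(0x110000), None);               // past char::MAX
}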
<|file_name|>textmate.js<|end_file_name|><|fim▁begin|>define([ "less!theme/textmate.less" ], function(cssContent) { return { 'isDark': false, 'cssClass': "ace-tm", 'cssText': cssContent<|fim▁hole|><|fim▁end|>
} });
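// A sketch of how a RequireJS theme module like the one above is consumed,
// e.g. by an editor shell that applies cssClass to its container. The
// 'applyTheme' name is illustrative, not part of the real codebase.
define(["theme/textmate"], function (theme) {
  function applyTheme(containerEl) {
    containerEl.className = theme.cssClass; // e.g. "ace-tm"
    // theme.cssText was already loaded through the less! plugin dependency.
    return theme.isDark ? "dark" : "light";
  }
  return { applyTheme: applyTheme };
});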
<|file_name|>memchr.rs<|end_file_name|><|fim▁begin|>const LO_U64: u64 = 0x0101010101010101; const HI_U64: u64 = 0x8080808080808080; // use truncation const LO_USIZE: usize = LO_U64 as usize; const HI_USIZE: usize = HI_U64 as usize; #[cfg(target_pointer_width = "32")] const USIZE_BYTES: usize = 4; #[cfg(target_pointer_width = "64")] const USIZE_BYTES: usize = 8; /// Return `true` if `x` contains any zero byte. /// /// From *Matters Computational*, J. Arndt /// /// "The idea is to subtract one from each of the bytes and then look for /// bytes where the borrow propagated all the way to the most significant /// bit." #[inline] fn contains_zero_byte(x: usize) -> bool { x.wrapping_sub(LO_USIZE) & !x & HI_USIZE != 0 } #[cfg(target_pointer_width = "32")] #[inline] fn repeat_byte(b: u8) -> usize { let mut rep = (b as usize) << 8 | b as usize; rep = rep << 16 | rep; rep } #[cfg(target_pointer_width = "64")] #[inline] fn repeat_byte(b: u8) -> usize { let mut rep = (b as usize) << 8 | b as usize; rep = rep << 16 | rep; rep = rep << 32 | rep; rep } /// A safe interface to `memchr`. /// /// Returns the index corresponding to the first occurrence of `needle` in /// `haystack`, or `None` if one is not found. /// /// memchr reduces to super-optimized machine code at around an order of /// magnitude faster than `haystack.iter().position(|&b| b == needle)`. /// (See benchmarks.) /// /// # Example /// /// This shows how to find the first position of a byte in a byte string. /// /// ```rust /// use memchr::memchr; /// /// let haystack = b"the quick brown fox"; /// assert_eq!(memchr(b'k', haystack), Some(8)); /// ``` #[inline(always)] // reduces constant overhead pub fn memchr(needle: u8, haystack: &[u8]) -> Option<usize> { fn memchr_specific(needle: u8, haystack: &[u8]) -> Option<usize> { fallback::memchr(needle, haystack) } memchr_specific(needle, haystack) } /// A safe interface to `memrchr`. /// /// Returns the index corresponding to the last occurrence of `needle` in /// `haystack`, or `None` if one is not found.<|fim▁hole|>/// /// # Example /// /// This shows how to find the last position of a byte in a byte string. /// /// ```rust /// use memchr::memrchr; /// /// let haystack = b"the quick brown fox"; /// assert_eq!(memrchr(b'o', haystack), Some(17)); /// ``` #[inline(always)] // reduces constant overhead pub fn memrchr(needle: u8, haystack: &[u8]) -> Option<usize> { fn memrchr_specific(needle: u8, haystack: &[u8]) -> Option<usize> { fallback::memrchr(needle, haystack) } memrchr_specific(needle, haystack) } mod fallback { use std::cmp; use super::{USIZE_BYTES, contains_zero_byte, repeat_byte}; /// Return the first index matching the byte `a` in `text`. pub fn memchr(x: u8, text: &[u8]) -> Option<usize> { // Scan for a single byte value by reading two `usize` words at a time. 
// // Split `text` in three parts // - unaligned inital part, before the first word aligned address in text // - body, scan by 2 words at a time // - the last remaining part, < 2 word size let len = text.len(); let ptr = text.as_ptr(); // search up to an aligned boundary let align = (ptr as usize) & (USIZE_BYTES - 1); let mut offset; if align > 0 { offset = cmp::min(USIZE_BYTES - align, len); if let Some(index) = text[..offset].iter().position(|elt| *elt == x) { return Some(index); } } else { offset = 0; } // search the body of the text let repeated_x = repeat_byte(x); if len >= 2 * USIZE_BYTES { while offset <= len - 2 * USIZE_BYTES { unsafe { let u = *(ptr.offset(offset as isize) as *const usize); let v = *(ptr.offset((offset + USIZE_BYTES) as isize) as *const usize); // break if there is a matching byte let zu = contains_zero_byte(u ^ repeated_x); let zv = contains_zero_byte(v ^ repeated_x); if zu || zv { break; } } offset += USIZE_BYTES * 2; } } // find the byte after the point the body loop stopped text[offset..].iter().position(|elt| *elt == x).map(|i| offset + i) } /// Return the last index matching the byte `a` in `text`. pub fn memrchr(x: u8, text: &[u8]) -> Option<usize> { // Scan for a single byte value by reading two `usize` words at a time. // // Split `text` in three parts // - unaligned tail, after the last word aligned address in text // - body, scan by 2 words at a time // - the first remaining bytes, < 2 word size let len = text.len(); let ptr = text.as_ptr(); // search to an aligned boundary let end_align = (ptr as usize + len) & (USIZE_BYTES - 1); let mut offset; if end_align > 0 { offset = len - cmp::min(USIZE_BYTES - end_align, len); if let Some(index) = text[offset..].iter().rposition(|elt| *elt == x) { return Some(offset + index); } } else { offset = len; } // search the body of the text let repeated_x = repeat_byte(x); while offset >= 2 * USIZE_BYTES { unsafe { let u = *(ptr.offset(offset as isize - 2 * USIZE_BYTES as isize) as *const usize); let v = *(ptr.offset(offset as isize - USIZE_BYTES as isize) as *const usize); // break if there is a matching byte let zu = contains_zero_byte(u ^ repeated_x); let zv = contains_zero_byte(v ^ repeated_x); if zu || zv { break; } } offset -= 2 * USIZE_BYTES; } // find the byte before the point the body loop stopped text[..offset].iter().rposition(|elt| *elt == x) } } #[cfg(test)] mod tests { extern crate quickcheck; use super::{memchr, memrchr, memchr2, memchr3}; #[test] fn matches_one() { assert_eq!(Some(0), memchr(b'a', b"a")); } #[test] fn matches_begin() { assert_eq!(Some(0), memchr(b'a', b"aaaa")); } #[test] fn matches_end() { assert_eq!(Some(4), memchr(b'z', b"aaaaz")); } #[test] fn matches_nul() { assert_eq!(Some(4), memchr(b'\x00', b"aaaa\x00")); } #[test] fn matches_past_nul() { assert_eq!(Some(5), memchr(b'z', b"aaaa\x00z")); } #[test] fn no_match_empty() { assert_eq!(None, memchr(b'a', b"")); } #[test] fn no_match() { assert_eq!(None, memchr(b'a', b"xyz")); } #[test] fn qc_never_fail() { fn prop(needle: u8, haystack: Vec<u8>) -> bool { memchr(needle, &haystack); true } quickcheck::quickcheck(prop as fn(u8, Vec<u8>) -> bool); } #[test] fn matches_one_reversed() { assert_eq!(Some(0), memrchr(b'a', b"a")); } #[test] fn matches_begin_reversed() { assert_eq!(Some(3), memrchr(b'a', b"aaaa")); } #[test] fn matches_end_reversed() { assert_eq!(Some(0), memrchr(b'z', b"zaaaa")); } #[test] fn matches_nul_reversed() { assert_eq!(Some(4), memrchr(b'\x00', b"aaaa\x00")); } #[test] fn matches_past_nul_reversed() { 
assert_eq!(Some(0), memrchr(b'z', b"z\x00aaaa")); } #[test] fn no_match_empty_reversed() { assert_eq!(None, memrchr(b'a', b"")); } #[test] fn no_match_reversed() { assert_eq!(None, memrchr(b'a', b"xyz")); } #[test] fn qc_never_fail_reversed() { fn prop(needle: u8, haystack: Vec<u8>) -> bool { memrchr(needle, &haystack); true } quickcheck::quickcheck(prop as fn(u8, Vec<u8>) -> bool); } #[test] fn memchr2_matches_one() { assert_eq!(Some(0), memchr2(b'a', b'b', b"a")); assert_eq!(Some(0), memchr2(b'a', b'b', b"b")); assert_eq!(Some(0), memchr2(b'b', b'a', b"a")); assert_eq!(Some(0), memchr2(b'b', b'a', b"b")); } #[test] fn memchr2_matches_begin() { assert_eq!(Some(0), memchr2(b'a', b'b', b"aaaa")); assert_eq!(Some(0), memchr2(b'a', b'b', b"bbbb")); } #[test] fn memchr2_matches_end() { assert_eq!(Some(4), memchr2(b'z', b'y', b"aaaaz")); assert_eq!(Some(4), memchr2(b'z', b'y', b"aaaay")); } #[test] fn memchr2_matches_nul() { assert_eq!(Some(4), memchr2(b'\x00', b'z', b"aaaa\x00")); assert_eq!(Some(4), memchr2(b'z', b'\x00', b"aaaa\x00")); } #[test] fn memchr2_matches_past_nul() { assert_eq!(Some(5), memchr2(b'z', b'y', b"aaaa\x00z")); assert_eq!(Some(5), memchr2(b'y', b'z', b"aaaa\x00z")); } #[test] fn memchr2_no_match_empty() { assert_eq!(None, memchr2(b'a', b'b', b"")); assert_eq!(None, memchr2(b'b', b'a', b"")); } #[test] fn memchr2_no_match() { assert_eq!(None, memchr2(b'a', b'b', b"xyz")); } #[test] fn qc_never_fail_memchr2() { fn prop(needle1: u8, needle2: u8, haystack: Vec<u8>) -> bool { memchr2(needle1, needle2, &haystack); true } quickcheck::quickcheck(prop as fn(u8, u8, Vec<u8>) -> bool); } #[test] fn memchr3_matches_one() { assert_eq!(Some(0), memchr3(b'a', b'b', b'c', b"a")); assert_eq!(Some(0), memchr3(b'a', b'b', b'c', b"b")); assert_eq!(Some(0), memchr3(b'a', b'b', b'c', b"c")); } #[test] fn memchr3_matches_begin() { assert_eq!(Some(0), memchr3(b'a', b'b', b'c', b"aaaa")); assert_eq!(Some(0), memchr3(b'a', b'b', b'c', b"bbbb")); assert_eq!(Some(0), memchr3(b'a', b'b', b'c', b"cccc")); } #[test] fn memchr3_matches_end() { assert_eq!(Some(4), memchr3(b'z', b'y', b'x', b"aaaaz")); assert_eq!(Some(4), memchr3(b'z', b'y', b'x', b"aaaay")); assert_eq!(Some(4), memchr3(b'z', b'y', b'x', b"aaaax")); } #[test] fn memchr3_matches_nul() { assert_eq!(Some(4), memchr3(b'\x00', b'z', b'y', b"aaaa\x00")); assert_eq!(Some(4), memchr3(b'z', b'\x00', b'y', b"aaaa\x00")); assert_eq!(Some(4), memchr3(b'z', b'y', b'\x00', b"aaaa\x00")); } #[test] fn memchr3_matches_past_nul() { assert_eq!(Some(5), memchr3(b'z', b'y', b'x', b"aaaa\x00z")); assert_eq!(Some(5), memchr3(b'y', b'z', b'x', b"aaaa\x00z")); assert_eq!(Some(5), memchr3(b'y', b'x', b'z', b"aaaa\x00z")); } #[test] fn memchr3_no_match_empty() { assert_eq!(None, memchr3(b'a', b'b', b'c', b"")); assert_eq!(None, memchr3(b'b', b'a', b'c', b"")); assert_eq!(None, memchr3(b'c', b'b', b'a', b"")); } #[test] fn memchr3_no_match() { assert_eq!(None, memchr3(b'a', b'b', b'c', b"xyz")); } #[test] fn qc_never_fail_memchr3() { fn prop(needle1: u8, needle2: u8, needle3: u8, haystack: Vec<u8>) -> bool { memchr3(needle1, needle2, needle3, &haystack); true } quickcheck::quickcheck(prop as fn(u8, u8, u8, Vec<u8>) -> bool); } #[test] fn qc_correct_memchr() { fn prop(v: Vec<u8>, offset: u8) -> bool { // test all pointer alignments let uoffset = (offset & 0xF) as usize; let data = if uoffset <= v.len() { &v[uoffset..] } else { &v[..] 
}; for byte in 0..256u32 { let byte = byte as u8; if memchr(byte, &data) != data.iter().position(|elt| *elt == byte) { return false; } } true } quickcheck::quickcheck(prop as fn(Vec<u8>, u8) -> bool); } #[test] fn qc_correct_memrchr() { fn prop(v: Vec<u8>, offset: u8) -> bool { // test all pointer alignments let uoffset = (offset & 0xF) as usize; let data = if uoffset <= v.len() { &v[uoffset..] } else { &v[..] }; for byte in 0..256u32 { let byte = byte as u8; if memrchr(byte, &data) != data.iter().rposition(|elt| *elt == byte) { return false; } } true } quickcheck::quickcheck(prop as fn(Vec<u8>, u8) -> bool); } #[test] fn qc_correct_memchr2() { fn prop(v: Vec<u8>, offset: u8) -> bool { // test all pointer alignments let uoffset = (offset & 0xF) as usize; let data = if uoffset <= v.len() { &v[uoffset..] } else { &v[..] }; for b1 in 0..256u32 { for b2 in 0..256u32 { let (b1, b2) = (b1 as u8, b2 as u8); let expected = data.iter().position(|&b| b == b1 || b == b2); let got = memchr2(b1, b2, &data); if expected != got { return false; } } } true } quickcheck::quickcheck(prop as fn(Vec<u8>, u8) -> bool); } }<|fim▁end|>
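The `contains_zero_byte`/`repeat_byte` pair in memchr.rs above is the classic SWAR zero-byte test. A minimal, self-contained Rust sketch of the same trick on concrete values; only the two helpers mirror the file, the test word and needles here are illustrative:

const LO: u64 = 0x0101_0101_0101_0101;
const HI: u64 = 0x8080_8080_8080_8080;

// True iff some byte of `x` is zero: subtracting 1 borrows through a zero
// byte's high bit, and `!x` masks out bytes whose high bit was already set.
fn contains_zero_byte(x: u64) -> bool {
    x.wrapping_sub(LO) & !x & HI != 0
}

// Broadcast one byte into all eight lanes of a u64.
fn repeat_byte(b: u8) -> u64 {
    u64::from_ne_bytes([b; 8])
}

fn main() {
    let word = u64::from_ne_bytes(*b"quick fo");
    // XOR zeroes exactly the lanes that equal the needle byte.
    assert!(contains_zero_byte(word ^ repeat_byte(b'k'))); // 'k' is in "quick fo"
    assert!(!contains_zero_byte(word ^ repeat_byte(b'z'))); // 'z' is not
    println!("zero-byte trick verified");
}

Masking with `!x` is what makes the test exact: without it, any byte of 0x80 or above would set its high bit after the subtraction and report a false positive.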
<|file_name|>gyro_plot.py<|end_file_name|><|fim▁begin|># This is an example of popping a packet from the Emotiv class's packet queue
# and printing the gyro x and y values to the console.
from emokit.emotiv import Emotiv
import platform
if platform.system() == "Windows":
    import socket  # Needed to prevent gevent crashing on Windows. (surfly / gevent issue #459)
import gevent

import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation

is_running = True

def evt_main(ring_buf):
    global is_running  # without this, the finally block would only rebind a local
    headset = Emotiv()
    gevent.spawn(headset.setup)
    gevent.sleep(0)
    pos = 0
    try:
        while True:
            packet = headset.dequeue()
            print packet.gyro_x, packet.gyro_y<|fim▁hole|>
            ring_buf[pos] = packet.gyro_x
            if pos % 4 == 0:
                yield ring_buf
            pos = (pos + 1) % 1024
            gevent.sleep(0)
    except KeyboardInterrupt:
        pass  # the finally block below closes the headset exactly once
    finally:
        is_running = False
        headset.close()

x = np.linspace(0, 1023, 1024)
test_buf = np.zeros(1024)

fig, ax = plt.subplots()
line, = ax.plot(x, test_buf)
plt.axis([0, 1024, -100, 100])

def evt_wrapper():
    def gen():
        return evt_main(test_buf)
    return gen

def init():
    line.set_ydata(np.ma.array(x, mask=True))
    return line,

def animate(rb):
    print "Animation!"
    print rb
    line.set_ydata(rb)
    return line,

def counter():
    i = 0
    while is_running:
        yield i
        i = i + 1

ani = animation.FuncAnimation(fig, animate, evt_wrapper(), init_func=init, interval=20, blit=True)
plt.show()

# gevent.Greenlet.spawn(evt_main, test_buf)
while True:
    gevent.sleep(0)<|fim▁end|>
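gyro_plot.py streams samples into a fixed 1024-slot buffer with `pos = (pos + 1) % 1024`, overwriting the oldest sample. The same wraparound indexing sketched in Rust, with an arbitrary 8-slot buffer and f32 samples standing in for gyro readings:

struct Ring {
    buf: [f32; 8],
    pos: usize,
}

impl Ring {
    fn new() -> Self {
        Ring { buf: [0.0; 8], pos: 0 }
    }

    // Overwrite the oldest slot and advance, wrapping at the end.
    fn push(&mut self, sample: f32) {
        self.buf[self.pos] = sample;
        self.pos = (self.pos + 1) % self.buf.len();
    }
}

fn main() {
    let mut r = Ring::new();
    for i in 0..10 {
        r.push(i as f32);
    }
    // Slots 0 and 1 were overwritten on the second pass.
    assert_eq!(r.buf[0], 8.0);
    assert_eq!(r.buf[1], 9.0);
    assert_eq!(r.buf[2], 2.0);
    println!("{:?}", r.buf);
}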
<|file_name|>test_types.py<|end_file_name|><|fim▁begin|>from datetime import date, time try: from decimal import Decimal haveDecimal = True except ImportError: haveDecimal = False from twisted.internet import defer from twisted.trial import unittest import formal from formal import validation class TestValidators(unittest.TestCase): def testHasValidator(self): t = formal.String(validators=[validation.LengthValidator(max=10)]) self.assertEquals(t.hasValidator(validation.LengthValidator), True) def testRequired(self): t = formal.String(required=True) self.assertEquals(t.hasValidator(validation.RequiredValidator), True) self.assertEquals(t.required, True) class TestCreation(unittest.TestCase): def test_immutablility(self): self.assertEquals(formal.String().immutable, False) self.assertEquals(formal.String(immutable=False).immutable, False) self.assertEquals(formal.String(immutable=True).immutable, True) def test_immutablilityOverride(self): class String(formal.String): immutable = True self.assertEquals(String().immutable, True) self.assertEquals(String(immutable=False).immutable, False) self.assertEquals(String(immutable=True).immutable, True) class TestValidate(unittest.TestCase): @defer.deferredGenerator def runSuccessTests(self, type, tests): for test in tests: d = type(*test[0], **test[1]).validate(test[2]) d = defer.waitForDeferred(d) yield d self.assertEquals(d.getResult(), test[3]) @defer.deferredGenerator def runFailureTests(self, type, tests): for test in tests: d = type(*test[0], **test[1]).validate(test[2]) d = defer.waitForDeferred(d) yield d self.assertRaises(test[3], d.getResult) def testStringSuccess(self): return self.runSuccessTests(formal.String, [ ([], {}, None, None), ([], {}, '', None), ([], {}, ' ', ' '), ([], {}, 'foo', 'foo'), ([], {}, u'foo', u'foo'), ([], {'strip': True}, ' ', None), ([], {'strip': True}, ' foo ', 'foo'),<|fim▁hole|> def testStringFailure(self): return self.runFailureTests(formal.String, [ ([], {'required': True}, '', formal.FieldValidationError), ([], {'required': True}, None, formal.FieldValidationError), ]) def testIntegerSuccess(self): return self.runSuccessTests(formal.Integer, [ ([], {}, None, None), ([], {}, 0, 0), ([], {}, 1, 1), ([], {}, -1, -1), ([], {'missing': 1}, None, 1), ([], {'missing': 1}, 2, 2), ]) def testIntegerFailure(self): return self.runFailureTests(formal.Integer, [ ([], {'required': True}, None, formal.FieldValidationError), ]) def testFloatSuccess(self): self.runSuccessTests(formal.Float, [ ([], {}, None, None), ([], {}, 0, 0.0), ([], {}, 0.0, 0.0), ([], {}, .1, .1), ([], {}, 1, 1.0), ([], {}, -1, -1.0), ([], {}, -1.86, -1.86), ([], {'missing': 1.0}, None, 1.0), ([], {'missing': 1.0}, 2.0, 2.0), ]) def testFloatFailure(self): self.runFailureTests(formal.Float, [ ([], {'required': True}, None, formal.FieldValidationError), ]) if haveDecimal: def testDecimalSuccess(self): return self.runSuccessTests(formal.Decimal, [ ([], {}, None, None), ([], {}, Decimal('0'), Decimal('0')), ([], {}, Decimal('0.0'), Decimal('0.0')), ([], {}, Decimal('.1'), Decimal('.1')), ([], {}, Decimal('1'), Decimal('1')), ([], {}, Decimal('-1'), Decimal('-1')), ([], {}, Decimal('-1.86'), Decimal('-1.86')), ([], {'missing': Decimal('1.0')}, None, Decimal('1.0')), ([], {'missing': Decimal('1.0')}, Decimal('2.0'), Decimal('2.0')), ]) def testDecimalFailure(self): return self.runFailureTests(formal.Decimal, [ ([], {'required': True}, None, formal.FieldValidationError), ]) def testBooleanSuccess(self): return self.runSuccessTests(formal.Boolean, [ ([], {}, 
None, None), ([], {}, True, True), ([], {}, False, False), ([], {'missing' :True}, None, True), ([], {'missing': True}, False, False) ]) def testDateSuccess(self): return self.runSuccessTests(formal.Date, [ ([], {}, None, None), ([], {}, date(2005, 1, 1), date(2005, 1, 1)), ([], {'missing': date(2005, 1, 2)}, None, date(2005, 1, 2)), ([], {'missing': date(2005, 1, 2)}, date(2005, 1, 1), date(2005, 1, 1)), ]) def testDateFailure(self): return self.runFailureTests(formal.Date, [ ([], {'required': True}, None, formal.FieldValidationError), ]) def testTimeSuccess(self): self.runSuccessTests(formal.Time, [ ([], {}, None, None), ([], {}, time(12, 30, 30), time(12, 30, 30)), ([], {'missing': time(12, 30, 30)}, None, time(12, 30, 30)), ([], {'missing': time(12, 30, 30)}, time(12, 30, 31), time(12, 30, 31)), ]) def testTimeFailure(self): self.runFailureTests(formal.Time, [ ([], {'required': True}, None, formal.FieldValidationError), ]) def testSequenceSuccess(self): self.runSuccessTests(formal.Sequence, [ ([formal.String()], {}, None, None), ([formal.String()], {}, ['foo'], ['foo']), ([formal.String()], {'missing': ['foo']}, None, ['foo']), ([formal.String()], {'missing': ['foo']}, ['bar'], ['bar']), ]) def testSequenceFailure(self): self.runFailureTests(formal.Sequence, [ ([formal.String()], {'required': True}, None, formal.FieldValidationError), ([formal.String()], {'required': True}, [], formal.FieldValidationError), ]) def test_file(self): pass test_file.skip = "write tests"<|fim▁end|>
([], {'missing': 'bar'}, 'foo', 'foo'), ([], {'missing': 'bar'}, '', 'bar'), ([], {'strip': True, 'missing': ''}, ' ', ''), ])
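test_types.py above drives each field type through tables of `(args, kwargs, input, expected)` rows rather than one test function per case. A rough Rust rendition of that table-driven pattern; the `normalize` function is an illustrative stand-in for formal's string validator, not part of the library:

// Stand-in for the string type's strip/missing handling: trim whitespace,
// map an empty result to None.
fn normalize(input: &str) -> Option<String> {
    let t = input.trim();
    if t.is_empty() { None } else { Some(t.to_string()) }
}

fn main() {
    // (input, expected) pairs, mirroring the value/result columns above.
    let cases: &[(&str, Option<&str>)] = &[
        ("", None),
        ("   ", None),
        ("foo", Some("foo")),
        (" foo ", Some("foo")),
    ];
    for (input, expected) in cases {
        let got = normalize(input);
        assert_eq!(got.as_deref(), *expected, "case {:?}", input);
    }
    println!("all {} cases pass", cases.len());
}

The payoff is the same as in the Python harness: adding a case is one table row, and a failure message names the offending input instead of just the test function.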
<|file_name|>test_item.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # License: GNU General Public License v3. See license.txt from __future__ import unicode_literals import unittest import frappe from frappe.test_runner import make_test_objects from erpnext.controllers.item_variant import (create_variant, ItemVariantExistsError, InvalidItemAttributeValueError, get_variant) from erpnext.stock.doctype.item.item import StockExistsForTemplate from frappe.model.rename_doc import rename_doc from erpnext.stock.doctype.stock_entry.stock_entry_utils import make_stock_entry from erpnext.stock.get_item_details import get_item_details test_ignore = ["BOM"] test_dependencies = ["Warehouse"] def make_item(item_code, properties=None): if frappe.db.exists("Item", item_code): return frappe.get_doc("Item", item_code) item = frappe.get_doc({ "doctype": "Item", "item_code": item_code, "item_name": item_code, "description": item_code, "item_group": "Products" }) if properties: item.update(properties) if item.is_stock_item and not item.default_warehouse: item.default_warehouse = "_Test Warehouse - _TC" item.insert() return item class TestItem(unittest.TestCase): def setUp(self): frappe.flags.attribute_values = None def get_item(self, idx): item_code = test_records[idx].get("item_code") if not frappe.db.exists("Item", item_code): item = frappe.copy_doc(test_records[idx]) item.insert() else: item = frappe.get_doc("Item", item_code) return item def test_get_item_details(self): # delete modified item price record and make as per test_records frappe.db.sql("""delete from `tabItem Price`""") to_check = { "item_code": "_Test Item", "item_name": "_Test Item", "description": "_Test Item 1", "warehouse": "_Test Warehouse - _TC", "income_account": "Sales - _TC", "expense_account": "_Test Account Cost for Goods Sold - _TC", "cost_center": "_Test Cost Center 2 - _TC", "qty": 1.0, "price_list_rate": 100.0, "base_price_list_rate": 0.0, "discount_percentage": 0.0, "rate": 0.0, "base_rate": 0.0, "amount": 0.0, "base_amount": 0.0, "batch_no": None,<|fim▁hole|> "conversion_factor": 1.0, } make_test_objects("Item Price") print(frappe.get_all("Item Price")) details = get_item_details({ "item_code": "_Test Item", "company": "_Test Company", "price_list": "_Test Price List", "currency": "_Test Currency", "doctype": "Sales Order", "conversion_rate": 1, "price_list_currency": "_Test Currency", "plc_conversion_rate": 1, "order_type": "Sales", "customer": "_Test Customer", "conversion_factor": 1, "price_list_uom_dependant": 1, "ignore_pricing_rule": 1 }) for key, value in to_check.iteritems(): self.assertEquals(value, details.get(key)) def test_item_attribute_change_after_variant(self): frappe.delete_doc_if_exists("Item", "_Test Variant Item-L", force=1) variant = create_variant("_Test Variant Item", {"Test Size": "Large"}) variant.save() attribute = frappe.get_doc('Item Attribute', 'Test Size') attribute.item_attribute_values = [] # reset flags frappe.flags.attribute_values = None self.assertRaises(InvalidItemAttributeValueError, attribute.save) frappe.db.rollback() def test_make_item_variant(self): frappe.delete_doc_if_exists("Item", "_Test Variant Item-L", force=1) variant = create_variant("_Test Variant Item", {"Test Size": "Large"}) variant.save() # doing it again should raise error variant = create_variant("_Test Variant Item", {"Test Size": "Large"}) variant.item_code = "_Test Variant Item-L-duplicate" self.assertRaises(ItemVariantExistsError, variant.save) def 
test_copy_fields_from_template_to_variants(self): frappe.delete_doc_if_exists("Item", "_Test Variant Item-XL", force=1) fields = [{'field_name': 'item_group'}, {'field_name': 'is_stock_item'}] allow_fields = [d.get('field_name') for d in fields] set_item_variant_settings(fields) if not frappe.db.get_value('Item Attribute Value', {'parent': 'Test Size', 'attribute_value': 'Extra Large'}, 'name'): item_attribute = frappe.get_doc('Item Attribute', 'Test Size') item_attribute.append('item_attribute_values', { 'attribute_value' : 'Extra Large', 'abbr': 'XL' }) item_attribute.save() variant = create_variant("_Test Variant Item", {"Test Size": "Extra Large"}) variant.item_code = "_Test Variant Item-XL" variant.item_name = "_Test Variant Item-XL" variant.save() template = frappe.get_doc('Item', '_Test Variant Item') template.item_group = "_Test Item Group D" template.save() variant = frappe.get_doc('Item', '_Test Variant Item-XL') for fieldname in allow_fields: self.assertEquals(template.get(fieldname), variant.get(fieldname)) template = frappe.get_doc('Item', '_Test Variant Item') template.item_group = "_Test Item Group Desktops" template.save() def test_make_item_variant_with_numeric_values(self): # cleanup for d in frappe.db.get_all('Item', filters={'variant_of': '_Test Numeric Template Item'}): frappe.delete_doc_if_exists("Item", d.name) frappe.delete_doc_if_exists("Item", "_Test Numeric Template Item") frappe.delete_doc_if_exists("Item Attribute", "Test Item Length") frappe.db.sql('''delete from `tabItem Variant Attribute` where attribute="Test Item Length"''') frappe.flags.attribute_values = None # make item attribute frappe.get_doc({ "doctype": "Item Attribute", "attribute_name": "Test Item Length", "numeric_values": 1, "from_range": 0.0, "to_range": 100.0, "increment": 0.5 }).insert() # make template item make_item("_Test Numeric Template Item", { "attributes": [ { "attribute": "Test Size" }, { "attribute": "Test Item Length", "numeric_values": 1, "from_range": 0.0, "to_range": 100.0, "increment": 0.5 } ], "default_warehouse": "_Test Warehouse - _TC", "has_variants": 1 }) variant = create_variant("_Test Numeric Template Item", {"Test Size": "Large", "Test Item Length": 1.1}) self.assertEquals(variant.item_code, "_Test Numeric Template Item-L-1.1") variant.item_code = "_Test Numeric Variant-L-1.1" variant.item_name = "_Test Numeric Variant Large 1.1m" self.assertRaises(InvalidItemAttributeValueError, variant.save) variant = create_variant("_Test Numeric Template Item", {"Test Size": "Large", "Test Item Length": 1.5}) self.assertEquals(variant.item_code, "_Test Numeric Template Item-L-1.5") variant.item_code = "_Test Numeric Variant-L-1.5" variant.item_name = "_Test Numeric Variant Large 1.5m" variant.save() def test_item_merging(self): create_item("Test Item for Merging 1") create_item("Test Item for Merging 2") make_stock_entry(item_code="Test Item for Merging 1", target="_Test Warehouse - _TC", qty=1, rate=100) make_stock_entry(item_code="Test Item for Merging 2", target="_Test Warehouse 1 - _TC", qty=1, rate=100) rename_doc("Item", "Test Item for Merging 1", "Test Item for Merging 2", merge=True) self.assertFalse(frappe.db.exists("Item", "Test Item for Merging 1")) self.assertTrue(frappe.db.get_value("Bin", {"item_code": "Test Item for Merging 2", "warehouse": "_Test Warehouse - _TC"})) self.assertTrue(frappe.db.get_value("Bin", {"item_code": "Test Item for Merging 2", "warehouse": "_Test Warehouse 1 - _TC"})) def test_item_variant_by_manufacturer(self): fields = [{'field_name': 
'description'}, {'field_name': 'variant_based_on'}] set_item_variant_settings(fields) if frappe.db.exists('Item', '_Test Variant Mfg'): frappe.delete_doc('Item', '_Test Variant Mfg') if frappe.db.exists('Item', '_Test Variant Mfg-1'): frappe.delete_doc('Item', '_Test Variant Mfg-1') if frappe.db.exists('Manufacturer', 'MSG1'): frappe.delete_doc('Manufacturer', 'MSG1') template = frappe.get_doc(dict( doctype='Item', item_code='_Test Variant Mfg', has_variant=1, item_group='Products', variant_based_on='Manufacturer' )).insert() manufacturer = frappe.get_doc(dict( doctype='Manufacturer', short_name='MSG1' )).insert() variant = get_variant(template.name, manufacturer=manufacturer.name) self.assertEquals(variant.item_code, '_Test Variant Mfg-1') self.assertEquals(variant.description, '_Test Variant Mfg') self.assertEquals(variant.manufacturer, 'MSG1') variant.insert() variant = get_variant(template.name, manufacturer=manufacturer.name, manufacturer_part_no='007') self.assertEquals(variant.item_code, '_Test Variant Mfg-2') self.assertEquals(variant.description, '_Test Variant Mfg') self.assertEquals(variant.manufacturer, 'MSG1') self.assertEquals(variant.manufacturer_part_no, '007') def test_stock_exists_against_template_item(self): stock_item = frappe.get_all('Stock Ledger Entry', fields = ["item_code"], limit=1) if stock_item: item_code = stock_item[0].item_code item_doc = frappe.get_doc('Item', item_code) item_doc.has_variants = 1 self.assertRaises(StockExistsForTemplate, item_doc.save) def set_item_variant_settings(fields): doc = frappe.get_doc('Item Variant Settings') doc.set('fields', fields) doc.save() def make_item_variant(): if not frappe.db.exists("Item", "_Test Variant Item-S"): variant = create_variant("_Test Variant Item", """{"Test Size": "Small"}""") variant.item_code = "_Test Variant Item-S" variant.item_name = "_Test Variant Item-S" variant.save() def get_total_projected_qty(item): total_qty = frappe.db.sql(""" select sum(projected_qty) as projected_qty from tabBin where item_code = %(item)s""", {'item': item}, as_dict=1) return total_qty[0].projected_qty if total_qty else 0.0 test_records = frappe.get_test_records('Item') def create_item(item_code, is_stock_item=None): if not frappe.db.exists("Item", item_code): item = frappe.new_doc("Item") item.item_code = item_code item.item_name = item_code item.description = item_code item.item_group = "All Item Groups" item.is_stock_item = is_stock_item or 1 item.save()<|fim▁end|>
"item_tax_rate": '{}', "uom": "_Test UOM",
<|file_name|>generatemedia.py<|end_file_name|><|fim▁begin|>from ...api import generate_media, prepare_media from django.core.management.base import BaseCommand class Command(BaseCommand): help = 'Combines and compresses your media files and saves them in _generated_media.' <|fim▁hole|> generate_media()<|fim▁end|>
requires_model_validation = False def handle(self, *args, **options): prepare_media()
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// put actual stuff here
pub use self::cachedefs::{AccessType, CacheStats, CacheBlock, Cache, VictimCache};

pub mod cachedefs;

#[derive(Debug)]
pub struct CacheSystem {
    pub stats: CacheStats,
    pub l1: Cache,
    pub vc: VictimCache,
    pub l2: Cache,
}

impl CacheSystem {
    pub fn new(c1: u64, b1: u64, s1: u64, v: u64, c2: u64, b2: u64, s2: u64) -> Self {
        CacheSystem {
            stats: CacheStats::new(),
            l1: Cache::new(c1, b1, s1),
            vc: VictimCache::new(b1, v),
            l2: Cache::new(c2, b2, s2),
        }
    }

    pub fn cache_access(&mut self, mode: AccessType, address: u64) {
        //println!("Trying to {:?} at address {:#X}", mode, address);
        let in_l1 = self.search_l1(mode, address);
        if !in_l1 {
            print!("M1");
            let in_vc = self.search_and_modify_vc(address);
            if !in_vc {
                if self.vc.v > 0 {
                    print!("MV");
                }
                // hit or miss, move_to_l1 below installs the block in L1
                self.search_l2(mode, address);
            } else {
                println!("HV**");
            }
            self.move_to_l1(mode, address);
        } else {
            if self.vc.v > 0 {
                println!("H1****");
            } else {
                println!("H1**");
            }
        }
    }

    pub fn complete_cache(&mut self) {
        println!("Completing the cache!");
    }

    pub fn print_statistics(&self) {
        println!("Cache Statistics");
        println!("Accesses: {}", self.stats.accesses);
        println!("Accesses to L2: {}", self.stats.accesses_l2);
        println!("Accesses to VC: {}", self.stats.accesses_vc);
        println!("Reads: {}", self.stats.reads);
        println!("Read misses to L1: {}", self.stats.read_misses_l1);
        println!("Read misses to L2: {}", self.stats.read_misses_l2);
        println!("Writes: {}", self.stats.writes);
        println!("Write misses to L1: {}", self.stats.write_misses_l1);
        println!("Write misses to L2: {}", self.stats.write_misses_l2);
        println!("Write backs from L1: {}", self.stats.write_back_l1);
        println!("Write backs from L2: {}", self.stats.write_back_l2);
        println!("L1 victims found in victim cache: {}", self.stats.victim_hits);
        println!("Average access time: {}", self.stats.avg_access_time_l1);
    }

    fn search_l1(&mut self, mode: AccessType, address: u64) -> bool {
        self.stats.accesses += 1;
        let mut found = false;
        let mut hot_block_index = -1i64;
        let index = (address >> self.l1.b) & ((1u64 << self.l1.indexbits) - 1);
        let tag = address >> (self.l1.b + self.l1.indexbits);
        let selected_set = self.l1.sets.get_mut(index as usize);
        if let Some(set) = selected_set {
            for (count, block) in set.into_iter().enumerate() {
                if tag == block.tag {
                    hot_block_index = count as i64;
                    found = true;
                    break;
                }
            }
            if found {
                let mut hot_block = set.remove(hot_block_index as usize).unwrap();
                if let AccessType::Write = mode {
                    hot_block.dirty = true;
                }
                set.push_back(hot_block);
            }
        } else {
            panic!("Address index out of bounds in L1!
Panic!") } if let AccessType::Read = mode { self.stats.reads += 1; if !found { self.stats.read_misses_l1 += 1; } } else { self.stats.writes += 1; if !found { self.stats.write_misses_l1 += 1; } } found } fn move_to_l1(&mut self, mode: AccessType, address: u64) { let index = (address >> self.l1.b) & ((1u64 << self.l1.indexbits) - 1); let tag = address >> (self.l1.b + self.l1.indexbits); let block_in = CacheBlock { address: address, tag: tag, dirty: match mode { AccessType::Read => { false }, AccessType::Write => { true }, }, }; let selected_set = self.l1.sets.get_mut(index as usize); if let Some(set) = selected_set { if set.len() >= self.l1.max_blocks_per_set as usize { let mut evicted_block = set.pop_front().unwrap(); if evicted_block.dirty { self.stats.write_back_l1 += 1; /*let in_l2 = self.search_l2(AccessType::Write, evicted_block.address); if !in_l2 { self.move_to_l2(AccessType::Write, evicted_block.address); }*/ } evicted_block.dirty = false; if self.vc.v > 0 { if self.vc.set.len() == self.vc.v as usize { self.vc.set.pop_front(); } evicted_block.tag = evicted_block.address >> self.vc.b; self.vc.set.push_back(evicted_block); } } set.push_back(block_in); } else { panic!("Address index out of bounds in L1! Panic!") } } fn search_and_modify_vc(&mut self, address: u64) -> bool { if self.vc.v == 0 { return false; } self.stats.accesses_vc += 1; let mut found = false; let mut hot_block_index = -1i64; let tag = address >> self.vc.b; let ref mut set = self.vc.set; for (count, block) in set.into_iter().enumerate() { if block.tag == tag { found = true; hot_block_index = count as i64;<|fim▁hole|> } if found { set.remove(hot_block_index as usize).unwrap(); self.stats.victim_hits += 1; } found } fn search_l2(&mut self, mode: AccessType, address: u64) -> bool { self.stats.accesses_l2 += 1; let mut found = false; let mut hot_block_index = -1i64; let index = (address >> self.l2.b) & ((1u64 << self.l2.indexbits) - 1); let tag = address >> (self.l2.b + self.l2.indexbits); let selected_set = self.l2.sets.get_mut(index as usize); if let Some(set) = selected_set { for (count, block) in set.into_iter().enumerate() { if tag == block.tag { hot_block_index = count as i64; found = true; break; } } if found { let mut hot_block = set.remove(hot_block_index as usize).unwrap(); if let AccessType::Write = mode { hot_block.dirty = true; } set.push_back(hot_block); } } else { panic!("Address index out of bounds in L2! Panic!"); } if !found { if let AccessType::Read = mode { self.stats.read_misses_l2 += 1; } else { self.stats.write_misses_l2 += 1; } } found } fn move_to_l2(&mut self, mode: AccessType, address: u64) { // Todo tomorrow morning :3 } }<|fim▁end|>
break; }
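The set lookups in mod.rs above keep LRU order in a `VecDeque`: a hit is removed from its slot and pushed to the back, and eviction pops the front. Just that policy, stripped of the tag arithmetic, dirty bits, and statistics; a sketch, not the simulator's actual block type:

use std::collections::VecDeque;

struct LruSet {
    blocks: VecDeque<u64>, // front = least recently used
    capacity: usize,
}

impl LruSet {
    fn new(capacity: usize) -> Self {
        LruSet { blocks: VecDeque::new(), capacity }
    }

    // Returns true on a hit; refreshes recency either way.
    fn access(&mut self, tag: u64) -> bool {
        if let Some(i) = self.blocks.iter().position(|&t| t == tag) {
            let hot = self.blocks.remove(i).unwrap();
            self.blocks.push_back(hot); // most recently used at the back
            true
        } else {
            if self.blocks.len() >= self.capacity {
                self.blocks.pop_front(); // evict the LRU block
            }
            self.blocks.push_back(tag);
            false
        }
    }
}

fn main() {
    let mut set = LruSet::new(2);
    assert!(!set.access(1)); // miss, insert
    assert!(!set.access(2)); // miss, insert
    assert!(set.access(1));  // hit, refresh
    assert!(!set.access(3)); // miss, evicts 2 (the LRU block)
    assert!(!set.access(2)); // 2 was evicted
}

Using the deque's order itself as the recency stamp avoids storing per-block timestamps, at the cost of a linear scan per access, which is the same trade the simulator makes.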
<|file_name|>pipe.spec.ts<|end_file_name|><|fim▁begin|>//import {it, describe, expect, beforeEach, inject} from '@angular/testing';
import {DurationFormat} from './duration.pipe';

describe('MyPipe Tests', () => {
  let pipe:DurationFormat;

  beforeEach(() => {
    pipe = new DurationFormat();
  });

  it('Should format a zero duration', () => {
    var result = pipe.transform('0', null);
    expect(result).toEqual('0 ч. 0 мин.');
  });
});

/*
describe('DataService',() =>{
   it('should return list', () => {
     let cs = new DurationFormat();
<|fim▁hole|>
} ) });
<|file_name|>y.go<|end_file_name|><|fim▁begin|>/* * Copyright 2017 Dgraph Labs, Inc. and Contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package y import ( "bytes" "encoding/binary" "hash/crc32" "math" "os" "sync" "gx/ipfs/QmVmDhyTTUcQXFD1rRQ64fGLMSAoaQvNH3hwuaCFAPq2hy/errors" ) // ErrEOF indicates an end of file when trying to read from a memory mapped file // and encountering the end of slice. var ErrEOF = errors.New("End of mapped region") var ( // This is O_DSYNC (datasync) on platforms that support it -- see file_unix.go datasyncFileFlag = 0x0 // CastagnoliCrcTable is a CRC32 polynomial table CastagnoliCrcTable = crc32.MakeTable(crc32.Castagnoli) ) // OpenExistingSyncedFile opens an existing file, errors if it doesn't exist. func OpenExistingSyncedFile(filename string, sync bool) (*os.File, error) { flags := os.O_RDWR if sync { flags |= datasyncFileFlag } return os.OpenFile(filename, flags, 0) } // CreateSyncedFile creates a new file (using O_EXCL), errors if it already existed. func CreateSyncedFile(filename string, sync bool) (*os.File, error) { flags := os.O_RDWR | os.O_CREATE | os.O_EXCL if sync { flags |= datasyncFileFlag } return os.OpenFile(filename, flags, 0666) } // OpenSyncedFile creates the file if one doesn't exist. func OpenSyncedFile(filename string, sync bool) (*os.File, error) { flags := os.O_RDWR | os.O_CREATE if sync { flags |= datasyncFileFlag } return os.OpenFile(filename, flags, 0666) } // OpenTruncFile opens the file with O_RDWR | O_CREATE | O_TRUNC func OpenTruncFile(filename string, sync bool) (*os.File, error) { flags := os.O_RDWR | os.O_CREATE | os.O_TRUNC if sync { flags |= datasyncFileFlag } return os.OpenFile(filename, flags, 0666) } // SafeCopy does append(a[:0], src...). func SafeCopy(a []byte, src []byte) []byte { return append(a[:0], src...) } // Copy copies a byte slice and returns the copied slice. func Copy(a []byte) []byte { b := make([]byte, len(a)) copy(b, a) return b } // KeyWithTs generates a new key by appending ts to key. func KeyWithTs(key []byte, ts uint64) []byte { out := make([]byte, len(key)+8) copy(out, key) binary.BigEndian.PutUint64(out[len(key):], math.MaxUint64-ts) return out } // ParseTs parses the timestamp from the key bytes. func ParseTs(key []byte) uint64 { if len(key) <= 8 { return 0 } return math.MaxUint64 - binary.BigEndian.Uint64(key[len(key)-8:]) } // CompareKeys checks the key without timestamp and checks the timestamp if keyNoTs // is same. // a<timestamp> would be sorted higher than aa<timestamp> if we use bytes.compare // All keys should have timestamp. func CompareKeys(key1 []byte, key2 []byte) int { AssertTrue(len(key1) > 8 && len(key2) > 8) if cmp := bytes.Compare(key1[:len(key1)-8], key2[:len(key2)-8]); cmp != 0 { return cmp } return bytes.Compare(key1[len(key1)-8:], key2[len(key2)-8:]) } // ParseKey parses the actual key from the key bytes. 
func ParseKey(key []byte) []byte { if key == nil { return nil } AssertTruef(len(key) > 8, "key=%q", key) return key[:len(key)-8] } // SameKey checks for key equality ignoring the version timestamp suffix. func SameKey(src, dst []byte) bool { if len(src) != len(dst) { return false } return bytes.Equal(ParseKey(src), ParseKey(dst)) } // Slice holds a reusable buf, will reallocate if you request a larger size than ever before.<|fim▁hole|> buf []byte } // Resize reuses the Slice's buffer (or makes a new one) and returns a slice in that buffer of // length sz. func (s *Slice) Resize(sz int) []byte { if cap(s.buf) < sz { s.buf = make([]byte, sz) } return s.buf[0:sz] } // Closer holds the two things we need to close a goroutine and wait for it to finish: a chan // to tell the goroutine to shut down, and a WaitGroup with which to wait for it to finish shutting // down. type Closer struct { closed chan struct{} waiting sync.WaitGroup } // NewCloser constructs a new Closer, with an initial count on the WaitGroup. func NewCloser(initial int) *Closer { ret := &Closer{closed: make(chan struct{})} ret.waiting.Add(initial) return ret } // AddRunning Add()'s delta to the WaitGroup. func (lc *Closer) AddRunning(delta int) { lc.waiting.Add(delta) } // Signal signals the HasBeenClosed signal. func (lc *Closer) Signal() { close(lc.closed) } // HasBeenClosed gets signaled when Signal() is called. func (lc *Closer) HasBeenClosed() <-chan struct{} { return lc.closed } // Done calls Done() on the WaitGroup. func (lc *Closer) Done() { lc.waiting.Done() } // Wait waits on the WaitGroup. (It waits for NewCloser's initial value, AddRunning, and Done // calls to balance out.) func (lc *Closer) Wait() { lc.waiting.Wait() } // SignalAndWait calls Signal(), then Wait(). func (lc *Closer) SignalAndWait() { lc.Signal() lc.Wait() }<|fim▁end|>
// One problem is with n distinct sizes in random order it'll reallocate log(n) times. type Slice struct {
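`KeyWithTs`/`ParseTs` in y.go above append `math.MaxUint64 - ts` big-endian, so a plain bytewise sort puts newer versions of the same key first. The same encoding round-tripped in Rust, keeping this file's 8-byte suffix; the key and timestamps are illustrative:

fn key_with_ts(key: &[u8], ts: u64) -> Vec<u8> {
    let mut out = Vec::with_capacity(key.len() + 8);
    out.extend_from_slice(key);
    // Inverted, big-endian: a larger ts yields a smaller suffix,
    // so newer versions sort earlier under bytewise comparison.
    out.extend_from_slice(&(u64::MAX - ts).to_be_bytes());
    out
}

fn parse_ts(key: &[u8]) -> u64 {
    if key.len() <= 8 {
        return 0;
    }
    let mut buf = [0u8; 8];
    buf.copy_from_slice(&key[key.len() - 8..]);
    u64::MAX - u64::from_be_bytes(buf)
}

fn main() {
    let newer = key_with_ts(b"user", 10);
    let older = key_with_ts(b"user", 3);
    assert!(newer < older);           // newer version sorts first
    assert_eq!(parse_ts(&newer), 10); // the timestamp round-trips
}

Big-endian matters here: it makes numeric order and byte order agree, which is what lets an LSM iterator land on the latest version of a key without decoding anything.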
<|file_name|>population.py<|end_file_name|><|fim▁begin|>"""Provide infrastructure to allow exploration of variations within populations. Uses the gemini framework (https://github.com/arq5x/gemini) to build SQLite database of variations for query and evaluation. """ import collections import csv from distutils.version import LooseVersion import os import subprocess import toolz as tz from bcbio import install, utils from bcbio.distributed.transaction import file_transaction from bcbio.pipeline import config_utils from bcbio.pipeline import datadict as dd from bcbio.provenance import do, programs from bcbio.variation import multiallelic, vcfutils def prep_gemini_db(fnames, call_info, samples, extras): """Prepare a gemini database from VCF inputs prepared with snpEff. """ data = samples[0] out_dir = utils.safe_makedir(os.path.join(data["dirs"]["work"], "gemini")) name, caller, is_batch = call_info gemini_db = os.path.join(out_dir, "%s-%s.db" % (name, caller)) multisample_vcf = get_multisample_vcf(fnames, name, caller, data) gemini_vcf = multiallelic.to_single(multisample_vcf, data) use_gemini_quick = (do_db_build(samples) and any(vcfutils.vcf_has_variants(f) for f in fnames)) if not utils.file_exists(gemini_db) and use_gemini_quick: use_gemini = do_db_build(samples) and any(vcfutils.vcf_has_variants(f) for f in fnames) if use_gemini: ped_file = create_ped_file(samples + extras, gemini_vcf) gemini_db = create_gemini_db(gemini_vcf, data, gemini_db, ped_file) return [[(name, caller), {"db": gemini_db if utils.file_exists(gemini_db) else None, "vcf": multisample_vcf if is_batch else None}]] def create_gemini_db(gemini_vcf, data, gemini_db=None, ped_file=None): if not gemini_db: gemini_db = "%s.db" % utils.splitext_plus(gemini_vcf)[0] if not utils.file_exists(gemini_db): if not vcfutils.vcf_has_variants(gemini_vcf): return None with file_transaction(data, gemini_db) as tx_gemini_db: gemini = config_utils.get_program("gemini", data["config"]) if "program_versions" in data["config"].get("resources", {}):<|fim▁hole|> gemini_ver = programs.get_version("gemini", config=data["config"]) else: gemini_ver = None # Recent versions of gemini allow loading only passing variants load_opts = "" if not gemini_ver or LooseVersion(gemini_ver) > LooseVersion("0.6.2.1"): load_opts += " --passonly" # For small test files, skip gene table loading which takes a long time if gemini_ver and LooseVersion(gemini_ver) > LooseVersion("0.6.4"): if _is_small_vcf(gemini_vcf): load_opts += " --skip-gene-tables" if "/test_automated_output/" in gemini_vcf: load_opts += " --test-mode" # Skip CADD or gerp-bp if neither are loaded if gemini_ver and LooseVersion(gemini_ver) >= LooseVersion("0.7.0"): gemini_dir = install.get_gemini_dir(data) for skip_cmd, check_file in [("--skip-cadd", "whole_genome_SNVs.tsv.compressed.gz")]: if not os.path.exists(os.path.join(gemini_dir, check_file)): load_opts += " %s" % skip_cmd # skip gerp-bp which slows down loading load_opts += " --skip-gerp-bp " num_cores = data["config"]["algorithm"].get("num_cores", 1) tmpdir = os.path.dirname(tx_gemini_db) eanns = _get_effects_flag(data) # Apply custom resource specifications, allowing use of alternative annotation_dir resources = config_utils.get_resources("gemini", data["config"]) gemini_opts = " ".join([str(x) for x in resources["options"]]) if resources.get("options") else "" cmd = ("{gemini} {gemini_opts} load {load_opts} -v {gemini_vcf} {eanns} --cores {num_cores} " "--tempdir {tmpdir} {tx_gemini_db}") cmd = cmd.format(**locals()) do.run(cmd, "Create 
gemini database for %s" % gemini_vcf, data) if ped_file: cmd = [gemini, "amend", "--sample", ped_file, tx_gemini_db] do.run(cmd, "Add PED file to gemini database", data) return gemini_db def _get_effects_flag(data): effects_config = tz.get_in(("config", "algorithm", "effects"), data, "snpeff") if effects_config == "snpeff": return "-t snpEff" elif effects_config == "vep": return "-t VEP" else: return "" def get_affected_status(data): """Retrieve the affected/unaffected status of sample. Uses unaffected (1), affected (2), unknown (0) coding from PED files: http://pngu.mgh.harvard.edu/~purcell/plink/data.shtml#ped """ affected = set(["tumor", "affected"]) unaffected = set(["normal", "unaffected"]) phenotype = str(tz.get_in(["metadata", "phenotype"], data, "")).lower() if phenotype in affected: return 2 elif phenotype in unaffected: return 1 else: return 0 def create_ped_file(samples, base_vcf): """Create a GEMINI-compatible PED file, including gender, family and phenotype information. Checks for a specified `ped` file in metadata, and will use sample information from this file before reconstituting from metadata information. """ def _code_gender(data): g = dd.get_gender(data) if g and str(g).lower() in ["male", "m"]: return 1 elif g and str(g).lower() in ["female", "f"]: return 2 else: return 0 out_file = "%s.ped" % utils.splitext_plus(base_vcf)[0] sample_ped_lines = {} header = ["#Family_ID", "Individual_ID", "Paternal_ID", "Maternal_ID", "Sex", "Phenotype", "Ethnicity"] for md_ped in list(set([x for x in [tz.get_in(["metadata", "ped"], data) for data in samples] if x is not None])): with open(md_ped) as in_handle: reader = csv.reader(in_handle, dialect="excel-tab") for parts in reader: if parts[0].startswith("#") and len(parts) > len(header): header = header + parts[len(header):] else: sample_ped_lines[parts[1]] = parts if not utils.file_exists(out_file): with file_transaction(samples[0], out_file) as tx_out_file: with open(tx_out_file, "w") as out_handle: writer = csv.writer(out_handle, dialect="excel-tab") writer.writerow(header) batch = _find_shared_batch(samples) for data in samples: sname = dd.get_sample_name(data) if sname in sample_ped_lines: writer.writerow(sample_ped_lines[sname]) else: writer.writerow([batch, sname, "-9", "-9", _code_gender(data), get_affected_status(data), "-9"]) return out_file def _find_shared_batch(samples): for data in samples: batch = tz.get_in(["metadata", "batch"], data, dd.get_sample_name(data)) if not isinstance(batch, (list, tuple)): return batch def _is_small_vcf(vcf_file): """Check for small VCFs which we want to analyze quicker. """ count = 0 small_thresh = 250 with utils.open_gzipsafe(vcf_file) as in_handle: for line in in_handle: if not line.startswith("#"): count += 1 if count > small_thresh: return False return True def get_multisample_vcf(fnames, name, caller, data): """Retrieve a multiple sample VCF file in a standard location. Handles inputs with multiple repeated input files from batches. 
""" unique_fnames = [] for f in fnames: if f not in unique_fnames: unique_fnames.append(f) out_dir = utils.safe_makedir(os.path.join(data["dirs"]["work"], "gemini")) if len(unique_fnames) > 1: gemini_vcf = os.path.join(out_dir, "%s-%s.vcf.gz" % (name, caller)) vrn_file_batch = None for variant in data["variants"]: if variant["variantcaller"] == caller and variant.get("vrn_file_batch"): vrn_file_batch = variant["vrn_file_batch"] if vrn_file_batch: utils.symlink_plus(vrn_file_batch, gemini_vcf) return gemini_vcf else: return vcfutils.merge_variant_files(unique_fnames, gemini_vcf, data["sam_ref"], data["config"]) else: gemini_vcf = os.path.join(out_dir, "%s-%s%s" % (name, caller, utils.splitext_plus(unique_fnames[0])[1])) utils.symlink_plus(unique_fnames[0], gemini_vcf) return gemini_vcf def _has_gemini(data): from bcbio import install gemini_dir = install.get_gemini_dir(data) return ((os.path.exists(gemini_dir) and len(os.listdir(gemini_dir)) > 0) and os.path.exists(os.path.join(os.path.dirname(gemini_dir), "gemini-config.yaml"))) def do_db_build(samples, need_bam=True, gresources=None): """Confirm we should build a gemini database: need gemini + human samples + not in tool_skip. """ genomes = set() for data in samples: if not need_bam or data.get("align_bam"): genomes.add(data["genome_build"]) if "gemini" in utils.get_in(data, ("config", "algorithm", "tools_off"), []): return False if len(genomes) == 1: if not gresources: gresources = samples[0]["genome_resources"] return (tz.get_in(["aliases", "human"], gresources, False) and _has_gemini(samples[0])) else: return False def get_gemini_files(data): """Enumerate available gemini data files in a standard installation. """ try: from gemini import annotations, config except ImportError: return {} return {"base": config.read_gemini_config()["annotation_dir"], "files": annotations.get_anno_files().values()} def _group_by_batches(samples, check_fn): """Group data items into batches, providing details to retrieve results. """ batch_groups = collections.defaultdict(list) singles = [] out_retrieve = [] extras = [] for data in [x[0] for x in samples]: if check_fn(data): batch = tz.get_in(["metadata", "batch"], data) name = str(data["name"][-1]) if batch: out_retrieve.append((str(batch), data)) else: out_retrieve.append((name, data)) for vrn in data["variants"]: if vrn.get("population", True): if batch: batch_groups[(str(batch), vrn["variantcaller"])].append((vrn["vrn_file"], data)) else: singles.append((name, vrn["variantcaller"], data, vrn["vrn_file"])) else: extras.append(data) return batch_groups, singles, out_retrieve, extras def _has_variant_calls(data): if data.get("align_bam"): for vrn in data["variants"]: if vrn.get("vrn_file") and vcfutils.vcf_has_variants(vrn["vrn_file"]): return True return False def prep_db_parallel(samples, parallel_fn): """Prepares gemini databases in parallel, handling jointly called populations. 
""" batch_groups, singles, out_retrieve, extras = _group_by_batches(samples, _has_variant_calls) to_process = [] has_batches = False for (name, caller), info in batch_groups.iteritems(): fnames = [x[0] for x in info] to_process.append([fnames, (str(name), caller, True), [x[1] for x in info], extras]) has_batches = True for name, caller, data, fname in singles: to_process.append([[fname], (str(name), caller, False), [data], extras]) if len(samples) > 0 and not do_db_build([x[0] for x in samples]) and not has_batches: return samples output = parallel_fn("prep_gemini_db", to_process) out_fetch = {} for batch_id, out_file in output: out_fetch[tuple(batch_id)] = out_file out = [] for batch_name, data in out_retrieve: out_variants = [] for vrn in data["variants"]: use_population = vrn.pop("population", True) if use_population: vrn["population"] = out_fetch[(batch_name, vrn["variantcaller"])] out_variants.append(vrn) data["variants"] = out_variants out.append([data]) for x in extras: out.append([x]) return out<|fim▁end|>
<|file_name|>scope_wrapper.rs<|end_file_name|><|fim▁begin|>use std::default::Default; use communication::Communicator; use progress::frontier::{MutableAntichain, Antichain}; use progress::{Timestamp, Scope}; use progress::nested::Target; use progress::nested::subgraph::Target::{GraphOutput, ScopeInput}; use progress::count_map::CountMap; pub struct ScopeWrapper<T: Timestamp> { pub name: String, pub scope: Option<Box<Scope<T>>>, // the scope itself index: u64, pub inputs: u64, // cached information about inputs pub outputs: u64, // cached information about outputs pub edges: Vec<Vec<Target>>, pub notify: bool, pub summary: Vec<Vec<Antichain<T::Summary>>>, // internal path summaries (input x output) pub guarantees: Vec<MutableAntichain<T>>, // per-input: guarantee made by parent scope in inputs pub capabilities: Vec<MutableAntichain<T>>, // per-output: capabilities retained by scope on outputs pub outstanding_messages: Vec<MutableAntichain<T>>, // per-input: counts of messages on each input internal_progress: Vec<CountMap<T>>, // per-output: temp buffer used to ask about internal progress consumed_messages: Vec<CountMap<T>>, // per-input: temp buffer used to ask about consumed messages produced_messages: Vec<CountMap<T>>, // per-output: temp buffer used to ask about produced messages pub guarantee_changes: Vec<CountMap<T>>, // per-input: temp storage for changes in some guarantee... } impl<T: Timestamp> ScopeWrapper<T> { pub fn new(mut scope: Box<Scope<T>>, index: u64, _path: String) -> ScopeWrapper<T> { let inputs = scope.inputs(); let outputs = scope.outputs(); let notify = scope.notify_me(); let (summary, work) = scope.get_internal_summary(); assert!(summary.len() as u64 == inputs); assert!(!summary.iter().any(|x| x.len() as u64 != outputs)); let mut result = ScopeWrapper { name: format!("{}[{}]", scope.name(), index), scope: Some(scope), index: index, inputs: inputs, outputs: outputs, edges: vec![Default::default(); outputs as usize], notify: notify, summary: summary, guarantees: vec![Default::default(); inputs as usize], capabilities: vec![Default::default(); outputs as usize], outstanding_messages: vec![Default::default(); inputs as usize], internal_progress: vec![CountMap::new(); outputs as usize], consumed_messages: vec![CountMap::new(); inputs as usize], produced_messages: vec![CountMap::new(); outputs as usize], guarantee_changes: vec![CountMap::new(); inputs as usize], }; // TODO : Gross. Fix. for (index, capability) in result.capabilities.iter_mut().enumerate() { capability.update_iter_and(work[index].elements().iter().map(|x|x.clone()), |_, _| {}); } return result; } pub fn set_external_summary(&mut self, summaries: Vec<Vec<Antichain<T::Summary>>>, frontier: &mut [CountMap<T>]) { self.scope.as_mut().map(|scope| scope.set_external_summary(summaries, frontier)); } pub fn push_pointstamps(&mut self, external_progress: &[CountMap<T>]) { assert!(self.scope.is_some() || external_progress.iter().all(|x| x.len() == 0)); if self.notify && external_progress.iter().any(|x| x.len() > 0) { for input_port in (0..self.inputs as usize) { self.guarantees[input_port] .update_into_cm(&external_progress[input_port], &mut self.guarantee_changes[input_port]); } // push any changes to the frontier to the subgraph. 
if self.guarantee_changes.iter().any(|x| x.len() > 0) { let changes = &mut self.guarantee_changes; self.scope.as_mut().map(|scope| scope.push_external_progress(changes)); // TODO : Shouldn't be necessary // for change in self.guarantee_changes.iter_mut() { change.clear(); } debug_assert!(!changes.iter().any(|x| x.len() > 0)); } } } pub fn pull_pointstamps<A: FnMut(u64,T,i64)->()>(&mut self, pointstamp_messages: &mut CountMap<(u64, u64, T)>, pointstamp_internal: &mut CountMap<(u64, u64, T)>, mut output_action: A) -> bool { let active = { if let &mut Some(ref mut scope) = &mut self.scope { scope.pull_internal_progress(&mut self.internal_progress, &mut self.consumed_messages, &mut self.produced_messages) } else { false } }; // shutting down if nothing left to do if self.scope.is_some() && !active && self.notify && // we don't track guarantees and capabilities for non-notify scopes. bug? self.guarantees.iter().all(|guarantee| guarantee.empty()) && self.capabilities.iter().all(|capability| capability.empty()) { // println!("Shutting down {}", self.name); self.scope = None;<|fim▁hole|> for output in (0..self.outputs as usize) { while let Some((time, delta)) = self.produced_messages[output].pop() { for &target in self.edges[output].iter() { match target { ScopeInput(tgt, tgt_in) => { pointstamp_messages.update(&(tgt, tgt_in, time), delta); }, GraphOutput(graph_output) => { output_action(graph_output, time, delta); }, } } } while let Some((time, delta)) = self.internal_progress[output as usize].pop() { pointstamp_internal.update(&(self.index, output as u64, time), delta); } } // for each input: consumed messages for input in (0..self.inputs as usize) { while let Some((time, delta)) = self.consumed_messages[input as usize].pop() { pointstamp_messages.update(&(self.index, input as u64, time), -delta); } } return active; } pub fn add_edge(&mut self, output: u64, target: Target) { self.edges[output as usize].push(target); } pub fn name(&self) -> String { self.name.clone() } }<|fim▁end|>
self.name = format!("{}(tombstone)", self.name); } // for each output: produced messages and internal progress
<|file_name|>pagination-simple.ts<|end_file_name|><|fim▁begin|>import { ensureSafeComponent } from '@embroider/util'; import ModelsTablePaginationSimpleComponent from '../../themes/default/pagination-simple'; import { getBsButton, getBsForm } from '../../../../utils/emt/themes/ebs'; export default class PaginationSimple extends ModelsTablePaginationSimpleComponent { get BsButton(): unknown { return ensureSafeComponent(getBsButton(), this);<|fim▁hole|> get BSForm(): unknown { return ensureSafeComponent(getBsForm(), this); } }<|fim▁end|>
}
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ''' Created on Jan 6, 2018 @author: consultit ''' from panda3d.core import Filename import sys, os from subprocess import call ### NOTE: currently this script works only on GNU/Linux currdir = os.path.abspath(sys.path[0]) builddir = Filename.from_os_specific(os.path.join(currdir, '/ely/')).get_fullpath() elydir = Filename.fromOsSpecific(os.path.join(currdir, '/ely/')).getFullpath() lpref = '' mpref = '' lsuff = '.so' ### tools = 'libtools' modules = ['ai', 'audio', 'control', 'physics'] if __name__ == '__main__': # cwd os.chdir(currdir + builddir) # build 'tools' libtools = lpref + tools + lsuff print('building "' + libtools + '" ...') toolsdir = '..' + elydir + tools args = ['build.py', '--dir', toolsdir, '--clean'] call(['/usr/bin/python'] + args) #print('installing "' + libtools + '" ...') #args = [libtools, toolsdir] #call(['/usr/bin/install'] + args) # build modules for module in modules: modulelib = mpref + module + lsuff print('building "' + modulelib + '" ...') moduledir = '..' + elydir + module<|fim▁hole|> #print('installing "' + modulelib + '" ...') #args = [modulelib, moduledir] #call(['/usr/bin/install'] + args)<|fim▁end|>
args = ['build.py', '--dir', moduledir, '--libs', libtools, '--libs_src', toolsdir, '--clean'] call(['/usr/bin/python'] + args)
<|file_name|>TutorialTVScenes.py<|end_file_name|><|fim▁begin|># toontown.tutorial.TutorialTVScenes
from panda3d.core import Camera
from direct.task.Task import Task
from otp.avatar import Emote
from toontown.television.TVScenes import *
from toontown.television.TVEffects import *
from toontown.suit.Suit import Suit
from toontown.suit.BossCog import BossCog
from toontown.suit.SuitDNA import SuitDNA
from toontown.toon import NPCToons, TTEmote
import random

class CEOScene(ThreeDScene):
    CameraPos = [(0, 203.5, 23.5, 0, 350, 0)]

    def __init__(self, effects = []):
        ThreeDScene.__init__(self, 'CEOScene', effects)
        self.geom = loader.loadModel('phase_12/models/bossbotHQ/BanquetInterior_1')
        self.geom.reparentTo(self)
        self.ceo = BossCog()
        dna = SuitDNA()
        dna.newBossCog('c')
        self.ceo.setDNA(dna)
        self.ceo.reparentTo(self)
        self.ceo.setPosHpr(0, 236.5, 0, 180, 0, 0)
        self.ceo.loop('Bb_neutral')

    def delete(self):
        if self.geom:
            self.geom.removeNode()
            self.geom = None
        if self.ceo:
            self.ceo.delete()
            self.ceo = None
        ThreeDScene.delete(self)
        return

class HeadHunterScene(ThreeDScene):
    CameraPos = [(-22, -12.5, 7, 92, -6, 0)]

    def __init__(self, effects = []):
        ThreeDScene.__init__(self, 'HeadHunterScene', effects)
        self.geom = loader.loadModel('phase_12/models/bossbotHQ/BossbotEntranceRoom')
        self.geom.reparentTo(self)
        self.cog = Suit()
        dna = SuitDNA()
        dna.newSuit('hh')
        self.cog.setDNA(dna)
        self.cog.reparentTo(self)
        self.cog.setPosHpr(-32.5, -12.5, 0.02, 270, 0, 0)
        self.cog.nametag3d.removeNode()
        self.cog.nametag.destroy()
        self.cog.loop('neutral')

    def delete(self):
        if self.geom:
            self.geom.removeNode()
            self.geom = None
        if self.cog:
            self.cog.delete()
            self.cog = None
        ThreeDScene.delete(self)
        return

class ScientistScene(ThreeDScene):
    CameraPos = [(-47.5, 0.5, 3.415, 90, 0, 0)]
    ToonPos = {2018: (-59, -1.5, 0.02, 270, 0, 0), 2019: (-59, 0.5, 0.02, 270, 0, 0), 2020: (-59, 2.5, 0.02, 270, 0, 0)}
    RandomEmotes = ['wave', 'angry', 'applause', 'cringe', 'confused', 'slip-forward', 'slip-backward', 'resistance-salute', 'surprise', 'cry', 'furious', 'laugh', 'idea', 'taunt', 'rage']

    def __init__(self, effects = []):
        ThreeDScene.__init__(self, 'ScientistScene', effects)
        self.geom = loader.loadModel('phase_3.5/models/modules/tt_m_ara_int_toonhall')
        self.geom.reparentTo(self)
        self.taskStarted = False
        self.npcs = []
<|fim▁hole|>
for id, posHpr in self.ToonPos.iteritems():
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># __init__.py # Copyright (C) 2006, 2007, 2008, 2009, 2010 Michael Bayer [email protected] # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php <|fim▁hole|>__version__ = '0.3.4'<|fim▁end|>
<|file_name|>0003_auto_20170116_1142.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('autodidact', '0002_auto_20161004_1251'), ] operations = [ migrations.CreateModel( name='RightAnswer', fields=[ ('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')), ('value', models.CharField(help_text='This value can either be a case-insensitive string or a numeric value. For numeric values you can use the <a target="_blank" href="https://docs.moodle.org/23/en/GIFT_format">GIFT notation</a> of "answer:tolerance" or "low..high".', max_length=255)), ('step', models.ForeignKey(related_name='right_answers', to='autodidact.Step')), ], options={ }, bases=(models.Model,), ), migrations.CreateModel( name='WrongAnswer', fields=[ ('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')), ('value', models.CharField(help_text='Supplying one or more wrong answers will turn this into a multiple choice question.', max_length=255)), ('step', models.ForeignKey(related_name='wrong_answers', to='autodidact.Step')), ], options={ }, bases=(models.Model,),<|fim▁hole|> name='slug', field=models.SlugField(unique=True), preserve_default=True, ), migrations.AlterField( model_name='step', name='answer_required', field=models.BooleanField(default=False, help_text='If enabled, this step will show students an input field where they can enter their answer. Add one or more right answers below to have students’ answers checked for correctness.'), preserve_default=True, ), ]<|fim▁end|>
), migrations.AlterField( model_name='course',
<|file_name|>apps.py<|end_file_name|><|fim▁begin|>""" Application file for the code snippets app. """ from django.apps import AppConfig from django.utils.translation import ugettext_lazy as _ class SnippetsConfig(AppConfig): """ Application configuration class for the code snippets app. """ name = 'apps.snippets'<|fim▁hole|> verbose_name = _('Code snippets')<|fim▁end|>
<|file_name|>virtualmachinescalesetvms.go<|end_file_name|><|fim▁begin|>package compute // Copyright (c) Microsoft and contributors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // // See the License for the specific language governing permissions and // limitations under the License. // // Code generated by Microsoft (R) AutoRest Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. import ( "context" "github.com/Azure/go-autorest/autorest" "github.com/Azure/go-autorest/autorest/azure" "net/http" ) // VirtualMachineScaleSetVMsClient is the compute Client type VirtualMachineScaleSetVMsClient struct { BaseClient } // NewVirtualMachineScaleSetVMsClient creates an instance of the VirtualMachineScaleSetVMsClient client. func NewVirtualMachineScaleSetVMsClient(subscriptionID string) VirtualMachineScaleSetVMsClient { return NewVirtualMachineScaleSetVMsClientWithBaseURI(DefaultBaseURI, subscriptionID) } // NewVirtualMachineScaleSetVMsClientWithBaseURI creates an instance of the VirtualMachineScaleSetVMsClient client. func NewVirtualMachineScaleSetVMsClientWithBaseURI(baseURI string, subscriptionID string) VirtualMachineScaleSetVMsClient { return VirtualMachineScaleSetVMsClient{NewWithBaseURI(baseURI, subscriptionID)} } // Deallocate deallocates a specific virtual machine in a VM scale set. Shuts down the virtual machine and releases the // compute resources it uses. You are not billed for the compute resources of this virtual machine once it is // deallocated. // // resourceGroupName is the name of the resource group. VMScaleSetName is the name of the VM scale set. instanceID // is the instance ID of the virtual machine. func (client VirtualMachineScaleSetVMsClient) Deallocate(ctx context.Context, resourceGroupName string, VMScaleSetName string, instanceID string) (result VirtualMachineScaleSetVMsDeallocateFuture, err error) { req, err := client.DeallocatePreparer(ctx, resourceGroupName, VMScaleSetName, instanceID) if err != nil { err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "Deallocate", nil, "Failure preparing request") return } result, err = client.DeallocateSender(req) if err != nil { err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "Deallocate", result.Response(), "Failure sending request") return } return } // DeallocatePreparer prepares the Deallocate request. 
func (client VirtualMachineScaleSetVMsClient) DeallocatePreparer(ctx context.Context, resourceGroupName string, VMScaleSetName string, instanceID string) (*http.Request, error) { pathParameters := map[string]interface{}{ "instanceId": autorest.Encode("path", instanceID), "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), "vmScaleSetName": autorest.Encode("path", VMScaleSetName), } const APIVersion = "2017-03-30" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsPost(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/deallocate", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // DeallocateSender sends the Deallocate request. The method will close the // http.Response Body if it receives an error. func (client VirtualMachineScaleSetVMsClient) DeallocateSender(req *http.Request) (future VirtualMachineScaleSetVMsDeallocateFuture, err error) { sender := autorest.DecorateSender(client, azure.DoRetryWithRegistration(client.Client)) future.Future = azure.NewFuture(req) future.req = req _, err = future.Done(sender) if err != nil { return } err = autorest.Respond(future.Response(), azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted)) return } // DeallocateResponder handles the response to the Deallocate request. The method always // closes the http.Response Body. func (client VirtualMachineScaleSetVMsClient) DeallocateResponder(resp *http.Response) (result OperationStatusResponse, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // Delete deletes a virtual machine from a VM scale set. // // resourceGroupName is the name of the resource group. VMScaleSetName is the name of the VM scale set. instanceID // is the instance ID of the virtual machine. func (client VirtualMachineScaleSetVMsClient) Delete(ctx context.Context, resourceGroupName string, VMScaleSetName string, instanceID string) (result VirtualMachineScaleSetVMsDeleteFuture, err error) { req, err := client.DeletePreparer(ctx, resourceGroupName, VMScaleSetName, instanceID) if err != nil { err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "Delete", nil, "Failure preparing request") return } result, err = client.DeleteSender(req) if err != nil { err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "Delete", result.Response(), "Failure sending request") return } return } // DeletePreparer prepares the Delete request. 
func (client VirtualMachineScaleSetVMsClient) DeletePreparer(ctx context.Context, resourceGroupName string, VMScaleSetName string, instanceID string) (*http.Request, error) { pathParameters := map[string]interface{}{ "instanceId": autorest.Encode("path", instanceID), "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), "vmScaleSetName": autorest.Encode("path", VMScaleSetName), } const APIVersion = "2017-03-30" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsDelete(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // DeleteSender sends the Delete request. The method will close the // http.Response Body if it receives an error. func (client VirtualMachineScaleSetVMsClient) DeleteSender(req *http.Request) (future VirtualMachineScaleSetVMsDeleteFuture, err error) { sender := autorest.DecorateSender(client, azure.DoRetryWithRegistration(client.Client)) future.Future = azure.NewFuture(req) future.req = req _, err = future.Done(sender) if err != nil { return } err = autorest.Respond(future.Response(), azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent)) return } // DeleteResponder handles the response to the Delete request. The method always // closes the http.Response Body. func (client VirtualMachineScaleSetVMsClient) DeleteResponder(resp *http.Response) (result OperationStatusResponse, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // Get gets a virtual machine from a VM scale set. // // resourceGroupName is the name of the resource group. VMScaleSetName is the name of the VM scale set. instanceID // is the instance ID of the virtual machine. func (client VirtualMachineScaleSetVMsClient) Get(ctx context.Context, resourceGroupName string, VMScaleSetName string, instanceID string) (result VirtualMachineScaleSetVM, err error) { req, err := client.GetPreparer(ctx, resourceGroupName, VMScaleSetName, instanceID) if err != nil { err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "Get", nil, "Failure preparing request") return } resp, err := client.GetSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "Get", resp, "Failure sending request") return } result, err = client.GetResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "Get", resp, "Failure responding to request") } return } // GetPreparer prepares the Get request. 
func (client VirtualMachineScaleSetVMsClient) GetPreparer(ctx context.Context, resourceGroupName string, VMScaleSetName string, instanceID string) (*http.Request, error) { pathParameters := map[string]interface{}{ "instanceId": autorest.Encode("path", instanceID), "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), "vmScaleSetName": autorest.Encode("path", VMScaleSetName), } const APIVersion = "2017-03-30" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // GetSender sends the Get request. The method will close the // http.Response Body if it receives an error. func (client VirtualMachineScaleSetVMsClient) GetSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req, azure.DoRetryWithRegistration(client.Client)) } // GetResponder handles the response to the Get request. The method always // closes the http.Response Body. func (client VirtualMachineScaleSetVMsClient) GetResponder(resp *http.Response) (result VirtualMachineScaleSetVM, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // GetInstanceView gets the status of a virtual machine from a VM scale set. // // resourceGroupName is the name of the resource group. VMScaleSetName is the name of the VM scale set. instanceID // is the instance ID of the virtual machine. func (client VirtualMachineScaleSetVMsClient) GetInstanceView(ctx context.Context, resourceGroupName string, VMScaleSetName string, instanceID string) (result VirtualMachineScaleSetVMInstanceView, err error) { req, err := client.GetInstanceViewPreparer(ctx, resourceGroupName, VMScaleSetName, instanceID) if err != nil { err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "GetInstanceView", nil, "Failure preparing request") return } resp, err := client.GetInstanceViewSender(req) if err != nil { result.Response = autorest.Response{Response: resp} err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "GetInstanceView", resp, "Failure sending request") return } result, err = client.GetInstanceViewResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "GetInstanceView", resp, "Failure responding to request") } return } // GetInstanceViewPreparer prepares the GetInstanceView request. 
func (client VirtualMachineScaleSetVMsClient) GetInstanceViewPreparer(ctx context.Context, resourceGroupName string, VMScaleSetName string, instanceID string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"instanceId":        autorest.Encode("path", instanceID),
		"resourceGroupName": autorest.Encode("path", resourceGroupName),
		"subscriptionId":    autorest.Encode("path", client.SubscriptionID),
		"vmScaleSetName":    autorest.Encode("path", VMScaleSetName),
	}

	const APIVersion = "2017-03-30"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsGet(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/instanceView", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// GetInstanceViewSender sends the GetInstanceView request. The method will close the
// http.Response Body if it receives an error.
func (client VirtualMachineScaleSetVMsClient) GetInstanceViewSender(req *http.Request) (*http.Response, error) {
	return autorest.SendWithSender(client, req,
		azure.DoRetryWithRegistration(client.Client))
}

// GetInstanceViewResponder handles the response to the GetInstanceView request. The method always
// closes the http.Response Body.
func (client VirtualMachineScaleSetVMsClient) GetInstanceViewResponder(resp *http.Response) (result VirtualMachineScaleSetVMInstanceView, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

// List gets a list of all virtual machines in a VM scale set.
//
// resourceGroupName is the name of the resource group. virtualMachineScaleSetName is the name of the VM scale set.
// filter is the filter to apply to the operation. selectParameter is the list parameters to select. expand is the
// expand expression to apply to the operation.
func (client VirtualMachineScaleSetVMsClient) List(ctx context.Context, resourceGroupName string, virtualMachineScaleSetName string, filter string, selectParameter string, expand string) (result VirtualMachineScaleSetVMListResultPage, err error) {
	result.fn = client.listNextResults
	req, err := client.ListPreparer(ctx, resourceGroupName, virtualMachineScaleSetName, filter, selectParameter, expand)
	if err != nil {
		err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "List", nil, "Failure preparing request")
		return
	}

	resp, err := client.ListSender(req)
	if err != nil {
		result.vmssvlr.Response = autorest.Response{Response: resp}
		err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "List", resp, "Failure sending request")
		return
	}

	result.vmssvlr, err = client.ListResponder(resp)
	if err != nil {
		err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "List", resp, "Failure responding to request")
	}

	return
}

// ListPreparer prepares the List request.
func (client VirtualMachineScaleSetVMsClient) ListPreparer(ctx context.Context, resourceGroupName string, virtualMachineScaleSetName string, filter string, selectParameter string, expand string) (*http.Request, error) { pathParameters := map[string]interface{}{ "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), "virtualMachineScaleSetName": autorest.Encode("path", virtualMachineScaleSetName), } const APIVersion = "2017-03-30" queryParameters := map[string]interface{}{ "api-version": APIVersion, } if len(filter) > 0 { queryParameters["$filter"] = autorest.Encode("query", filter) } if len(selectParameter) > 0 { queryParameters["$select"] = autorest.Encode("query", selectParameter) } if len(expand) > 0 { queryParameters["$expand"] = autorest.Encode("query", expand) } preparer := autorest.CreatePreparer( autorest.AsGet(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // ListSender sends the List request. The method will close the // http.Response Body if it receives an error. func (client VirtualMachineScaleSetVMsClient) ListSender(req *http.Request) (*http.Response, error) { return autorest.SendWithSender(client, req, azure.DoRetryWithRegistration(client.Client)) } // ListResponder handles the response to the List request. The method always // closes the http.Response Body. func (client VirtualMachineScaleSetVMsClient) ListResponder(resp *http.Response) (result VirtualMachineScaleSetVMListResult, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // listNextResults retrieves the next set of results, if any. func (client VirtualMachineScaleSetVMsClient) listNextResults(lastResults VirtualMachineScaleSetVMListResult) (result VirtualMachineScaleSetVMListResult, err error) { req, err := lastResults.virtualMachineScaleSetVMListResultPreparer() if err != nil { return result, autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "listNextResults", nil, "Failure preparing next results request") } if req == nil { return } resp, err := client.ListSender(req) if err != nil { result.Response = autorest.Response{Response: resp} return result, autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "listNextResults", resp, "Failure sending next results request") } result, err = client.ListResponder(resp) if err != nil { err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "listNextResults", resp, "Failure responding to next results request") } return } // ListComplete enumerates all values, automatically crossing page boundaries as required. 
func (client VirtualMachineScaleSetVMsClient) ListComplete(ctx context.Context, resourceGroupName string, virtualMachineScaleSetName string, filter string, selectParameter string, expand string) (result VirtualMachineScaleSetVMListResultIterator, err error) {
	result.page, err = client.List(ctx, resourceGroupName, virtualMachineScaleSetName, filter, selectParameter, expand)
	return
}

// PowerOff powers off (stops) a virtual machine in a VM scale set. Note that resources are still attached and you are
// getting charged for them. Instead, use deallocate to release resources and avoid charges.
//
// resourceGroupName is the name of the resource group. VMScaleSetName is the name of the VM scale set. instanceID
// is the instance ID of the virtual machine.
func (client VirtualMachineScaleSetVMsClient) PowerOff(ctx context.Context, resourceGroupName string, VMScaleSetName string, instanceID string) (result VirtualMachineScaleSetVMsPowerOffFuture, err error) {
	req, err := client.PowerOffPreparer(ctx, resourceGroupName, VMScaleSetName, instanceID)
	if err != nil {
		err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "PowerOff", nil, "Failure preparing request")
		return
	}

	result, err = client.PowerOffSender(req)
	if err != nil {
		err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "PowerOff", result.Response(), "Failure sending request")
		return
	}

	return
}

// PowerOffPreparer prepares the PowerOff request.
func (client VirtualMachineScaleSetVMsClient) PowerOffPreparer(ctx context.Context, resourceGroupName string, VMScaleSetName string, instanceID string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"instanceId":        autorest.Encode("path", instanceID),
		"resourceGroupName": autorest.Encode("path", resourceGroupName),
		"subscriptionId":    autorest.Encode("path", client.SubscriptionID),
		"vmScaleSetName":    autorest.Encode("path", VMScaleSetName),
	}

	const APIVersion = "2017-03-30"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsPost(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/poweroff", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// PowerOffSender sends the PowerOff request. The method will close the
// http.Response Body if it receives an error.
func (client VirtualMachineScaleSetVMsClient) PowerOffSender(req *http.Request) (future VirtualMachineScaleSetVMsPowerOffFuture, err error) {
	sender := autorest.DecorateSender(client, azure.DoRetryWithRegistration(client.Client))
	future.Future = azure.NewFuture(req)
	future.req = req
	_, err = future.Done(sender)
	if err != nil {
		return
	}
	err = autorest.Respond(future.Response(),
		azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted))
	return
}

// PowerOffResponder handles the response to the PowerOff request. The method always
// closes the http.Response Body.
func (client VirtualMachineScaleSetVMsClient) PowerOffResponder(resp *http.Response) (result OperationStatusResponse, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

// Reimage reimages (upgrades the operating system of) a specific virtual machine in a VM scale set.
//
// resourceGroupName is the name of the resource group. VMScaleSetName is the name of the VM scale set. instanceID
// is the instance ID of the virtual machine.
func (client VirtualMachineScaleSetVMsClient) Reimage(ctx context.Context, resourceGroupName string, VMScaleSetName string, instanceID string) (result VirtualMachineScaleSetVMsReimageFuture, err error) {
	req, err := client.ReimagePreparer(ctx, resourceGroupName, VMScaleSetName, instanceID)
	if err != nil {
		err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "Reimage", nil, "Failure preparing request")
		return
	}

	result, err = client.ReimageSender(req)
	if err != nil {
		err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "Reimage", result.Response(), "Failure sending request")
		return
	}

	return
}

// ReimagePreparer prepares the Reimage request.
func (client VirtualMachineScaleSetVMsClient) ReimagePreparer(ctx context.Context, resourceGroupName string, VMScaleSetName string, instanceID string) (*http.Request, error) {
	pathParameters := map[string]interface{}{
		"instanceId":        autorest.Encode("path", instanceID),
		"resourceGroupName": autorest.Encode("path", resourceGroupName),
		"subscriptionId":    autorest.Encode("path", client.SubscriptionID),
		"vmScaleSetName":    autorest.Encode("path", VMScaleSetName),
	}

	const APIVersion = "2017-03-30"
	queryParameters := map[string]interface{}{
		"api-version": APIVersion,
	}

	preparer := autorest.CreatePreparer(
		autorest.AsPost(),
		autorest.WithBaseURL(client.BaseURI),
		autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimage", pathParameters),
		autorest.WithQueryParameters(queryParameters))
	return preparer.Prepare((&http.Request{}).WithContext(ctx))
}

// ReimageSender sends the Reimage request. The method will close the
// http.Response Body if it receives an error.
func (client VirtualMachineScaleSetVMsClient) ReimageSender(req *http.Request) (future VirtualMachineScaleSetVMsReimageFuture, err error) {
	sender := autorest.DecorateSender(client, azure.DoRetryWithRegistration(client.Client))
	future.Future = azure.NewFuture(req)
	future.req = req
	_, err = future.Done(sender)
	if err != nil {
		return
	}
	err = autorest.Respond(future.Response(),
		azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted))
	return
}

// ReimageResponder handles the response to the Reimage request. The method always
// closes the http.Response Body.
func (client VirtualMachineScaleSetVMsClient) ReimageResponder(resp *http.Response) (result OperationStatusResponse, err error) {
	err = autorest.Respond(
		resp,
		client.ByInspecting(),
		azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted),
		autorest.ByUnmarshallingJSON(&result),
		autorest.ByClosing())
	result.Response = autorest.Response{Response: resp}
	return
}

// ReimageAll allows you to re-image all the disks (including data disks) in a VM scale set instance. This
// operation is only supported for managed disks.
// // resourceGroupName is the name of the resource group. VMScaleSetName is the name of the VM scale set. instanceID // is the instance ID of the virtual machine. func (client VirtualMachineScaleSetVMsClient) ReimageAll(ctx context.Context, resourceGroupName string, VMScaleSetName string, instanceID string) (result VirtualMachineScaleSetVMsReimageAllFuture, err error) { req, err := client.ReimageAllPreparer(ctx, resourceGroupName, VMScaleSetName, instanceID) if err != nil { err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "ReimageAll", nil, "Failure preparing request") return } result, err = client.ReimageAllSender(req) if err != nil { err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "ReimageAll", result.Response(), "Failure sending request") return } return } // ReimageAllPreparer prepares the ReimageAll request. func (client VirtualMachineScaleSetVMsClient) ReimageAllPreparer(ctx context.Context, resourceGroupName string, VMScaleSetName string, instanceID string) (*http.Request, error) { pathParameters := map[string]interface{}{ "instanceId": autorest.Encode("path", instanceID), "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), "vmScaleSetName": autorest.Encode("path", VMScaleSetName), } const APIVersion = "2017-03-30" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsPost(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/reimageall", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // ReimageAllSender sends the ReimageAll request. The method will close the // http.Response Body if it receives an error. func (client VirtualMachineScaleSetVMsClient) ReimageAllSender(req *http.Request) (future VirtualMachineScaleSetVMsReimageAllFuture, err error) { sender := autorest.DecorateSender(client, azure.DoRetryWithRegistration(client.Client)) future.Future = azure.NewFuture(req) future.req = req _, err = future.Done(sender) if err != nil { return } err = autorest.Respond(future.Response(), azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted)) return } // ReimageAllResponder handles the response to the ReimageAll request. The method always // closes the http.Response Body. func (client VirtualMachineScaleSetVMsClient) ReimageAllResponder(resp *http.Response) (result OperationStatusResponse, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // Restart restarts a virtual machine in a VM scale set. // // resourceGroupName is the name of the resource group. VMScaleSetName is the name of the VM scale set. instanceID // is the instance ID of the virtual machine. 
func (client VirtualMachineScaleSetVMsClient) Restart(ctx context.Context, resourceGroupName string, VMScaleSetName string, instanceID string) (result VirtualMachineScaleSetVMsRestartFuture, err error) { req, err := client.RestartPreparer(ctx, resourceGroupName, VMScaleSetName, instanceID) if err != nil { err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "Restart", nil, "Failure preparing request") return } result, err = client.RestartSender(req)<|fim▁hole|> return } return } // RestartPreparer prepares the Restart request. func (client VirtualMachineScaleSetVMsClient) RestartPreparer(ctx context.Context, resourceGroupName string, VMScaleSetName string, instanceID string) (*http.Request, error) { pathParameters := map[string]interface{}{ "instanceId": autorest.Encode("path", instanceID), "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), "vmScaleSetName": autorest.Encode("path", VMScaleSetName), } const APIVersion = "2017-03-30" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsPost(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/restart", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // RestartSender sends the Restart request. The method will close the // http.Response Body if it receives an error. func (client VirtualMachineScaleSetVMsClient) RestartSender(req *http.Request) (future VirtualMachineScaleSetVMsRestartFuture, err error) { sender := autorest.DecorateSender(client, azure.DoRetryWithRegistration(client.Client)) future.Future = azure.NewFuture(req) future.req = req _, err = future.Done(sender) if err != nil { return } err = autorest.Respond(future.Response(), azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted)) return } // RestartResponder handles the response to the Restart request. The method always // closes the http.Response Body. func (client VirtualMachineScaleSetVMsClient) RestartResponder(resp *http.Response) (result OperationStatusResponse, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return } // Start starts a virtual machine in a VM scale set. // // resourceGroupName is the name of the resource group. VMScaleSetName is the name of the VM scale set. instanceID // is the instance ID of the virtual machine. func (client VirtualMachineScaleSetVMsClient) Start(ctx context.Context, resourceGroupName string, VMScaleSetName string, instanceID string) (result VirtualMachineScaleSetVMsStartFuture, err error) { req, err := client.StartPreparer(ctx, resourceGroupName, VMScaleSetName, instanceID) if err != nil { err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "Start", nil, "Failure preparing request") return } result, err = client.StartSender(req) if err != nil { err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "Start", result.Response(), "Failure sending request") return } return } // StartPreparer prepares the Start request. 
func (client VirtualMachineScaleSetVMsClient) StartPreparer(ctx context.Context, resourceGroupName string, VMScaleSetName string, instanceID string) (*http.Request, error) { pathParameters := map[string]interface{}{ "instanceId": autorest.Encode("path", instanceID), "resourceGroupName": autorest.Encode("path", resourceGroupName), "subscriptionId": autorest.Encode("path", client.SubscriptionID), "vmScaleSetName": autorest.Encode("path", VMScaleSetName), } const APIVersion = "2017-03-30" queryParameters := map[string]interface{}{ "api-version": APIVersion, } preparer := autorest.CreatePreparer( autorest.AsPost(), autorest.WithBaseURL(client.BaseURI), autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/virtualMachineScaleSets/{vmScaleSetName}/virtualmachines/{instanceId}/start", pathParameters), autorest.WithQueryParameters(queryParameters)) return preparer.Prepare((&http.Request{}).WithContext(ctx)) } // StartSender sends the Start request. The method will close the // http.Response Body if it receives an error. func (client VirtualMachineScaleSetVMsClient) StartSender(req *http.Request) (future VirtualMachineScaleSetVMsStartFuture, err error) { sender := autorest.DecorateSender(client, azure.DoRetryWithRegistration(client.Client)) future.Future = azure.NewFuture(req) future.req = req _, err = future.Done(sender) if err != nil { return } err = autorest.Respond(future.Response(), azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted)) return } // StartResponder handles the response to the Start request. The method always // closes the http.Response Body. func (client VirtualMachineScaleSetVMsClient) StartResponder(resp *http.Response) (result OperationStatusResponse, err error) { err = autorest.Respond( resp, client.ByInspecting(), azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted), autorest.ByUnmarshallingJSON(&result), autorest.ByClosing()) result.Response = autorest.Response{Response: resp} return }<|fim▁end|>
if err != nil { err = autorest.NewErrorWithError(err, "compute.VirtualMachineScaleSetVMsClient", "Restart", result.Response(), "Failure sending request")
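The PowerOff/Deallocate distinction documented in the client above (power-off keeps resources attached and billable, while deallocate releases them) is easiest to see at the call site. The following is a hedged sketch of driving this client; the import path, authorizer setup, and all identifiers are assumptions or placeholders rather than values taken from the file.

```go
package main

import (
	"context"
	"log"

	// Assumed import path for an SDK of this vintage (2017-03-30 API).
	"github.com/Azure/azure-sdk-for-go/services/compute/mgmt/2017-03-30/compute"
)

func main() {
	ctx := context.Background()
	client := compute.NewVirtualMachineScaleSetVMsClient("<subscription-id>") // placeholder
	// client.Authorizer = ...  // credential setup omitted in this sketch

	// PowerOff stops instance 0 but keeps its resources attached (still billed).
	if _, err := client.PowerOff(ctx, "myGroup", "myScaleSet", "0"); err != nil {
		log.Fatal(err)
	}

	// Deallocate stops the instance and releases its compute resources,
	// so no further compute charges accrue.
	if _, err := client.Deallocate(ctx, "myGroup", "myScaleSet", "0"); err != nil {
		log.Fatal(err)
	}
}
```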
<|file_name|>a.after.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from . import foo<|fim▁end|>
<|file_name|>dom_html_canvas_element.rs<|end_file_name|><|fim▁begin|>// This file was generated by gir (https://github.com/gtk-rs/gir) // from gir-files (https://github.com/gtk-rs/gir-files) // DO NOT EDIT use DOMElement; use DOMEventTarget; use DOMHTMLElement; use DOMNode; use DOMObject; use glib::object::Cast; use glib::object::IsA; use glib::signal::SignalHandlerId; use glib::signal::connect_raw; use glib::translate::*; use glib_sys; use libc; use std::boxed::Box as Box_; use std::fmt; use std::mem::transmute; use webkit2_webextension_sys; glib_wrapper! { pub struct DOMHTMLCanvasElement(Object<webkit2_webextension_sys::WebKitDOMHTMLCanvasElement, webkit2_webextension_sys::WebKitDOMHTMLCanvasElementClass, DOMHTMLCanvasElementClass>) @extends DOMHTMLElement, DOMElement, DOMNode, DOMObject, @implements DOMEventTarget; match fn { get_type => || webkit2_webextension_sys::webkit_dom_html_canvas_element_get_type(), } } pub const NONE_DOMHTML_CANVAS_ELEMENT: Option<&DOMHTMLCanvasElement> = None; pub trait DOMHTMLCanvasElementExt: 'static { #[cfg_attr(feature = "v2_22", deprecated)] fn get_height(&self) -> libc::c_long; #[cfg_attr(feature = "v2_22", deprecated)] fn get_width(&self) -> libc::c_long; #[cfg_attr(feature = "v2_22", deprecated)] fn set_height(&self, value: libc::c_long); #[cfg_attr(feature = "v2_22", deprecated)] fn set_width(&self, value: libc::c_long); fn connect_property_height_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; <|fim▁hole|> fn connect_property_width_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId; } impl<O: IsA<DOMHTMLCanvasElement>> DOMHTMLCanvasElementExt for O { fn get_height(&self) -> libc::c_long { unsafe { webkit2_webextension_sys::webkit_dom_html_canvas_element_get_height(self.as_ref().to_glib_none().0) } } fn get_width(&self) -> libc::c_long { unsafe { webkit2_webextension_sys::webkit_dom_html_canvas_element_get_width(self.as_ref().to_glib_none().0) } } fn set_height(&self, value: libc::c_long) { unsafe { webkit2_webextension_sys::webkit_dom_html_canvas_element_set_height(self.as_ref().to_glib_none().0, value); } } fn set_width(&self, value: libc::c_long) { unsafe { webkit2_webextension_sys::webkit_dom_html_canvas_element_set_width(self.as_ref().to_glib_none().0, value); } } fn connect_property_height_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_height_trampoline<P, F: Fn(&P) + 'static>(this: *mut webkit2_webextension_sys::WebKitDOMHTMLCanvasElement, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer) where P: IsA<DOMHTMLCanvasElement> { let f: &F = &*(f as *const F); f(&DOMHTMLCanvasElement::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw(self.as_ptr() as *mut _, b"notify::height\0".as_ptr() as *const _, Some(transmute(notify_height_trampoline::<Self, F> as usize)), Box_::into_raw(f)) } } fn connect_property_width_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId { unsafe extern "C" fn notify_width_trampoline<P, F: Fn(&P) + 'static>(this: *mut webkit2_webextension_sys::WebKitDOMHTMLCanvasElement, _param_spec: glib_sys::gpointer, f: glib_sys::gpointer) where P: IsA<DOMHTMLCanvasElement> { let f: &F = &*(f as *const F); f(&DOMHTMLCanvasElement::from_glib_borrow(this).unsafe_cast()) } unsafe { let f: Box_<F> = Box_::new(f); connect_raw(self.as_ptr() as *mut _, b"notify::width\0".as_ptr() as *const _, Some(transmute(notify_width_trampoline::<Self, F> as usize)), Box_::into_raw(f)) } } } impl fmt::Display for 
DOMHTMLCanvasElement { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "DOMHTMLCanvasElement") } }<|fim▁end|>
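A brief usage sketch for the canvas bindings above. The crate path and the way the canvas element is obtained are assumptions, and note that the getters and setters are marked deprecated from v2_22 onward.

```rust
use webkit2gtk_webextension::{DOMHTMLCanvasElement, DOMHTMLCanvasElementExt};

// `canvas` would typically come from a DOM lookup inside a web-extension
// callback (e.g. get_element_by_id plus a downcast); obtaining it is out
// of scope for this sketch.
fn resize_canvas(canvas: &DOMHTMLCanvasElement) {
    canvas.set_width(640);
    canvas.set_height(480);

    // React whenever something else changes the width property.
    canvas.connect_property_width_notify(|c| {
        println!("canvas width is now {}", c.get_width());
    });
}
```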
<|file_name|>drop-list-ref.ts<|end_file_name|><|fim▁begin|>/**
 * @license
 * Copyright Google LLC All Rights Reserved.
 *
 * Use of this source code is governed by an MIT-style license that can be
 * found in the LICENSE file at https://angular.io/license
 */

import {ElementRef, NgZone} from '@angular/core';
import {Direction} from '@angular/cdk/bidi';
import {coerceElement} from '@angular/cdk/coercion';
import {ViewportRuler} from '@angular/cdk/scrolling';
import {_getShadowRoot} from '@angular/cdk/platform';
import {Subject, Subscription, interval, animationFrameScheduler} from 'rxjs';
import {takeUntil} from 'rxjs/operators';
import {moveItemInArray} from './drag-utils';
import {DragDropRegistry} from './drag-drop-registry';
import {DragRefInternal as DragRef, Point} from './drag-ref';
import {
  isPointerNearClientRect,
  adjustClientRect,
  getMutableClientRect,
  isInsideClientRect,
} from './client-rect';
import {ParentPositionTracker} from './parent-position-tracker';
import {DragCSSStyleDeclaration} from './drag-styling';

/**
 * Proximity, as a ratio to width/height, at which a
 * dragged item will affect the drop container.
 */
const DROP_PROXIMITY_THRESHOLD = 0.05;

/**
 * Proximity, as a ratio to width/height, at which to start auto-scrolling the drop list or the
 * viewport. The value comes from trying it out manually until it feels right.
 */
const SCROLL_PROXIMITY_THRESHOLD = 0.05;

/**
 * Entry in the position cache for draggable items.
 * @docs-private
 */
interface CachedItemPosition {
  /** Instance of the drag item. */
  drag: DragRef;
  /** Dimensions of the item. */
  clientRect: ClientRect;
  /** Amount by which the item has been moved since dragging started. */
  offset: number;
}

/** Vertical direction in which we can auto-scroll. */
const enum AutoScrollVerticalDirection {NONE, UP, DOWN}

/** Horizontal direction in which we can auto-scroll. */
const enum AutoScrollHorizontalDirection {NONE, LEFT, RIGHT}

/**
 * Internal compile-time-only representation of a `DropListRef`.
 * Used to avoid circular import issues between the `DropListRef` and the `DragRef`.
 * @docs-private
 */
export interface DropListRefInternal extends DropListRef {}

/**
 * Reference to a drop list. Used to manipulate or dispose of the container.
 */
export class DropListRef<T = any> {
  /** Element that the drop list is attached to. */
  element: HTMLElement | ElementRef<HTMLElement>;

  /** Whether starting a dragging sequence from this container is disabled. */
  disabled: boolean = false;

  /** Whether sorting items within the list is disabled. */
  sortingDisabled: boolean = false;

  /** Locks the position of the draggable elements inside the container along the specified axis. */
  lockAxis: 'x' | 'y';

  /**
   * Whether auto-scrolling the view when the user
   * moves their pointer close to the edges is disabled.
   */
  autoScrollDisabled: boolean = false;

  /** Number of pixels to scroll for each frame when auto-scrolling an element. */
  autoScrollStep: number = 2;

  /**
   * Function that is used to determine whether an item
   * is allowed to be moved into a drop container.
   */
  enterPredicate: (drag: DragRef, drop: DropListRef) => boolean = () => true;

  /** Function that is used to determine whether an item can be sorted into a particular index. */
  sortPredicate: (index: number, drag: DragRef, drop: DropListRef) => boolean = () => true;

  /** Emits right before dragging has started. */
  beforeStarted = new Subject<void>();

  /**
   * Emits when the user has moved a new drag item into this container.
   */
  entered = new Subject<{item: DragRef, container: DropListRef, currentIndex: number}>();

  /**
   * Emits when the user removes an item from the container
   * by dragging it into another container.
   */
  exited = new Subject<{item: DragRef, container: DropListRef}>();

  /** Emits when the user drops an item inside the container. */
  dropped = new Subject<{
    item: DragRef,
    currentIndex: number,
    previousIndex: number,
    container: DropListRef,
    previousContainer: DropListRef,
    isPointerOverContainer: boolean,
    distance: Point;
  }>();

  /** Emits as the user is swapping items while actively dragging. */
  sorted = new Subject<{
    previousIndex: number,<|fim▁hole|>
  }>();

  /** Arbitrary data that can be attached to the drop list. */
  data: T;

  /** Whether an item in the list is being dragged. */
  private _isDragging = false;

  /** Cache of the dimensions of all the items inside the container. */
  private _itemPositions: CachedItemPosition[] = [];

  /** Keeps track of the positions of any parent scrollable elements. */
  private _parentPositions: ParentPositionTracker;

  /** Cached `ClientRect` of the drop list. */
  private _clientRect: ClientRect | undefined;

  /**
   * Draggable items that are currently active inside the container. Includes the items
   * from `_draggables`, as well as any items that have been dragged in, but haven't
   * been dropped yet.
   */
  private _activeDraggables: DragRef[];

  /**
   * Keeps track of the item that was last swapped with the dragged item, as well as what direction
   * the pointer was moving in when the swap occurred and whether the user's pointer continued to
   * overlap with the swapped item after the swapping occurred.
   */
  private _previousSwap = {drag: null as DragRef | null, delta: 0, overlaps: false};

  /** Draggable items in the container. */
  private _draggables: ReadonlyArray<DragRef> = [];

  /** Drop lists that are connected to the current one. */
  private _siblings: ReadonlyArray<DropListRef> = [];

  /** Direction in which the list is oriented. */
  private _orientation: 'horizontal' | 'vertical' = 'vertical';

  /** Connected siblings that currently have a dragged item. */
  private _activeSiblings = new Set<DropListRef>();

  /** Layout direction of the drop list. */
  private _direction: Direction = 'ltr';

  /** Subscription to the window being scrolled. */
  private _viewportScrollSubscription = Subscription.EMPTY;

  /** Vertical direction in which the list is currently scrolling. */
  private _verticalScrollDirection = AutoScrollVerticalDirection.NONE;

  /** Horizontal direction in which the list is currently scrolling. */
  private _horizontalScrollDirection = AutoScrollHorizontalDirection.NONE;

  /** Node that is being auto-scrolled. */
  private _scrollNode: HTMLElement | Window;

  /** Used to signal to the current auto-scroll sequence when to stop. */
  private _stopScrollTimers = new Subject<void>();

  /** Shadow root of the current element. Necessary for `elementFromPoint` to resolve correctly. */
  private _cachedShadowRoot: DocumentOrShadowRoot | null = null;

  /** Reference to the document. */
  private _document: Document;

  /** Elements that can be scrolled while the user is dragging. */
  private _scrollableElements: HTMLElement[];

  /** Initial value for the element's `scroll-snap-type` style.
*/ private _initialScrollSnap: string; constructor( element: ElementRef<HTMLElement> | HTMLElement, private _dragDropRegistry: DragDropRegistry<DragRef, DropListRef>, _document: any, private _ngZone: NgZone, private _viewportRuler: ViewportRuler) { this.element = coerceElement(element); this._document = _document; this.withScrollableParents([this.element]); _dragDropRegistry.registerDropContainer(this); this._parentPositions = new ParentPositionTracker(_document, _viewportRuler); } /** Removes the drop list functionality from the DOM element. */ dispose() { this._stopScrolling(); this._stopScrollTimers.complete(); this._viewportScrollSubscription.unsubscribe(); this.beforeStarted.complete(); this.entered.complete(); this.exited.complete(); this.dropped.complete(); this.sorted.complete(); this._activeSiblings.clear(); this._scrollNode = null!; this._parentPositions.clear(); this._dragDropRegistry.removeDropContainer(this); } /** Whether an item from this list is currently being dragged. */ isDragging() { return this._isDragging; } /** Starts dragging an item. */ start(): void { this._draggingStarted(); this._notifyReceivingSiblings(); } /** * Emits an event to indicate that the user moved an item into the container. * @param item Item that was moved into the container. * @param pointerX Position of the item along the X axis. * @param pointerY Position of the item along the Y axis. * @param index Index at which the item entered. If omitted, the container will try to figure it * out automatically. */ enter(item: DragRef, pointerX: number, pointerY: number, index?: number): void { this._draggingStarted(); // If sorting is disabled, we want the item to return to its starting // position if the user is returning it to its initial container. let newIndex: number; if (index == null) { newIndex = this.sortingDisabled ? this._draggables.indexOf(item) : -1; if (newIndex === -1) { // We use the coordinates of where the item entered the drop // zone to figure out at which index it should be inserted. newIndex = this._getItemIndexFromPointerPosition(item, pointerX, pointerY); } } else { newIndex = index; } const activeDraggables = this._activeDraggables; const currentIndex = activeDraggables.indexOf(item); const placeholder = item.getPlaceholderElement(); let newPositionReference: DragRef | undefined = activeDraggables[newIndex]; // If the item at the new position is the same as the item that is being dragged, // it means that we're trying to restore the item to its initial position. In this // case we should use the next item from the list as the reference. if (newPositionReference === item) { newPositionReference = activeDraggables[newIndex + 1]; } // Since the item may be in the `activeDraggables` already (e.g. if the user dragged it // into another container and back again), we have to ensure that it isn't duplicated. if (currentIndex > -1) { activeDraggables.splice(currentIndex, 1); } // Don't use items that are being dragged as a reference, because // their element has been moved down to the bottom of the body. 
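    // Illustration with made-up values: given activeDraggables [A, B, C], dragging B
    // back in at newIndex 0 makes A the `newPositionReference`, so B's placeholder is
    // inserted before A's root element (first branch below). When no usable reference
    // exists, the placeholder is either prepended (pointer entered before the first
    // item) or appended to the container element (final branch).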
    if (newPositionReference && !this._dragDropRegistry.isDragging(newPositionReference)) {
      const element = newPositionReference.getRootElement();
      element.parentElement!.insertBefore(placeholder, element);
      activeDraggables.splice(newIndex, 0, item);
    } else if (this._shouldEnterAsFirstChild(pointerX, pointerY)) {
      const reference = activeDraggables[0].getRootElement();
      reference.parentNode!.insertBefore(placeholder, reference);
      activeDraggables.unshift(item);
    } else {
      coerceElement(this.element).appendChild(placeholder);
      activeDraggables.push(item);
    }

    // The transform needs to be cleared so it doesn't throw off the measurements.
    placeholder.style.transform = '';

    // Note that the positions were already cached when we called `start` above,
    // but we need to refresh them since the amount of items has changed and also parent rects.
    this._cacheItemPositions();
    this._cacheParentPositions();

    // Notify siblings at the end so that the item has been inserted into the `activeDraggables`.
    this._notifyReceivingSiblings();
    this.entered.next({item, container: this, currentIndex: this.getItemIndex(item)});
  }

  /**
   * Removes an item from the container after it was dragged into another container by the user.
   * @param item Item that was dragged out.
   */
  exit(item: DragRef): void {
    this._reset();
    this.exited.next({item, container: this});
  }

  /**
   * Drops an item into this container.
   * @param item Item being dropped into the container.
   * @param currentIndex Index at which the item should be inserted.
   * @param previousIndex Index of the item when dragging started.
   * @param previousContainer Container from which the item got dragged in.
   * @param isPointerOverContainer Whether the user's pointer was over the
   *    container when the item was dropped.
   * @param distance Distance the user has dragged since the start of the dragging sequence.
   */
  drop(item: DragRef, currentIndex: number, previousIndex: number, previousContainer: DropListRef,
    isPointerOverContainer: boolean, distance: Point): void {
    this._reset();
    this.dropped.next({
      item,
      currentIndex,
      previousIndex,
      container: this,
      previousContainer,
      isPointerOverContainer,
      distance
    });
  }

  /**
   * Sets the draggable items that are a part of this list.
   * @param items Items that are a part of this list.
   */
  withItems(items: DragRef[]): this {
    const previousItems = this._draggables;
    this._draggables = items;
    items.forEach(item => item._withDropContainer(this));

    if (this.isDragging()) {
      const draggedItems = previousItems.filter(item => item.isDragging());

      // If all of the items being dragged were removed
      // from the list, abort the current drag sequence.
      if (draggedItems.every(item => items.indexOf(item) === -1)) {
        this._reset();
      } else {
        this._cacheItems();
      }
    }

    return this;
  }

  /** Sets the layout direction of the drop list. */
  withDirection(direction: Direction): this {
    this._direction = direction;
    return this;
  }

  /**
   * Sets the containers that are connected to this one. When two or more containers are
   * connected, the user will be allowed to transfer items between them.
   * @param connectedTo Other containers that the current container should be connected to.
   */
  connectedTo(connectedTo: DropListRef[]): this {
    this._siblings = connectedTo.slice();
    return this;
  }

  /**
   * Sets the orientation of the container.
   * @param orientation New orientation for the container.
   */
  withOrientation(orientation: 'vertical' | 'horizontal'): this {
    this._orientation = orientation;
    return this;
  }

  /**
   * Sets which parent elements can be scrolled while the user is dragging.
* @param elements Elements that can be scrolled. */ withScrollableParents(elements: HTMLElement[]): this { const element = coerceElement(this.element); // We always allow the current element to be scrollable // so we need to ensure that it's in the array. this._scrollableElements = elements.indexOf(element) === -1 ? [element, ...elements] : elements.slice(); return this; } /** Gets the scrollable parents that are registered with this drop container. */ getScrollableParents(): ReadonlyArray<HTMLElement> { return this._scrollableElements; } /** * Figures out the index of an item in the container. * @param item Item whose index should be determined. */ getItemIndex(item: DragRef): number { if (!this._isDragging) { return this._draggables.indexOf(item); } // Items are sorted always by top/left in the cache, however they flow differently in RTL. // The rest of the logic still stands no matter what orientation we're in, however // we need to invert the array when determining the index. const items = this._orientation === 'horizontal' && this._direction === 'rtl' ? this._itemPositions.slice().reverse() : this._itemPositions; return findIndex(items, currentItem => currentItem.drag === item); } /** * Whether the list is able to receive the item that * is currently being dragged inside a connected drop list. */ isReceiving(): boolean { return this._activeSiblings.size > 0; } /** * Sorts an item inside the container based on its position. * @param item Item to be sorted. * @param pointerX Position of the item along the X axis. * @param pointerY Position of the item along the Y axis. * @param pointerDelta Direction in which the pointer is moving along each axis. */ _sortItem(item: DragRef, pointerX: number, pointerY: number, pointerDelta: {x: number, y: number}): void { // Don't sort the item if sorting is disabled or it's out of range. if (this.sortingDisabled || !this._clientRect || !isPointerNearClientRect(this._clientRect, DROP_PROXIMITY_THRESHOLD, pointerX, pointerY)) { return; } const siblings = this._itemPositions; const newIndex = this._getItemIndexFromPointerPosition(item, pointerX, pointerY, pointerDelta); if (newIndex === -1 && siblings.length > 0) { return; } const isHorizontal = this._orientation === 'horizontal'; const currentIndex = findIndex(siblings, currentItem => currentItem.drag === item); const siblingAtNewPosition = siblings[newIndex]; const currentPosition = siblings[currentIndex].clientRect; const newPosition = siblingAtNewPosition.clientRect; const delta = currentIndex > newIndex ? 1 : -1; // How many pixels the item's placeholder should be offset. const itemOffset = this._getItemOffsetPx(currentPosition, newPosition, delta); // How many pixels all the other items should be offset. const siblingOffset = this._getSiblingOffsetPx(currentIndex, siblings, delta); // Save the previous order of the items before moving the item to its new index. // We use this to check whether an item has been moved as a result of the sorting. const oldOrder = siblings.slice(); // Shuffle the array in place. moveItemInArray(siblings, currentIndex, newIndex); this.sorted.next({ previousIndex: currentIndex, currentIndex: newIndex, container: this, item }); siblings.forEach((sibling, index) => { // Don't do anything if the position hasn't changed. if (oldOrder[index] === sibling) { return; } const isDraggedItem = sibling.drag === item; const offset = isDraggedItem ? itemOffset : siblingOffset; const elementToOffset = isDraggedItem ? 
item.getPlaceholderElement() : sibling.drag.getRootElement(); // Update the offset to reflect the new position. sibling.offset += offset; // Since we're moving the items with a `transform`, we need to adjust their cached // client rects to reflect their new position, as well as swap their positions in the cache. // Note that we shouldn't use `getBoundingClientRect` here to update the cache, because the // elements may be mid-animation which will give us a wrong result. if (isHorizontal) { // Round the transforms since some browsers will // blur the elements, for sub-pixel transforms. elementToOffset.style.transform = `translate3d(${Math.round(sibling.offset)}px, 0, 0)`; adjustClientRect(sibling.clientRect, 0, offset); } else { elementToOffset.style.transform = `translate3d(0, ${Math.round(sibling.offset)}px, 0)`; adjustClientRect(sibling.clientRect, offset, 0); } }); // Note that it's important that we do this after the client rects have been adjusted. this._previousSwap.overlaps = isInsideClientRect(newPosition, pointerX, pointerY); this._previousSwap.drag = siblingAtNewPosition.drag; this._previousSwap.delta = isHorizontal ? pointerDelta.x : pointerDelta.y; } /** * Checks whether the user's pointer is close to the edges of either the * viewport or the drop list and starts the auto-scroll sequence. * @param pointerX User's pointer position along the x axis. * @param pointerY User's pointer position along the y axis. */ _startScrollingIfNecessary(pointerX: number, pointerY: number) { if (this.autoScrollDisabled) { return; } let scrollNode: HTMLElement | Window | undefined; let verticalScrollDirection = AutoScrollVerticalDirection.NONE; let horizontalScrollDirection = AutoScrollHorizontalDirection.NONE; // Check whether we should start scrolling any of the parent containers. this._parentPositions.positions.forEach((position, element) => { // We have special handling for the `document` below. Also this would be // nicer with a for...of loop, but it requires changing a compiler flag. if (element === this._document || !position.clientRect || scrollNode) { return; } if (isPointerNearClientRect(position.clientRect, DROP_PROXIMITY_THRESHOLD, pointerX, pointerY)) { [verticalScrollDirection, horizontalScrollDirection] = getElementScrollDirections( element as HTMLElement, position.clientRect, pointerX, pointerY); if (verticalScrollDirection || horizontalScrollDirection) { scrollNode = element as HTMLElement; } } }); // Otherwise check if we can start scrolling the viewport. if (!verticalScrollDirection && !horizontalScrollDirection) { const {width, height} = this._viewportRuler.getViewportSize(); const clientRect = {width, height, top: 0, right: width, bottom: height, left: 0}; verticalScrollDirection = getVerticalScrollDirection(clientRect, pointerY); horizontalScrollDirection = getHorizontalScrollDirection(clientRect, pointerX); scrollNode = window; } if (scrollNode && (verticalScrollDirection !== this._verticalScrollDirection || horizontalScrollDirection !== this._horizontalScrollDirection || scrollNode !== this._scrollNode)) { this._verticalScrollDirection = verticalScrollDirection; this._horizontalScrollDirection = horizontalScrollDirection; this._scrollNode = scrollNode; if ((verticalScrollDirection || horizontalScrollDirection) && scrollNode) { this._ngZone.runOutsideAngular(this._startScrollInterval); } else { this._stopScrolling(); } } } /** Stops any currently-running auto-scroll sequences. 
*/ _stopScrolling() { this._stopScrollTimers.next(); } /** Starts the dragging sequence within the list. */ private _draggingStarted() { const styles = coerceElement(this.element).style as DragCSSStyleDeclaration; this.beforeStarted.next(); this._isDragging = true; // We need to disable scroll snapping while the user is dragging, because it breaks automatic // scrolling. The browser seems to round the value based on the snapping points which means // that we can't increment/decrement the scroll position. this._initialScrollSnap = styles.msScrollSnapType || styles.scrollSnapType || ''; styles.scrollSnapType = styles.msScrollSnapType = 'none'; this._cacheItems(); this._viewportScrollSubscription.unsubscribe(); this._listenToScrollEvents(); } /** Caches the positions of the configured scrollable parents. */ private _cacheParentPositions() { const element = coerceElement(this.element); this._parentPositions.cache(this._scrollableElements); // The list element is always in the `scrollableElements` // so we can take advantage of the cached `ClientRect`. this._clientRect = this._parentPositions.positions.get(element)!.clientRect!; } /** Refreshes the position cache of the items and sibling containers. */ private _cacheItemPositions() { const isHorizontal = this._orientation === 'horizontal'; this._itemPositions = this._activeDraggables.map(drag => { const elementToMeasure = drag.getVisibleElement(); return {drag, offset: 0, clientRect: getMutableClientRect(elementToMeasure)}; }).sort((a, b) => { return isHorizontal ? a.clientRect.left - b.clientRect.left : a.clientRect.top - b.clientRect.top; }); } /** Resets the container to its initial state. */ private _reset() { this._isDragging = false; const styles = coerceElement(this.element).style as DragCSSStyleDeclaration; styles.scrollSnapType = styles.msScrollSnapType = this._initialScrollSnap; // TODO(crisbeto): may have to wait for the animations to finish. this._activeDraggables.forEach(item => { const rootElement = item.getRootElement(); if (rootElement) { rootElement.style.transform = ''; } }); this._siblings.forEach(sibling => sibling._stopReceiving(this)); this._activeDraggables = []; this._itemPositions = []; this._previousSwap.drag = null; this._previousSwap.delta = 0; this._previousSwap.overlaps = false; this._stopScrolling(); this._viewportScrollSubscription.unsubscribe(); this._parentPositions.clear(); } /** * Gets the offset in pixels by which the items that aren't being dragged should be moved. * @param currentIndex Index of the item currently being dragged. * @param siblings All of the items in the list. * @param delta Direction in which the user is moving. */ private _getSiblingOffsetPx(currentIndex: number, siblings: CachedItemPosition[], delta: 1 | -1) { const isHorizontal = this._orientation === 'horizontal'; const currentPosition = siblings[currentIndex].clientRect; const immediateSibling = siblings[currentIndex + delta * -1]; let siblingOffset = currentPosition[isHorizontal ? 'width' : 'height'] * delta; if (immediateSibling) { const start = isHorizontal ? 'left' : 'top'; const end = isHorizontal ? 'right' : 'bottom'; // Get the spacing between the start of the current item and the end of the one immediately // after it in the direction in which the user is dragging, or vice versa. We add it to the // offset in order to push the element to where it will be when it's inline and is influenced // by the `margin` of its siblings. 
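    // Worked example with made-up numbers, vertical list, delta = -1: the current item
    // spans top=100..bottom=150 (height 50) and the next sibling starts at top=160.
    // The base offset is 50 * -1 = -50, and the branch below subtracts the 10px gap
    // (160 - 150), giving -60: displaced siblings shift up by the dragged item's
    // height plus the margin between the two elements.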
if (delta === -1) { siblingOffset -= immediateSibling.clientRect[start] - currentPosition[end]; } else { siblingOffset += currentPosition[start] - immediateSibling.clientRect[end]; } } return siblingOffset; } /** * Gets the offset in pixels by which the item that is being dragged should be moved. * @param currentPosition Current position of the item. * @param newPosition Position of the item where the current item should be moved. * @param delta Direction in which the user is moving. */ private _getItemOffsetPx(currentPosition: ClientRect, newPosition: ClientRect, delta: 1 | -1) { const isHorizontal = this._orientation === 'horizontal'; let itemOffset = isHorizontal ? newPosition.left - currentPosition.left : newPosition.top - currentPosition.top; // Account for differences in the item width/height. if (delta === -1) { itemOffset += isHorizontal ? newPosition.width - currentPosition.width : newPosition.height - currentPosition.height; } return itemOffset; } /** * Checks if pointer is entering in the first position * @param pointerX Position of the user's pointer along the X axis. * @param pointerY Position of the user's pointer along the Y axis. */ private _shouldEnterAsFirstChild(pointerX: number, pointerY: number) { if (!this._activeDraggables.length) { return false; } const itemPositions = this._itemPositions; const isHorizontal = this._orientation === 'horizontal'; // `itemPositions` are sorted by position while `activeDraggables` are sorted by child index // check if container is using some sort of "reverse" ordering (eg: flex-direction: row-reverse) const reversed = itemPositions[0].drag !== this._activeDraggables[0]; if (reversed) { const lastItemRect = itemPositions[itemPositions.length - 1].clientRect; return isHorizontal ? pointerX >= lastItemRect.right : pointerY >= lastItemRect.bottom; } else { const firstItemRect = itemPositions[0].clientRect; return isHorizontal ? pointerX <= firstItemRect.left : pointerY <= firstItemRect.top; } } /** * Gets the index of an item in the drop container, based on the position of the user's pointer. * @param item Item that is being sorted. * @param pointerX Position of the user's pointer along the X axis. * @param pointerY Position of the user's pointer along the Y axis. * @param delta Direction in which the user is moving their pointer. */ private _getItemIndexFromPointerPosition(item: DragRef, pointerX: number, pointerY: number, delta?: {x: number, y: number}): number { const isHorizontal = this._orientation === 'horizontal'; const index = findIndex(this._itemPositions, ({drag, clientRect}, _, array) => { if (drag === item) { // If there's only one item left in the container, it must be // the dragged item itself so we use it as a reference. return array.length < 2; } if (delta) { const direction = isHorizontal ? delta.x : delta.y; // If the user is still hovering over the same item as last time, their cursor hasn't left // the item after we made the swap, and they didn't change the direction in which they're // dragging, we don't consider it a direction swap. if (drag === this._previousSwap.drag && this._previousSwap.overlaps && direction === this._previousSwap.delta) { return false; } } return isHorizontal ? // Round these down since most browsers report client rects with // sub-pixel precision, whereas the pointer coordinates are rounded to pixels. 
pointerX >= Math.floor(clientRect.left) && pointerX < Math.floor(clientRect.right) : pointerY >= Math.floor(clientRect.top) && pointerY < Math.floor(clientRect.bottom); }); return (index === -1 || !this.sortPredicate(index, item, this)) ? -1 : index; } /** Caches the current items in the list and their positions. */ private _cacheItems(): void { this._activeDraggables = this._draggables.slice(); this._cacheItemPositions(); this._cacheParentPositions(); } /** Starts the interval that'll auto-scroll the element. */ private _startScrollInterval = () => { this._stopScrolling(); interval(0, animationFrameScheduler) .pipe(takeUntil(this._stopScrollTimers)) .subscribe(() => { const node = this._scrollNode; const scrollStep = this.autoScrollStep; if (this._verticalScrollDirection === AutoScrollVerticalDirection.UP) { incrementVerticalScroll(node, -scrollStep); } else if (this._verticalScrollDirection === AutoScrollVerticalDirection.DOWN) { incrementVerticalScroll(node, scrollStep); } if (this._horizontalScrollDirection === AutoScrollHorizontalDirection.LEFT) { incrementHorizontalScroll(node, -scrollStep); } else if (this._horizontalScrollDirection === AutoScrollHorizontalDirection.RIGHT) { incrementHorizontalScroll(node, scrollStep); } }); } /** * Checks whether the user's pointer is positioned over the container. * @param x Pointer position along the X axis. * @param y Pointer position along the Y axis. */ _isOverContainer(x: number, y: number): boolean { return this._clientRect != null && isInsideClientRect(this._clientRect, x, y); } /** * Figures out whether an item should be moved into a sibling * drop container, based on its current position. * @param item Drag item that is being moved. * @param x Position of the item along the X axis. * @param y Position of the item along the Y axis. */ _getSiblingContainerFromPosition(item: DragRef, x: number, y: number): DropListRef | undefined { return this._siblings.find(sibling => sibling._canReceive(item, x, y)); } /** * Checks whether the drop list can receive the passed-in item. * @param item Item that is being dragged into the list. * @param x Position of the item along the X axis. * @param y Position of the item along the Y axis. */ _canReceive(item: DragRef, x: number, y: number): boolean { if (!this._clientRect || !isInsideClientRect(this._clientRect, x, y) || !this.enterPredicate(item, this)) { return false; } const elementFromPoint = this._getShadowRoot().elementFromPoint(x, y) as HTMLElement | null; // If there's no element at the pointer position, then // the client rect is probably scrolled out of the view. if (!elementFromPoint) { return false; } const nativeElement = coerceElement(this.element); // The `ClientRect`, that we're using to find the container over which the user is // hovering, doesn't give us any information on whether the element has been scrolled // out of the view or whether it's overlapping with other containers. This means that // we could end up transferring the item into a container that's invisible or is positioned // below another one. We use the result from `elementFromPoint` to get the top-most element // at the pointer position and to find whether it's one of the intersecting drop containers. return elementFromPoint === nativeElement || nativeElement.contains(elementFromPoint); } /** * Called by one of the connected drop lists when a dragging sequence has started. * @param sibling Sibling in which dragging has started. 
*/ _startReceiving(sibling: DropListRef, items: DragRef[]) { const activeSiblings = this._activeSiblings; if (!activeSiblings.has(sibling) && items.every(item => { // Note that we have to add an exception to the `enterPredicate` for items that started off // in this drop list. The drag ref has logic that allows an item to return to its initial // container, if it has left the initial container and none of the connected containers // allow it to enter. See `DragRef._updateActiveDropContainer` for more context. return this.enterPredicate(item, this) || this._draggables.indexOf(item) > -1; })) { activeSiblings.add(sibling); this._cacheParentPositions(); this._listenToScrollEvents(); } } /** * Called by a connected drop list when dragging has stopped. * @param sibling Sibling whose dragging has stopped. */ _stopReceiving(sibling: DropListRef) { this._activeSiblings.delete(sibling); this._viewportScrollSubscription.unsubscribe(); } /** * Starts listening to scroll events on the viewport. * Used for updating the internal state of the list. */ private _listenToScrollEvents() { this._viewportScrollSubscription = this._dragDropRegistry.scroll.subscribe(event => { if (this.isDragging()) { const scrollDifference = this._parentPositions.handleScroll(event); if (scrollDifference) { // Since we know the amount that the user has scrolled we can shift all of the // client rectangles ourselves. This is cheaper than re-measuring everything and // we can avoid inconsistent behavior where we might be measuring the element before // its position has changed. this._itemPositions.forEach(({clientRect}) => { adjustClientRect(clientRect, scrollDifference.top, scrollDifference.left); }); // We need two loops for this, because we want all of the cached // positions to be up-to-date before we re-sort the item. this._itemPositions.forEach(({drag}) => { if (this._dragDropRegistry.isDragging(drag)) { // We need to re-sort the item manually, because the pointer move // events won't be dispatched while the user is scrolling. drag._sortFromLastPointerPosition(); } }); } } else if (this.isReceiving()) { this._cacheParentPositions(); } }); } /** * Lazily resolves and returns the shadow root of the element. We do this in a function, rather * than saving it in property directly on init, because we want to resolve it as late as possible * in order to ensure that the element has been moved into the shadow DOM. Doing it inside the * constructor might be too early if the element is inside of something like `ngFor` or `ngIf`. */ private _getShadowRoot(): DocumentOrShadowRoot { if (!this._cachedShadowRoot) { const shadowRoot = _getShadowRoot(coerceElement(this.element)); this._cachedShadowRoot = shadowRoot || this._document; } return this._cachedShadowRoot; } /** Notifies any siblings that may potentially receive the item. */ private _notifyReceivingSiblings() { const draggedItems = this._activeDraggables.filter(item => item.isDragging()); this._siblings.forEach(sibling => sibling._startReceiving(this, draggedItems)); } } /** * Finds the index of an item that matches a predicate function. Used as an equivalent * of `Array.prototype.findIndex` which isn't part of the standard Google typings. * @param array Array in which to look for matches. * @param predicate Function used to determine whether an item is a match. 
*/ function findIndex<T>(array: T[], predicate: (value: T, index: number, obj: T[]) => boolean): number { for (let i = 0; i < array.length; i++) { if (predicate(array[i], i, array)) { return i; } } return -1; } /** * Increments the vertical scroll position of a node. * @param node Node whose scroll position should change. * @param amount Amount of pixels that the `node` should be scrolled. */ function incrementVerticalScroll(node: HTMLElement | Window, amount: number) { if (node === window) { (node as Window).scrollBy(0, amount); } else { // Ideally we could use `Element.scrollBy` here as well, but IE and Edge don't support it. (node as HTMLElement).scrollTop += amount; } } /** * Increments the horizontal scroll position of a node. * @param node Node whose scroll position should change. * @param amount Amount of pixels that the `node` should be scrolled. */ function incrementHorizontalScroll(node: HTMLElement | Window, amount: number) { if (node === window) { (node as Window).scrollBy(amount, 0); } else { // Ideally we could use `Element.scrollBy` here as well, but IE and Edge don't support it. (node as HTMLElement).scrollLeft += amount; } } /** * Gets the vertical auto-scroll direction of a node. * @param clientRect Dimensions of the node. * @param pointerY Position of the user's pointer along the y axis. */ function getVerticalScrollDirection(clientRect: ClientRect, pointerY: number) { const {top, bottom, height} = clientRect; const yThreshold = height * SCROLL_PROXIMITY_THRESHOLD; if (pointerY >= top - yThreshold && pointerY <= top + yThreshold) { return AutoScrollVerticalDirection.UP; } else if (pointerY >= bottom - yThreshold && pointerY <= bottom + yThreshold) { return AutoScrollVerticalDirection.DOWN; } return AutoScrollVerticalDirection.NONE; } /** * Gets the horizontal auto-scroll direction of a node. * @param clientRect Dimensions of the node. * @param pointerX Position of the user's pointer along the x axis. */ function getHorizontalScrollDirection(clientRect: ClientRect, pointerX: number) { const {left, right, width} = clientRect; const xThreshold = width * SCROLL_PROXIMITY_THRESHOLD; if (pointerX >= left - xThreshold && pointerX <= left + xThreshold) { return AutoScrollHorizontalDirection.LEFT; } else if (pointerX >= right - xThreshold && pointerX <= right + xThreshold) { return AutoScrollHorizontalDirection.RIGHT; } return AutoScrollHorizontalDirection.NONE; } /** * Gets the directions in which an element node should be scrolled, * assuming that the user's pointer is already within its scrollable region. * @param element Element for which we should calculate the scroll direction. * @param clientRect Bounding client rectangle of the element. * @param pointerX Position of the user's pointer along the x axis. * @param pointerY Position of the user's pointer along the y axis. */ function getElementScrollDirections(element: HTMLElement, clientRect: ClientRect, pointerX: number, pointerY: number): [AutoScrollVerticalDirection, AutoScrollHorizontalDirection] { const computedVertical = getVerticalScrollDirection(clientRect, pointerY); const computedHorizontal = getHorizontalScrollDirection(clientRect, pointerX); let verticalScrollDirection = AutoScrollVerticalDirection.NONE; let horizontalScrollDirection = AutoScrollHorizontalDirection.NONE; // Note that here we do some extra checks for whether the element is actually scrollable in // a certain direction and we only assign the scroll direction if it is.
We do this so that we // can allow other elements to be scrolled, if the current element can't be scrolled anymore. // This allows us to handle cases where the scroll regions of two scrollable elements overlap. if (computedVertical) { const scrollTop = element.scrollTop; if (computedVertical === AutoScrollVerticalDirection.UP) { if (scrollTop > 0) { verticalScrollDirection = AutoScrollVerticalDirection.UP; } } else if (element.scrollHeight - scrollTop > element.clientHeight) { verticalScrollDirection = AutoScrollVerticalDirection.DOWN; } } if (computedHorizontal) { const scrollLeft = element.scrollLeft; if (computedHorizontal === AutoScrollHorizontalDirection.LEFT) { if (scrollLeft > 0) { horizontalScrollDirection = AutoScrollHorizontalDirection.LEFT; } } else if (element.scrollWidth - scrollLeft > element.clientWidth) { horizontalScrollDirection = AutoScrollHorizontalDirection.RIGHT; } } return [verticalScrollDirection, horizontalScrollDirection]; }<|fim▁end|>
currentIndex: number, container: DropListRef, item: DragRef
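The drop-list source in the row above computes, from cached client rects, how far each non-dragged sibling must be translated when the dragged item passes it (`_getSiblingOffsetPx`). A minimal Python sketch of that arithmetic follows; the function name and the rect values are mine, chosen only to illustrate the margin-aware offset calculation — the real implementation works on live DOM ClientRects and CSS transforms.

```python
# Sketch of the sibling-offset arithmetic from `_getSiblingOffsetPx` above.
# Rects are plain dicts standing in for DOM ClientRects; values are made up.

def sibling_offset_px(current, immediate_sibling, delta, horizontal=False):
    """Pixels a non-dragged sibling shifts when the dragged item passes it.

    `delta` is +1 when the user drags toward the end of the list, -1 toward
    the start; `immediate_sibling` is the item on the opposite side of the
    drag direction, whose margin gap is folded into the offset.
    """
    start, end = ("left", "right") if horizontal else ("top", "bottom")
    size = "width" if horizontal else "height"
    offset = current[size] * delta
    if immediate_sibling is not None:
        if delta == -1:
            offset -= immediate_sibling[start] - current[end]
        else:
            offset += current[start] - immediate_sibling[end]
    return offset

current = {"top": 148, "bottom": 188, "height": 40}
above = {"top": 100, "bottom": 140, "height": 40}
print(sibling_offset_px(current, above, delta=1))  # 48 = 40px height + 8px gap
```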
<|file_name|>default.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ pygments.styles.default ~~~~~~~~~~~~~~~~~~~~~~~ The default highlighting style. :copyright: 2007 by Tiberius Teng. :copyright: 2006 by Georg Brandl. :license: BSD, see LICENSE for more details. """ from pygments.style import Style from pygments.token import Keyword, Name, Comment, String, Error, \ Number, Operator, Generic, Whitespace class DefaultStyle(Style): """ The default style (inspired by Emacs 22). """ background_color = "#f8f8f8" default_style = "" styles = { Whitespace: "#bbbbbb", Comment: "italic #408080", Comment.Preproc: "noitalic #BC7A00", #Keyword: "bold #AA22FF", Keyword: "bold #008000", Keyword.Pseudo: "nobold", Keyword.Type: "nobold #B00040", Operator: "#666666", Operator.Word: "bold #AA22FF", <|fim▁hole|> Name.Exception: "bold #D2413A", Name.Variable: "#19177C", Name.Constant: "#880000", Name.Label: "#A0A000", Name.Entity: "bold #999999", Name.Attribute: "#7D9029", Name.Tag: "bold #008000", Name.Decorator: "#AA22FF", String: "#BA2121", String.Doc: "italic", String.Interpol: "bold #BB6688", String.Escape: "bold #BB6622", String.Regex: "#BB6688", #String.Symbol: "#B8860B", String.Symbol: "#19177C", String.Other: "#008000", Number: "#666666", Generic.Heading: "bold #000080", Generic.Subheading: "bold #800080", Generic.Deleted: "#A00000", Generic.Inserted: "#00A000", Generic.Error: "#FF0000", Generic.Emph: "italic", Generic.Strong: "bold", Generic.Prompt: "bold #000080", Generic.Output: "#888", Generic.Traceback: "#04D", Error: "border:#FF0000" }<|fim▁end|>
Name.Builtin: "#008000", Name.Function: "#0000FF", Name.Class: "bold #0000FF", Name.Namespace: "bold #0000FF",
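The row above defines a Pygments `Style` subclass as a token-to-color map. For context, this is how such a style is typically consumed — `HtmlFormatter(style=...)` accepts either a registered style name or the class itself (assuming a standard Pygments installation):

```python
from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter

# "default" resolves to the DefaultStyle shown above; a custom Style
# subclass can be passed directly as the `style` argument instead.
formatter = HtmlFormatter(style="default")
html = highlight('print("hello")', PythonLexer(), formatter)
css = formatter.get_style_defs(".highlight")  # CSS rules derived from the token map
```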
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|># coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------- from ._logs_query_client_async import LogsQueryClient from ._metrics_query_client_async import MetricsQueryClient __all__ = ["LogsQueryClient", "MetricsQueryClient"]<|fim▁end|>
<|file_name|>newlines.py<|end_file_name|><|fim▁begin|>from SCons.Script import * def exists(env): return (env["PLATFORM"]=="win32") def ConvertNewlines(target,source,env): for t,s in zip(target,source): f_in=open(str(s),"rb") f_out=open(str(t),"wb") f_out.write(f_in.read().replace("\n","\r\n")) f_out.close() f_in.close() return None def ConvertNewlinesB(target,source,env): for t,s in zip(target,source): f_in=open(str(s),"rb") f_out=open(str(t),"wb") f_out.write("\xef\xbb\xbf") f_out.write(f_in.read().replace("\n","\r\n")) f_out.close() f_in.close() return None def generate(env): env["BUILDERS"]["ConvertNewlines"]=Builder(action=ConvertNewlines,suffix=".txt")<|fim▁hole|><|fim▁end|>
env["BUILDERS"]["ConvertNewlinesB"]=Builder(action=ConvertNewlinesB,suffix=".txt")
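The SCons builders in the row above rewrite LF line endings as CRLF, the `B` variant additionally prepending a UTF-8 BOM. A standalone sketch of the same conversion outside SCons — note it shares the original's caveat that input already containing CRLF would end up with CRCRLF:

```python
def convert_newlines(src_path, dst_path, bom=False):
    """LF -> CRLF conversion mirroring the ConvertNewlines builders above."""
    with open(src_path, "rb") as f_in:
        data = f_in.read().replace(b"\n", b"\r\n")
    with open(dst_path, "wb") as f_out:
        if bom:
            f_out.write(b"\xef\xbb\xbf")  # UTF-8 byte-order mark
        f_out.write(data)
```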
<|file_name|>HttpRequest.java<|end_file_name|><|fim▁begin|>package jenkins.plugins.http_request; import static com.google.common.base.Preconditions.checkArgument; import java.io.IOException; import java.io.PrintStream; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Map; import javax.annotation.Nonnull; import org.kohsuke.stapler.AncestorInPath; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.DataBoundSetter; import org.kohsuke.stapler.QueryParameter; import com.cloudbees.plugins.credentials.common.AbstractIdCredentialsListBoxModel; import com.cloudbees.plugins.credentials.common.StandardCredentials; import com.cloudbees.plugins.credentials.common.StandardListBoxModel; import com.cloudbees.plugins.credentials.common.StandardUsernamePasswordCredentials; import com.cloudbees.plugins.credentials.domains.URIRequirementBuilder; import com.google.common.base.Strings; import com.google.common.collect.Range; import com.google.common.collect.Ranges; import hudson.EnvVars; import hudson.Extension; import hudson.FilePath; import hudson.Launcher; import hudson.init.InitMilestone; import hudson.init.Initializer; import hudson.model.AbstractBuild; import hudson.model.AbstractProject; import hudson.model.BuildListener; import hudson.model.Item; import hudson.model.Items; import hudson.model.TaskListener; import hudson.security.ACL; import hudson.tasks.BuildStepDescriptor; import hudson.tasks.Builder; import hudson.util.FormValidation; import hudson.util.ListBoxModel; import hudson.util.ListBoxModel.Option; import jenkins.plugins.http_request.auth.BasicDigestAuthentication; import jenkins.plugins.http_request.auth.FormAuthentication; import jenkins.plugins.http_request.util.HttpClientUtil; import jenkins.plugins.http_request.util.HttpRequestNameValuePair; /** * @author Janario Oliveira */ public class HttpRequest extends Builder { private @Nonnull String url; private Boolean ignoreSslErrors = DescriptorImpl.ignoreSslErrors; private HttpMode httpMode = DescriptorImpl.httpMode; private String httpProxy = DescriptorImpl.httpProxy; private Boolean passBuildParameters = DescriptorImpl.passBuildParameters; private String validResponseCodes = DescriptorImpl.validResponseCodes; private String validResponseContent = DescriptorImpl.validResponseContent; private MimeType acceptType = DescriptorImpl.acceptType; private MimeType contentType = DescriptorImpl.contentType; private String outputFile = DescriptorImpl.outputFile; private Integer timeout = DescriptorImpl.timeout; private Boolean consoleLogResponseBody = DescriptorImpl.consoleLogResponseBody; private Boolean quiet = DescriptorImpl.quiet; private String authentication = DescriptorImpl.authentication; private String requestBody = DescriptorImpl.requestBody; private List<HttpRequestNameValuePair> customHeaders = DescriptorImpl.customHeaders; @DataBoundConstructor public HttpRequest(@Nonnull String url) { this.url = url; } @Nonnull public String getUrl() { return url; } public Boolean getIgnoreSslErrors() { return ignoreSslErrors; } @DataBoundSetter public void setIgnoreSslErrors(Boolean ignoreSslErrors) { this.ignoreSslErrors = ignoreSslErrors; } public HttpMode getHttpMode() { return httpMode; } @DataBoundSetter public void setHttpMode(HttpMode httpMode) { this.httpMode = httpMode; } public String getHttpProxy() { return httpProxy; } @DataBoundSetter public void setHttpProxy(String httpProxy) { this.httpProxy = httpProxy; } public Boolean getPassBuildParameters() { return 
passBuildParameters; } @DataBoundSetter public void setPassBuildParameters(Boolean passBuildParameters) { this.passBuildParameters = passBuildParameters; } @Nonnull public String getValidResponseCodes() { return validResponseCodes; } @DataBoundSetter public void setValidResponseCodes(String validResponseCodes) { this.validResponseCodes = validResponseCodes; } public String getValidResponseContent() { return validResponseContent; } @DataBoundSetter public void setValidResponseContent(String validResponseContent) { this.validResponseContent = validResponseContent; } public MimeType getAcceptType() { return acceptType; } @DataBoundSetter public void setAcceptType(MimeType acceptType) { this.acceptType = acceptType; } public MimeType getContentType() { return contentType; } @DataBoundSetter public void setContentType(MimeType contentType) { this.contentType = contentType; } public String getOutputFile() { return outputFile; } @DataBoundSetter public void setOutputFile(String outputFile) { this.outputFile = outputFile; } public Integer getTimeout() { return timeout; } @DataBoundSetter public void setTimeout(Integer timeout) { this.timeout = timeout; } public Boolean getConsoleLogResponseBody() { return consoleLogResponseBody; } @DataBoundSetter public void setConsoleLogResponseBody(Boolean consoleLogResponseBody) { this.consoleLogResponseBody = consoleLogResponseBody; } public Boolean getQuiet() { return quiet; } @DataBoundSetter public void setQuiet(Boolean quiet) { this.quiet = quiet; } public String getAuthentication() { return authentication; } @DataBoundSetter public void setAuthentication(String authentication) { this.authentication = authentication; } public String getRequestBody() { return requestBody; } @DataBoundSetter public void setRequestBody(String requestBody) { this.requestBody = requestBody; } public List<HttpRequestNameValuePair> getCustomHeaders() { return customHeaders; } @DataBoundSetter public void setCustomHeaders(List<HttpRequestNameValuePair> customHeaders) { this.customHeaders = customHeaders; } @Initializer(before = InitMilestone.PLUGINS_STARTED) public static void xStreamCompatibility() { Items.XSTREAM2.aliasField("logResponseBody", HttpRequest.class, "consoleLogResponseBody"); Items.XSTREAM2.aliasField("consoleLogResponseBody", HttpRequest.class, "consoleLogResponseBody"); Items.XSTREAM2.alias("pair", HttpRequestNameValuePair.class); } protected Object readResolve() { if (customHeaders == null) { customHeaders = DescriptorImpl.customHeaders; } if (validResponseCodes == null || validResponseCodes.trim().isEmpty()) { validResponseCodes = DescriptorImpl.validResponseCodes; } if (ignoreSslErrors == null) { //default for new job false(DescriptorImpl.ignoreSslErrors) for old ones true to keep same behavior ignoreSslErrors = true; } if (quiet == null) { quiet = false; } return this; } private List<HttpRequestNameValuePair> createParams(EnvVars envVars, AbstractBuild<?, ?> build, TaskListener listener) throws IOException { Map<String, String> buildVariables = build.getBuildVariables(); if (buildVariables.isEmpty()) { return Collections.emptyList(); } PrintStream logger = listener.getLogger(); logger.println("Parameters: "); List<HttpRequestNameValuePair> l = new ArrayList<>(); for (Map.Entry<String, String> entry : buildVariables.entrySet()) { String value = envVars.expand(entry.getValue()); logger.println(" " + entry.getKey() + " = " + value); l.add(new HttpRequestNameValuePair(entry.getKey(), value)); } return l; } String resolveUrl(EnvVars envVars, AbstractBuild<?, ?> 
build, TaskListener listener) throws IOException { String url = envVars.expand(getUrl()); if (Boolean.TRUE.equals(getPassBuildParameters()) && getHttpMode() == HttpMode.GET) { List<HttpRequestNameValuePair> params = createParams(envVars, build, listener); if (!params.isEmpty()) { url = HttpClientUtil.appendParamsToUrl(url, params); } } return url; } List<HttpRequestNameValuePair> resolveHeaders(EnvVars envVars) { final List<HttpRequestNameValuePair> headers = new ArrayList<>(); if (contentType != null && contentType != MimeType.NOT_SET) { headers.add(new HttpRequestNameValuePair("Content-type", contentType.getContentType().toString())); } if (acceptType != null && acceptType != MimeType.NOT_SET) { headers.add(new HttpRequestNameValuePair("Accept", acceptType.getValue())); } for (HttpRequestNameValuePair header : customHeaders) { String headerName = envVars.expand(header.getName()); String headerValue = envVars.expand(header.getValue()); boolean maskValue = headerName.equalsIgnoreCase("Authorization") || header.getMaskValue(); headers.add(new HttpRequestNameValuePair(headerName, headerValue, maskValue)); } return headers; } String resolveBody(EnvVars envVars, AbstractBuild<?, ?> build, TaskListener listener) throws IOException { String body = envVars.expand(getRequestBody()); if (Strings.isNullOrEmpty(body) && Boolean.TRUE.equals(getPassBuildParameters())) { List<HttpRequestNameValuePair> params = createParams(envVars, build, listener); if (!params.isEmpty()) { body = HttpClientUtil.paramsToString(params); } } return body; } FilePath resolveOutputFile(EnvVars envVars, AbstractBuild<?,?> build) { if (outputFile == null || outputFile.trim().isEmpty()) { return null; } String filePath = envVars.expand(outputFile); FilePath workspace = build.getWorkspace(); if (workspace == null) { throw new IllegalStateException("Could not find workspace to save file outputFile: " + outputFile); } return workspace.child(filePath); } @Override public boolean perform(AbstractBuild<?,?> build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException { EnvVars envVars = build.getEnvironment(listener); for (Map.Entry<String, String> e : build.getBuildVariables().entrySet()) { envVars.put(e.getKey(), e.getValue()); } HttpRequestExecution exec = HttpRequestExecution.from(this, envVars, build, this.getQuiet() ? TaskListener.NULL : listener); launcher.getChannel().call(exec); return true; } @Extension public static final class DescriptorImpl extends BuildStepDescriptor<Builder> { public static final boolean ignoreSslErrors = false; public static final HttpMode httpMode = HttpMode.GET; public static final String httpProxy = ""; public static final Boolean passBuildParameters = false; public static final String validResponseCodes = "100:399"; public static final String validResponseContent = ""; public static final MimeType acceptType = MimeType.NOT_SET; public static final MimeType contentType = MimeType.NOT_SET; public static final String outputFile = ""; public static final int timeout = 0; public static final Boolean consoleLogResponseBody = false; public static final Boolean quiet = false; public static final String authentication = ""; public static final String requestBody = ""; public static final List <HttpRequestNameValuePair> customHeaders = Collections.<HttpRequestNameValuePair>emptyList(); public DescriptorImpl() { load(); } @SuppressWarnings("rawtypes") @Override public boolean isApplicable(Class<? 
extends AbstractProject> aClass) { return true; } @Override public String getDisplayName() { return "HTTP Request"; } public ListBoxModel doFillHttpModeItems() { return HttpMode.getFillItems(); } public ListBoxModel doFillAcceptTypeItems() { return MimeType.getContentTypeFillItems(); } public ListBoxModel doFillContentTypeItems() { return MimeType.getContentTypeFillItems(); } public ListBoxModel doFillAuthenticationItems(@AncestorInPath Item project, @QueryParameter String url) { return fillAuthenticationItems(project, url); } public static ListBoxModel fillAuthenticationItems(Item project, String url) { if (project == null || !project.hasPermission(Item.CONFIGURE)) { return new StandardListBoxModel(); } List<Option> options = new ArrayList<>(); for (BasicDigestAuthentication basic : HttpRequestGlobalConfig.get().getBasicDigestAuthentications()) { options.add(new Option("(deprecated - use Jenkins Credentials) " + basic.getKeyName(), basic.getKeyName())); } for (FormAuthentication formAuthentication : HttpRequestGlobalConfig.get().getFormAuthentications()) { options.add(new Option(formAuthentication.getKeyName())); } AbstractIdCredentialsListBoxModel<StandardListBoxModel, StandardCredentials> items = new StandardListBoxModel() .includeEmptyValue() .includeAs(ACL.SYSTEM, project, StandardUsernamePasswordCredentials.class, URIRequirementBuilder.fromUri(url).build()); items.addMissing(options); return items; } public static List<Range<Integer>> parseToRange(String value) { List<Range<Integer>> validRanges = new ArrayList<Range<Integer>>(); String[] codes = value.split(","); for (String code : codes) {<|fim▁hole|> checkArgument(fromTo.length <= 2, "Code %s should be an interval from:to or a single value", code); Integer from; try { from = Integer.parseInt(fromTo[0]); } catch (NumberFormatException nfe) { throw new IllegalArgumentException("Invalid number "+fromTo[0]); } Integer to = from; if (fromTo.length != 1) { try { to = Integer.parseInt(fromTo[1]); } catch (NumberFormatException nfe) { throw new IllegalArgumentException("Invalid number "+fromTo[1]); } } checkArgument(from <= to, "Interval %s should be FROM less than TO", code); validRanges.add(Ranges.closed(from, to)); } return validRanges; } public FormValidation doCheckValidResponseCodes(@QueryParameter String value) { return checkValidResponseCodes(value); } public static FormValidation checkValidResponseCodes(String value) { if (value == null || value.trim().isEmpty()) { return FormValidation.ok(); } try { parseToRange(value); } catch (IllegalArgumentException iae) { return FormValidation.error("Response codes expected is wrong. "+iae.getMessage()); } return FormValidation.ok(); } } }<|fim▁end|>
String[] fromTo = code.trim().split(":");
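The completion above fills in the `split(":")` step of `parseToRange`, which turns strings like `"100:399,404"` into closed integer ranges for response-code validation. A Python rendering of the same parsing contract, for illustration:

```python
def parse_to_ranges(value):
    """Parse "100:399,404" into [(100, 399), (404, 404)], as parseToRange does."""
    ranges = []
    for code in value.split(","):
        from_to = code.strip().split(":")
        if len(from_to) > 2:
            raise ValueError("Code %s should be an interval from:to or a single value" % code)
        low = int(from_to[0])    # raises ValueError on a non-number,
        high = int(from_to[-1])  # matching the NumberFormatException path
        if low > high:
            raise ValueError("Interval %s should be FROM less than TO" % code)
        ranges.append((low, high))
    return ranges

assert parse_to_ranges("100:399,404") == [(100, 399), (404, 404)]
```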
<|file_name|>_pgsql.py<|end_file_name|><|fim▁begin|># The Admin4 Project # (c) 2013-2014 Andreas Pflug # # Licensed under the Apache License, # see LICENSE.TXT for conditions of usage # http://initd.org/psycopg/docs/ import psycopg2 import select import logger import adm import re import threading from wh import xlt, modPath from Crypto.PublicKey._slowmath import rsa_construct sqlKeywords=[] moreKeywords=['serial', 'bigserial'] colKeywords=[] def getSqlKeywords(): global colKeywords global sqlKeywords if not sqlKeywords: f=open(modPath("kwlist.h", __name__)) lines=f.read() f.close() for line in lines.splitlines(): if line.startswith("PG_KEYWORD("): tokens=line.split(',') keyword=tokens[0][12:-1].lower() # RESERVED, UNRESERVED, TYPE_FUNC_NAME, COL_NAME if tokens[2].lstrip().startswith('COL_NAME'): colKeywords.append(keyword) else: sqlKeywords.append(keyword) colKeywords.extend(moreKeywords) return sqlKeywords identMatchPattern=re.compile("^[a-z][a-z0-9_]+$") def quoteIdent(ident): if identMatchPattern.match(ident) and ident not in getSqlKeywords(): return ident return '"%s"' % ident.replace('"', '""') def quoteValue(val, conn=None): if isinstance(val, unicode): # psycopg2 quoting has some problems with unicode return "'%s'" % val.replace("'", "''").replace("\\", "\\\\") adapter=psycopg2.extensions.adapt(val) if conn and hasattr(adapter, 'prepare'): if isinstance(conn, pgConnection): conn=conn.conn elif isinstance(conn, pgCursor): conn=conn.conn.conn adapter.prepare(conn) return adapter.getquoted() class SqlException(adm.ServerException): def __init__(self, sql, error): logger.querylog(sql, error=error) self.error=error self.sql=sql Exception.__init__(self, sql, error) def __str__(self): return self.error ###################################################################### class pgType: def __init__(self, row): self.oid=row['oid'] self.name=row['typname'] self.namespace=row['nspname'] self.category=row['typcategory'] def IsNumeric(self): return self.category == 'N' class pgTypeCache: def __init__(self, rowset): self.cache={} self.Add(rowset) def Add(self, rowset): if not isinstance(rowset, pgRowset): rowset=[rowset] typ=None for row in rowset: typ=pgType(row) self.cache[typ.oid] = typ return typ def Get(self, oid): return self.cache.get(oid) ###################################################################### class pgCursorResult: def __init__(self, cursor, colNames=None): self.cursor=cursor if colNames: self.colNames=colNames else: self.colNames=[] for d in cursor.GetDescription(): self.colNames.append(d.name) class pgRow(pgCursorResult): def __init__(self, cursor, row, colNames=None): pgCursorResult.__init__(self, cursor, colNames) self.row=row def getTuple(self): return tuple(self.getList()) def getList(self): l=[] for i in range(len(self.colNames)): l.append(self.getItem(i)) return l def getDict(self): d={} for i in range(len(self.colNames)): item=self.getItem(i) # aggregate functions deliver [None] with empty left joins; we want [] if isinstance(item, list) and len(item) == 1 and item[0] == None: item=[] d[self.colNames[i]] = item return d def __str__(self): cols=[] for i in range(len(self.colNames)): val=unicode(self.getItem(i)) cols.append("%s=%s" % (self.colNames[i], val)) return "( %s )" % ",".join(cols) def hasAttr(self, colName): try: self.colNames.index(colName) return True except: return False def getItem(self, i): val=self.row[i] if isinstance(val, str): return val.decode('utf8') return val def __getitem__(self, colName): try: if isinstance(colName, (str, unicode)): 
i=self.colNames.index(colName) else: i=colName return self.getItem(i) except Exception as _e: logger.debug("Column %s not found" % colName) return None class pgRowset(pgCursorResult): def __init__(self, cursor): pgCursorResult.__init__(self, cursor) self.__fetchone() def GetRowcount(self): return self.cursor.GetRowcount() def __fetchone(self): if self.cursor.GetRowcount() > 0: row = self.cursor.FetchOne() else: row=None if row: self.curRow = pgRow(self.cursor, row, self.colNames) else: self.curRow=None def HasMore(self): return self.curRow != None def Next(self): row=self.curRow if row: self.__fetchone() return row def getDict(self): d={} for row in self: d[row[0]] = row.getDict() return d def getDictList(self): d=[] for row in self: d.append(row.getDict()) return d def getList(self): d=[] for row in self: d.append(row[0]) return d def __iter__(self): class RowsetIterator: def __init__(self, outer): self.outer=outer def __iter__(self): return self def next(self): row=self.outer.Next() if row: return row else: raise StopIteration() return RowsetIterator(self) ###################################################################### class pgConnection: def __init__(self, dsn, pool=None): self.pool=pool self.conn=None self.cursor=None self.inUse=False self.lastError=None self.trapSqlException=True self.conn=psycopg2.connect(dsn, async=True) self.wait("Connect") self.cursor=self.conn.cursor() def disconnect(self): self.cursor=None if self.conn: self.conn.close() self.conn=None if self.pool: self.pool.RemoveConnection(self) def wait(self, spot=""): if self.conn.async: while self.conn.isexecuting(): try: state = self.conn.poll() except Exception as e: self._handleException(e) return False if state == psycopg2.extensions.POLL_OK: return True elif state == psycopg2.extensions.POLL_WRITE: select.select([], [self.conn.fileno()], []) elif state == psycopg2.extensions.POLL_READ: select.select([self.conn.fileno()], [], []) else: raise adm.ConnectionException(self.node, xlt("WAIT %s" % spot), self.lastError) return False def _handleException(self, exception): if self.cursor and self.cursor.query: cmd=self.cursor.query else: cmd=None exception.message=errlines=exception.message.decode('utf8') logger.querylog(cmd, error=errlines) if self.trapSqlException: self.lastError=errlines if self.pool: self.pool.lastError=errlines adm.StopWaiting(adm.mainframe) if self.conn and self.conn.closed: self.disconnect() if self.trapSqlException: raise SqlException(cmd, errlines) else: raise exception def isRunning(self): return self.conn.poll() != psycopg2.extensions.POLL_OK def GetCursor(self): return pgCursor(self) ###################################################################### class pgCursor(): def __init__(self, conn): conn.trapSqlException=True self.conn=conn self.cursor=self.conn.cursor def __del__(self): self.Close() def SetThrowSqlException(self, how): """ SetThrowSqlException(bool) If set to false, will throw psycopg exception instead of SqlException. 
Use this to catch expected exception without GUI display """ self.conn.trapSqlException=how def Close(self): if self.conn: # logger.trace(2, 4, "RELEASING %s", str(self.conn)) self.conn.inUse=False self.conn=None self.cursor=None def GetPid(self): return self.conn.conn.get_backend_pid() def Quote(self, val): return quoteValue(val, self) def GetDescription(self): if self.cursor.description: return self.cursor.description return [] def GetRowcount(self): return self.cursor.rowcount def FetchOne(self): row=self.cursor.fetchone() return row # def Rollback(self): # self.cursor.execute("ROLLBACK") # self.cursor.wait("ROLLBACK") # # def Commit(self): # self.cursor.execute("COMMIT") # self.cursor.wait("COMMIT") def execute(self, cmd, args=None): if args: if isinstance(args, list): args=tuple(args) elif isinstance(args, tuple): pass else: args=(args,) try: self.cursor.execute(cmd, args) except Exception as e: print "EXcept", e, unicode(e) self.conn._handleException(e) def wait(self, spot=""): return self.conn.wait(spot) def ExecuteSet(self, cmd, args=None): frame=adm.StartWaiting() try: self.execute(cmd, args) self.wait("ExecuteSet") rowset=pgRowset(self) logger.querylog(self.cursor.query, result="%d rows" % rowset.GetRowcount()) adm.StopWaiting(frame) return rowset except Exception as e: adm.StopWaiting(frame, e.error) raise e def ExecuteList(self, cmd, args=None): rowset=self.ExecuteSet(cmd, args) if rowset: return rowset.getList() return None<|fim▁hole|> def ExecuteDictList(self, cmd, args=None): rowset=self.ExecuteSet(cmd, args) if rowset: return rowset.getDictList() return None def ExecuteRow(self, cmd, args=None): frame=adm.StartWaiting() try: self.execute(cmd, args) self.wait("ExecuteRow") row=self.cursor.fetchone() adm.StopWaiting(frame) except Exception as e: adm.StopWaiting(frame, e) raise e if row: row=pgRow(self, row) logger.querylog(self.cursor.query, result=unicode(row)) return row return None def Execute(self, cmd, args=None, spot=None): frame=adm.StartWaiting() try: self.execute(cmd, args) self.wait("Execute") adm.StopWaiting(frame) except Exception as e: adm.StopWaiting(frame, e) raise e rc=self.GetRowcount() if spot: spot += " " else: spot="" logger.querylog(self.cursor.query, result=spot+ xlt("%d rows") % rc) return rc def ExecuteSingle(self, cmd, args=None): frame=adm.StartWaiting() try: self.execute(cmd, args) self.wait("ExecuteSingle") try: row=self.cursor.fetchone() except Exception as _e: #print e row=None adm.StopWaiting(frame) except Exception as e: adm.StopWaiting(frame, e) raise e if row: result=row[0] logger.querylog(self.cursor.query, result="%s" % result) return result else: logger.querylog(self.cursor.query, result=xlt("no result")) return None def Insert(self, cmd, returning=None): if returning: cmd += "\nRETURNING %s" % returning rowset=self.ExecuteSet(cmd) if not self.GetRowcount(): return None result=[] for row in rowset: line=row.getTuple() if len(line) > 1: result.append(line) else: result.append(line[0]) if len(result) > 1: return result else: return result[0] else: self.ExecuteSingle(cmd) return self.cursor.lastrowid def ExecuteDict(self, cmd, args=None): set=self.ExecuteSet(cmd, args) d={} for row in set: d[row[0]] = row[1] return d def ExecuteAsync(self, cmd, args=None): worker=QueryWorker(self, cmd, args) return worker ############################################################################# class pgConnectionPool: def __init__(self, node, dsn): self.node=node self.lastError=None self.connections=[] self.lock=threading.Lock() self.dsn=dsn # create 
first connection to make sure params are ok conn=self.CreateConnection() with self.lock: self.connections.append(conn) def __del__(self): self.Disconnect() def ServerVersion(self): if not self.connections: return None v=self.connections[0].conn.server_version return int(v/10000) + ((v%10000)/100)*0.1 def HasFailed(self): return len(self.connections) == 0 def Disconnect(self): for conn in self.connections: conn.disconnect() self.connections=[] def RemoveConnection(self, conn): try: self.connections.remove(conn) except: pass def GetCursor(self): conn=None with self.lock: for c in self.connections: if not c.inUse: conn=c # logger.trace(2, 4, "USING %s", str(c)) c.inUse=True break if not conn: conn=self.CreateConnection() # logger.trace(2, 4, "CREATING %s", str(c)) return conn.GetCursor() def CreateConnection(self): try: conn=pgConnection(self.dsn, self) return conn except Exception as e: self.lastError = unicode(e) raise adm.ConnectionException(self.node, xlt("Connect"), self.lastError) ########################################################## class QueryWorker(threading.Thread): def __init__(self, cursor, cmd, args): threading.Thread.__init__(self) self.cursor=cursor self.cmd=cmd self.args=args self.running=True def __del__(self): self.cancel() self.cursor=None def run(self): self.cancelled=False self.error=None try: self.cursor.execute(self.cmd, self.args) self.cursor.wait("AsyncWorker") except Exception as e: self.error=e self.running=False def cancel(self): if self.running: self.cancelled=True self.running=False self.cursor.conn.conn.cancel() def GetRowcount(self): return self.cursor.GetRowcount() def GetResult(self): rs=None try: rs=pgRowset(self.cursor) except: pass self.cursor=None return rs def IsRunning(self): return self.running def Cancel(self): if self.running: self.cancel() ####################################################################### class pgQuery: def __init__(self, tab=None, cursor=None): self.columns=[] self.vals=[] self.tables=[] self.where=[] self.order=[] self.group=[] self.cursor=cursor if tab: self.tables.append(tab) def quoteIdent(self, identifier): return quoteIdent(identifier) def SetCursor(self, cursor): self.cursor=cursor def AddCol(self, name, quoted=False): if name: if isinstance(name, list): map(lambda x: self.AddCol(x, quoted), name) else: if quoted: name=quoteIdent(name) self.columns.append(name) def AddColVal(self, name, val, quoted=False): if name: if quoted: name=quoteIdent(name) self.columns.append(name) self.vals.append(val) def AddJoin(self, tab): if tab: self.tables.append("JOIN %s" % tab) def AddLeft(self, tab): if tab: self.tables.append("LEFT OUTER JOIN %s" % tab) def AddWhere(self, where, val=None): if where: if val: where="%s=%s" % (quoteIdent(where), quoteValue(val)) self.where.append(where) def AddOrder(self, order, quoted=False): if order: if quoted: order=quoteIdent(order) self.order.append(order) def AddGroup(self, group): if group: self.group.append(group) def groupJoin(self, partList, sep=', ', breakLen=80): result=[] line="" for part in partList: if line: line += "%s%s" % (sep, part) else: line=part if len(line) > breakLen: result.append(line) line="" if line: result.append(line) return ",\n ".join(result) def SelectQueryString(self): sql=["SELECT %s" % self.groupJoin(self.columns), " FROM %s" % "\n ".join(self.tables) ] if self.where: sql.append(" WHERE %s" % "\n AND ".join(self.where)) if self.group: sql.append(" GROUP BY %s" % ", ".join(self.group)) if self.order: sql.append(" ORDER BY %s" % ", ".join(self.order)) return 
"\n".join(sql) def Select(self): return self.cursor.ExecuteSet(self.SelectQueryString()) def Insert(self, returning=None): if len(self.tables) != 1: raise Exception("pgQuery: INSERT with single table only") sql=["INSERT INTO %s (%s)" % (self.tables[0], ",".join(self.columns))] values=[] for col in range(len(self.columns)): values.append("%s" % quoteValue(self.vals[col], self.cursor)) sql.append(" VALUES (%s)" % self.groupJoin(values)) return self.cursor.Insert("\n".join(sql), returning) def Update(self): if len(self.tables) != 1: raise Exception("pgQuery: UPDATE with single table only") sql=["UPDATE %s" % self.tables[0]] cols=[] for col in range(len(self.columns)): val=quoteValue(self.vals[col], self.cursor) cols.append( "%s=%s" % ( self.columns[col], val )) sql.append(" SET %s" % self.groupJoin(cols)) sql.append(" WHERE %s" % "\n AND ".join(self.where)) return self.cursor.Execute("\n".join(sql), spot="UPDATE") def Delete(self): if len(self.tables) != 1: raise Exception("pgQuery: DELETE with single table only") sql=["DELETE FROM %s" % self.tables[0]] sql.append(" WHERE %s" % "\n AND ".join(self.where)) return self.cursor.Execute("\n".join(sql), spot="DELETE")<|fim▁end|>
<|file_name|>_axis.py<|end_file_name|><|fim▁begin|>"""Axis class and associated.""" # --- import -------------------------------------------------------------------------------------- import re import numexpr import operator import functools import numpy as np from .. import exceptions as wt_exceptions from .. import kit as wt_kit from .. import units as wt_units # --- define -------------------------------------------------------------------------------------- __all__ = ["Axis"] operator_to_identifier = {} operator_to_identifier["/"] = "__d__" operator_to_identifier["="] = "__e__" operator_to_identifier["-"] = "__m__" operator_to_identifier["+"] = "__p__" operator_to_identifier["*"] = "__t__" identifier_to_operator = {value: key for key, value in operator_to_identifier.items()} operators = "".join(operator_to_identifier.keys()) # --- class --------------------------------------------------------------------------------------- class Axis(object): """Axis class.""" def __init__(self, parent, expression, units=None): """Data axis. Parameters ---------- parent : WrightTools.Data Parent data object. expression : string Axis expression. units : string (optional) Axis units. Default is None. """ self.parent = parent self.expression = expression if units is None: self.units = self.variables[0].units else: self.units = units def __getitem__(self, index): vs = {} for variable in self.variables: arr = variable[index] vs[variable.natural_name] = wt_units.converter(arr, variable.units, self.units) return numexpr.evaluate(self.expression.split("=")[0], local_dict=vs) def __repr__(self) -> str: return "<WrightTools.Axis {0} ({1}) at {2}>".format( self.expression, str(self.units), id(self) ) @property def _leaf(self): out = self.expression if self.units is not None: out += " ({0})".format(self.units) out += " {0}".format(self.shape) return out @property def full(self) -> np.ndarray: """Axis expression evaluated and repeated to match the shape of the parent data object.""" arr = self[:] for i in range(arr.ndim): if arr.shape[i] == 1: arr = np.repeat(arr, self.parent.shape[i], axis=i) return arr @property def identity(self) -> str: """Complete identifier written to disk in data.attrs['axes'].""" return self.expression + " {%s}" % self.units @property def label(self) -> str: """A latex formatted label representing axis expression.""" label = self.expression.replace("_", "\\;") if self.units_kind: symbol = wt_units.get_symbol(self.units) if symbol is not None: for v in self.variables: vl = "%s_{%s}" % (symbol, v.label) vl = vl.replace("_{}", "") # label can be empty, no empty subscripts label = label.replace(v.natural_name, vl) label += rf"\,\left({wt_units.ureg.Unit(self.units):~}\right)" label = r"$\mathsf{%s}$" % label return label @property def natural_name(self) -> str: """Valid python identifier representation of the expression.""" name = self.expression.strip() for op in operators: name = name.replace(op, operator_to_identifier[op]) return wt_kit.string2identifier(name) @property def ndim(self) -> int: """Get number of dimensions.""" try: assert self._ndim is not None except (AssertionError, AttributeError): self._ndim = self.variables[0].ndim finally: return self._ndim @property def points(self) -> np.ndarray: """Squeezed array.""" return np.squeeze(self[:]) @property def shape(self) -> tuple: """Shape.""" return wt_kit.joint_shape(*self.variables) @property def size(self) -> int: """Size.""" return functools.reduce(operator.mul, self.shape) @property def units(self): return self._units @units.setter
def units(self, value): if value == "None": value = None if value is not None and value not in wt_units.ureg: raise ValueError(f"'{value}' is not in the unit registry") self._units = value<|fim▁hole|> return wt_units.kind(self.units) @property def variables(self) -> list: """Variables.""" try: assert self._variables is not None except (AssertionError, AttributeError): pattern = "|".join(map(re.escape, operators)) keys = re.split(pattern, self.expression) indices = [] for key in keys: if key in self.parent.variable_names: indices.append(self.parent.variable_names.index(key)) self._variables = [self.parent.variables[i] for i in indices] finally: return self._variables @property def masked(self) -> np.ndarray: """Axis expression evaluated, and masked with NaN shared from data channels.""" arr = self[:] arr.shape = self.shape arr = wt_kit.share_nans(arr, *self.parent.channels)[0] return np.nanmean( arr, keepdims=True, axis=tuple(i for i in range(self.ndim) if self.shape[i] == 1) ) def convert(self, destination_units, *, convert_variables=False): """Convert axis to destination_units. Parameters ---------- destination_units : string Destination units. convert_variables : boolean (optional) Toggle conversion of stored arrays. Default is False. """ if self.units is None and (destination_units is None or destination_units == "None"): return if not wt_units.is_valid_conversion(self.units, destination_units): valid = wt_units.get_valid_conversions(self.units) raise wt_exceptions.UnitsError(valid, destination_units) if convert_variables: for v in self.variables: v.convert(destination_units) self.units = destination_units self.parent._on_axes_updated() def max(self): """Axis max.""" return np.nanmax(self[:]) def min(self): """Axis min.""" return np.nanmin(self[:])<|fim▁end|>
@property def units_kind(self) -> str: """Units kind."""
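The `Axis` row above resolves variable arrays into a dict and hands the left-hand side of the expression to `numexpr`. A minimal sketch of that evaluation step — the array values and variable names here are made up:

```python
import numpy as np
import numexpr

# How Axis.__getitem__ above evaluates a "w1+w2=wm"-style expression:
local_dict = {"w1": np.array([1.0, 2.0]), "w2": np.array([0.5, 0.5])}
expression = "w1+w2=wm"
result = numexpr.evaluate(expression.split("=")[0], local_dict=local_dict)
print(result)  # [1.5 2.5]
```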
<|file_name|>SearchExpressionTests.cpp<|end_file_name|><|fim▁begin|>#include "catch.hpp" #include "SearchExpression.hpp" using namespace quip; <|fim▁hole|>TEST_CASE("Search expressions can be constructed from an empty expression.", "[SearchExpressionTests]") { SearchExpression expression(""); REQUIRE_FALSE(expression.valid()); } TEST_CASE("Search expressions can be constructed from a simple expression.", "[SearchExpressionTests]") { SearchExpression expression("foo"); REQUIRE(expression.valid()); REQUIRE(expression.expression() == "foo"); } TEST_CASE("Search expressions can be constructed from an expression with a trailing class.", "[SearchExpressionTests]") { SearchExpression expression("[a-z"); REQUIRE_FALSE(expression.valid()); } TEST_CASE("Search expressions can be constructed from an expression with a trailing slash.", "[SearchExpressionTests]") { SearchExpression expression("\\"); REQUIRE_FALSE(expression.valid()); }<|fim▁end|>
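The Catch tests in the row above pin down the `SearchExpression` contract: empty input, a trailing character class, and a trailing backslash must all yield an invalid expression. The same contract, sketched in Python on top of `re.compile` — quip itself is C++, so this is only an illustration of the validity rule:

```python
import re

def is_valid_search(pattern):
    """Mirror of SearchExpression::valid(): empty or uncompilable -> False."""
    if not pattern:
        return False
    try:
        re.compile(pattern)
        return True
    except re.error:
        return False

assert not is_valid_search("")      # empty expression
assert is_valid_search("foo")       # simple expression
assert not is_valid_search("[a-z")  # trailing character class
assert not is_valid_search("\\")    # trailing backslash
```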
<|file_name|>f5bigip_sys_crypto_cert.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright 2016-2018, Eric Jacob <[email protected]> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ANSIBLE_METADATA = { 'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community' } DOCUMENTATION = ''' --- module: f5bigip_sys_crypto_cert short_description: BIG-IP sys crypto cert module description: - Manage cryptographic certificates on the BIG-IP system. version_added: "2.4" author: - "Eric Jacob (@erjac77)" options: city: description: - Specifies the x509 city field to be used in creation of the certificate associated with the given key. command: description: - Specifies the command to execute. choices: ['install'] common_name: description: - Specifies the x509 common-name to be used in creation of the certificate associated with the given key. consumer: description: - Specifies the system component by which a key and/or associated cryptographic file will be consumed. default: ltm choices: ['enterprise-manager', 'iquery', 'iquery-big3d', 'ltm', 'webserver'] country: description: - Specifies the x509 country to be used in creation of the certificate associated with the given key. email_address: description: - Specifies the x509 email-address to be used in creation of the certificate associated with the given key. from_editor: description: - Specifies that the key should be obtained from a text editor session. from_local_file: description: - Specifies a local file path from which a key is to be copied. from_url: description: - Specifies a URI which is to be used to obtain a key for import into the configuration of the system. key: description: - Specifies a key from which a certificate should be generated when using the create command. required: true lifetime: description: - Specifies the certificate life time to be used in creation of the certificate associated with the given<|fim▁hole|> key. default: 365 organization: description: - Specifies the x509 organization to be used in creation of the certificate associated with the given key. ou: description: - Specifies the x509 organizational unit to be used in creation of the certificate associated with the given key. name: description: - Specifies unique name for the component. required: true no_overwrite: description: - Specifies option of not overwriting a key if it is in the scope. default: true type: bool partition: description: - Displays the administrative partition in which the component object resides. default: Common state: description: - Specifies the state of the component on the BIG-IP system. default: present choices: ['absent', 'present'] state_province: description: - Specifies the x509 state or province of the certificate associated with the given key. subject_alternative_name: description: - Specifies standard X.509 extensions as shown in RFC 2459. 
requirements: - BIG-IP >= 12.0 - ansible-common-f5 - f5-sdk ''' EXAMPLES = ''' - name: Install SYS Crypto Cert from local file f5bigip_sys_crypto_cert: f5_hostname: 172.16.227.35 f5_username: admin f5_password: admin f5_port: 443 name: exemple.localhost.crt partition: Common from_local_file: /tmp/exemple.localhost.crt state: present delegate_to: localhost - name: Create SYS Crypto Cert f5bigip_sys_crypto_cert: f5_hostname: 172.16.227.35 f5_username: admin f5_password: admin f5_port: 443 name: exemple.localhost.crt partition: Common key: exemple.localhost.key common_name: exemple.localhost city: city state_province: state country: US email_address: 'admin@localhost' organization: My Org ou: My Div state: present delegate_to: localhost ''' RETURN = ''' # ''' from ansible.module_utils.basic import AnsibleModule from ansible_common_f5.base import AnsibleF5Error from ansible_common_f5.base import F5_NAMED_OBJ_ARGS from ansible_common_f5.base import F5_PROVIDER_ARGS from ansible_common_f5.bigip import F5BigIpNamedObject class ModuleParams(object): @property def argument_spec(self): argument_spec = dict( consumer=dict(type='str', choices=['enterprise-manager', 'iquery', 'iquery-big3d', 'ltm', 'webserver']), # create city=dict(type='str'), common_name=dict(type='str'), country=dict(type='str'), email_address=dict(type='str'), key=dict(type='str'), lifetime=dict(type='int'), organization=dict(type='str'), ou=dict(type='str'), state_province=dict(type='str'), subject_alternative_name=dict(type='str'), # install command=dict(type='str', choices=['install']), from_editor=dict(type='str'), from_local_file=dict(type='str'), from_url=dict(type='str'), no_overwrite=dict(type='bool') ) argument_spec.update(F5_PROVIDER_ARGS) argument_spec.update(F5_NAMED_OBJ_ARGS) return argument_spec @property def supports_check_mode(self): return True @property def mutually_exclusive(self): return [ ['from_editor', 'from_local_file', 'from_url'] ] @property def tr(self): # Translation dict for conflictual params return {'state_province': 'state'} class F5BigIpSysCryptoCert(F5BigIpNamedObject): def _set_crud_methods(self): self._methods = { 'create': self._api.tm.sys.crypto.certs.cert.create, 'read': self._api.tm.sys.crypto.certs.cert.load, 'update': self._api.tm.sys.crypto.certs.cert.update, 'delete': self._api.tm.sys.crypto.certs.cert.delete, 'exists': self._api.tm.sys.crypto.certs.cert.exists, 'exec_cmd': self._api.tm.sys.crypto.certs.exec_cmd } def _install(self): """Upload the key on the BIG-IP system.""" name = self._params['name'] param_set = {} if self._params['fromEditor']: param_set = {'from-editor': self._params['fromEditor']} if self._params['fromLocalFile']: param_set = {'from-local-file': self._params['fromLocalFile']} if self._params['fromUrl']: param_set = {'from-url': self._params['fromUrl']} if param_set: param_set.update({'name': name}) if self._params['consumer']: param_set.update({'consumer': self._params['consumer']}) if self._params['noOverwrite']: param_set.update({'no-overwrite': self._params['noOverwrite']}) # Install the key self._methods['exec_cmd']('install', **param_set) else: raise AnsibleF5Error("Missing required parameter 'from-*' to install the cert.") # Make sure it is installed if not self._exists(): raise AnsibleF5Error("Failed to create the object.") return True def _present(self): has_changed = False if self._params['command'] == 'install': if not self._exists() or (self._params['noOverwrite'] is not None and self._params['noOverwrite'] is False): has_changed = self._install() else: 
if not self._exists(): has_changed = self._create() return has_changed def main(): params = ModuleParams() module = AnsibleModule(argument_spec=params.argument_spec, supports_check_mode=params.supports_check_mode, mutually_exclusive=params.mutually_exclusive) try: obj = F5BigIpSysCryptoCert(check_mode=module.check_mode, tr=params.tr, **module.params) result = obj.flush() module.exit_json(**result) except Exception as exc: module.fail_json(msg=str(exc)) if __name__ == '__main__': main()<|fim▁end|>
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![crate_type="staticlib"]
#![feature(alloc)]
#![feature(allocator)]
#![feature(arc_counts)]
#![feature(augmented_assignments)]
#![feature(asm)]
#![feature(box_syntax)]
#![feature(collections)]
#![feature(const_fn)]
#![feature(core_intrinsics)]
#![feature(core_str_ext)]
#![feature(core_slice_ext)]
#![feature(fnbox)]
#![feature(fundamental)]
#![feature(lang_items)]
#![feature(op_assign_traits)]
#![feature(unboxed_closures)]
#![feature(unsafe_no_drop_flag)]
#![feature(unwind_attributes)]
#![feature(vec_push_all)]
#![feature(zero_one)]
#![no_std]

#[macro_use]
extern crate alloc;

#[macro_use]
extern crate collections;

use acpi::Acpi;

use alloc::boxed::Box;

use collections::string::{String, ToString};
use collections::vec::Vec;

use core::cell::UnsafeCell;
use core::{ptr, mem, usize};
use core::slice::SliceExt;

use common::event::{self, EVENT_KEY, EventOption};
use common::memory;
use common::paging::Page;
use common::time::Duration;

use drivers::pci;
use drivers::pio::*;
use drivers::ps2::*;
use drivers::rtc::*;
use drivers::serial::*;

use env::Environment;

pub use externs::*;

use graphics::display;

use programs::executor::execute;
use programs::scheme::*;

use scheduler::{Context, Regs, TSS};
use scheduler::context::context_switch;

use schemes::Url;
use schemes::arp::*;
use schemes::context::*;
use schemes::debug::*;
use schemes::ethernet::*;
use schemes::icmp::*;
use schemes::interrupt::*;
use schemes::ip::*;
use schemes::memory::*;
// use schemes::display::*;

use syscall::handle::*;

/// Common std-like functionality
#[macro_use]
pub mod common;
/// ACPI
pub mod acpi;
/// Allocation
pub mod alloc_system;
/// Audio
pub mod audio;
/// Disk drivers
pub mod disk;
/// Various drivers
pub mod drivers;
/// Environment
pub mod env;
/// Externs
pub mod externs;
/// Filesystems
pub mod fs;
/// Various graphical methods
pub mod graphics;
/// Network
pub mod network;
/// Panic
pub mod panic;
/// Programs
pub mod programs;
/// Schemes
pub mod schemes;
/// Scheduling
pub mod scheduler;
/// Sync primitives
pub mod sync;
/// System calls
pub mod syscall;
/// USB input/output
pub mod usb;

pub static mut TSS_PTR: Option<&'static mut TSS> = None;
pub static mut ENV_PTR: Option<&'static mut Environment> = None;

pub fn env() -> &'static Environment {
    unsafe {
        match ENV_PTR {
            Some(&mut ref p) => p,
            None => unreachable!(),<|fim▁hole|>
    }
}

/// Pit duration
static PIT_DURATION: Duration = Duration {
    secs: 0,
    nanos: 2250286,
};

/// Idle loop (active while idle)
unsafe fn idle_loop() {
    loop {
        asm!("cli" : : : : "intel", "volatile");

        let mut halt = true;

        for i in env().contexts.lock().iter().skip(1) {
            if i.interrupted {
                halt = false;
                break;
            }
        }

        if halt {
            asm!("sti" : : : : "intel", "volatile");
            asm!("hlt" : : : : "intel", "volatile");
        } else {
            asm!("sti" : : : : "intel", "volatile");
        }

        context_switch(false);
    }
}

/// Event poll loop
fn poll_loop() {
    loop {
        env().on_poll();

        unsafe { context_switch(false) };
    }
}

/// Event loop
fn event_loop() {
    {
        let mut console = env().console.lock();
        console.instant = false;
    }

    let mut cmd = String::new();
    loop {
        loop {
            let mut console = env().console.lock();
            match env().events.lock().pop_front() {
                Some(event) => {
                    if console.draw {
                        match event.to_option() {
                            EventOption::Key(key_event) => {
                                if key_event.pressed {
                                    match key_event.scancode {
                                        event::K_F2 => {
                                            console.draw = false;
                                        }
                                        event::K_BKSP => if !cmd.is_empty() {
                                            console.write(&[8]);
                                            cmd.pop();
                                        },
                                        _ => match key_event.character {
                                            '\0' => (),
                                            '\n' => {
                                                console.command =
Some(cmd.clone()); cmd.clear(); console.write(&[10]); } _ => { cmd.push(key_event.character); console.write(&[key_event.character as u8]); } }, } } } _ => (), } } else { if event.code == EVENT_KEY && event.b as u8 == event::K_F1 && event.c > 0 { console.draw = true; console.redraw = true; } else { // TODO: Magical orbital hack unsafe { for scheme in env().schemes.iter() { if (*scheme.get()).scheme() == "orbital" { (*scheme.get()).event(&event); break; } } } } } } None => break, } } { let mut console = env().console.lock(); console.instant = false; if console.draw && console.redraw { console.redraw = false; console.display.flip(); } } unsafe { context_switch(false) }; } } static BSS_TEST_ZERO: usize = 0; static BSS_TEST_NONZERO: usize = usize::MAX; /// Initialize kernel unsafe fn init(font_data: usize, tss_data: usize) { // Zero BSS, this initializes statics that are set to 0 { extern { static mut __bss_start: u8; static mut __bss_end: u8; } let start_ptr = &mut __bss_start; let end_ptr = &mut __bss_end; if start_ptr as *const _ as usize <= end_ptr as *const _ as usize { let size = end_ptr as *const _ as usize - start_ptr as *const _ as usize; memset(start_ptr, 0, size); } assert_eq!(BSS_TEST_ZERO, 0); assert_eq!(BSS_TEST_NONZERO, usize::MAX); } // Setup paging, this allows for memory allocation Page::init(); memory::cluster_init(); // Unmap first page to catch null pointer errors (after reading memory map) Page::new(0).unmap(); display::fonts = font_data; TSS_PTR = Some(&mut *(tss_data as *mut TSS)); ENV_PTR = Some(&mut *Box::into_raw(Environment::new())); match ENV_PTR { Some(ref mut env) => { env.contexts.lock().push(Context::root()); env.console.lock().draw = true; debug!("Redox {} bits\n", mem::size_of::<usize>() * 8); if let Some(acpi) = Acpi::new() { env.schemes.push(UnsafeCell::new(acpi)); } *(env.clock_realtime.lock()) = Rtc::new().time(); env.schemes.push(UnsafeCell::new(Ps2::new())); env.schemes.push(UnsafeCell::new(Serial::new(0x3F8, 0x4))); pci::pci_init(env); env.schemes.push(UnsafeCell::new(DebugScheme::new())); env.schemes.push(UnsafeCell::new(box ContextScheme)); env.schemes.push(UnsafeCell::new(box InterruptScheme)); env.schemes.push(UnsafeCell::new(box MemoryScheme)); // session.items.push(box RandomScheme); // session.items.push(box TimeScheme); env.schemes.push(UnsafeCell::new(box EthernetScheme)); env.schemes.push(UnsafeCell::new(box ArpScheme)); env.schemes.push(UnsafeCell::new(box IcmpScheme)); env.schemes.push(UnsafeCell::new(box IpScheme { arp: Vec::new() })); // session.items.push(box DisplayScheme); Context::spawn("kpoll".to_string(), box move || { poll_loop(); }); Context::spawn("kevent".to_string(), box move || { event_loop(); }); Context::spawn("karp".to_string(), box move || { ArpScheme::reply_loop(); }); Context::spawn("kicmp".to_string(), box move || { IcmpScheme::reply_loop(); }); env.contexts.lock().enabled = true; if let Ok(mut resource) = Url::from_str("file:/schemes/").open() { let mut vec: Vec<u8> = Vec::new(); let _ = resource.read_to_end(&mut vec); for folder in String::from_utf8_unchecked(vec).lines() { if folder.ends_with('/') { let scheme_item = SchemeItem::from_url(&Url::from_string("file:/schemes/" .to_string() + &folder)); env.schemes.push(UnsafeCell::new(scheme_item)); } } } Context::spawn("kinit".to_string(), box move || { { let wd_c = "file:/\0"; do_sys_chdir(wd_c.as_ptr()); let stdio_c = "debug:\0"; do_sys_open(stdio_c.as_ptr(), 0); do_sys_open(stdio_c.as_ptr(), 0); do_sys_open(stdio_c.as_ptr(), 0); } 
execute(Url::from_str("file:/apps/login/main.bin"), Vec::new());

                debug!("INIT: Failed to execute\n");

                loop {
                    context_switch(false);
                }
            });
        },
        None => unreachable!(),
    }
}

#[cold]
#[inline(never)]
#[no_mangle]
/// Take regs for kernel calls and exceptions
pub extern "cdecl" fn kernel(interrupt: usize, mut regs: &mut Regs) {
    macro_rules! exception_inner {
        ($name:expr) => ({
            {
                let contexts = ::env().contexts.lock();
                if let Some(context) = contexts.current() {
                    debugln!("PID {}: {}", context.pid, context.name);
                }
            }

            debugln!("  INT {:X}: {}", interrupt, $name);
            debugln!("    CS:  {:08X} IP:  {:08X} FLG: {:08X}", regs.cs, regs.ip, regs.flags);
            debugln!("    SS:  {:08X} SP:  {:08X} BP:  {:08X}", regs.ss, regs.sp, regs.bp);
            debugln!("    AX:  {:08X} BX:  {:08X} CX:  {:08X} DX:  {:08X}", regs.ax, regs.bx, regs.cx, regs.dx);
            debugln!("    DI:  {:08X} SI:  {:08X}", regs.di, regs.si);

            let cr0: usize;
            let cr2: usize;
            let cr3: usize;
            let cr4: usize;
            unsafe {
                asm!("mov $0, cr0" : "=r"(cr0) : : : "intel", "volatile");
                asm!("mov $0, cr2" : "=r"(cr2) : : : "intel", "volatile");
                asm!("mov $0, cr3" : "=r"(cr3) : : : "intel", "volatile");
                asm!("mov $0, cr4" : "=r"(cr4) : : : "intel", "volatile");
            }
            debugln!("    CR0: {:08X} CR2: {:08X} CR3: {:08X} CR4: {:08X}", cr0, cr2, cr3, cr4);

            let sp = regs.sp as *const u32;
            for y in -15..16 {
                debug!("    {:>3}:", y * 8 * 4);
                for x in 0..8 {
                    debug!(" {:08X}", unsafe { ptr::read(sp.offset(-(x + y * 8))) });
                }
                debug!("\n");
            }
        })
    };

    macro_rules! exception {
        ($name:expr) => ({
            exception_inner!($name);

            loop {
                do_sys_exit(usize::MAX);
            }
        })
    };

    macro_rules! exception_error {
        ($name:expr) => ({
            let error = regs.ip;
            regs.ip = regs.cs;
            regs.cs = regs.flags;
            regs.flags = regs.sp;
            regs.sp = regs.ss;
            regs.ss = 0;
            //regs.ss = regs.error;

            exception_inner!($name);
            debugln!("  ERR: {:08X}", error);

            loop {
                do_sys_exit(usize::MAX);
            }
        })
    };

    if interrupt >= 0x20 && interrupt < 0x30 {
        if interrupt >= 0x28 {
            unsafe { Pio8::new(0xA0).write(0x20) };
        }

        unsafe { Pio8::new(0x20).write(0x20) };
    }

    //Do not catch init interrupt
    if interrupt < 0xFF {
        env().interrupts.lock()[interrupt as usize] += 1;
    }

    match interrupt {
        0x20 => {
            {
                let mut clock_monotonic = env().clock_monotonic.lock();
                *clock_monotonic = *clock_monotonic + PIT_DURATION;
            }
            {
                let mut clock_realtime = env().clock_realtime.lock();
                *clock_realtime = *clock_realtime + PIT_DURATION;
            }

            let switch = {
                let mut contexts = ::env().contexts.lock();
                if let Some(mut context) = contexts.current_mut() {
                    context.slices -= 1;
                    context.slice_total += 1;
                    context.slices == 0
                } else {
                    false
                }
            };

            if switch {
                unsafe { context_switch(true) };
            }
        }
        i @ 0x21 ...
0x2F => env().on_irq(i as u8 - 0x20), 0x80 => if !syscall_handle(regs) { exception!("Unknown Syscall"); }, 0xFF => { unsafe { init(regs.ax, regs.bx); idle_loop(); } }, 0x0 => exception!("Divide by zero exception"), 0x1 => exception!("Debug exception"), 0x2 => exception!("Non-maskable interrupt"), 0x3 => exception!("Breakpoint exception"), 0x4 => exception!("Overflow exception"), 0x5 => exception!("Bound range exceeded exception"), 0x6 => exception!("Invalid opcode exception"), 0x7 => exception!("Device not available exception"), 0x8 => exception_error!("Double fault"), 0x9 => exception!("Coprocessor Segment Overrun"), // legacy 0xA => exception_error!("Invalid TSS exception"), 0xB => exception_error!("Segment not present exception"), 0xC => exception_error!("Stack-segment fault"), 0xD => exception_error!("General protection fault"), 0xE => exception_error!("Page fault"), 0x10 => exception!("x87 floating-point exception"), 0x11 => exception_error!("Alignment check exception"), 0x12 => exception!("Machine check exception"), 0x13 => exception!("SIMD floating-point exception"), 0x14 => exception!("Virtualization exception"), 0x1E => exception_error!("Security exception"), _ => exception!("Unknown Interrupt"), } }<|fim▁end|>
}
<|file_name|>IGridColumn.ts<|end_file_name|><|fim▁begin|>export interface IGridColumn { name: string; title: string;<|fim▁hole|>}<|fim▁end|>
type: string; value?: any;
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (c) 2008 Aldo Cortesi
# Copyright (c) 2011 Mounier Florian
# Copyright (c) 2012 dmpayton
# Copyright (c) 2014 Sean Vig
# Copyright (c) 2014 roger
# Copyright (c) 2014 Pedro Algarvio
# Copyright (c) 2014-2015 Tycho Andersen
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal<|fim▁hole|>
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

import sys
import textwrap

from setuptools import setup
from setuptools.command.install import install


class CheckCairoXcb(install):
    def cairo_xcb_check(self):
        try:
            from cairocffi import cairo
            cairo.cairo_xcb_surface_create
            return True
        except AttributeError:
            return False

    def finalize_options(self):
        if not self.cairo_xcb_check():

            print(textwrap.dedent("""

            It looks like your cairocffi was not built with xcffib support.  To fix this:

              - Ensure a recent xcffib is installed (pip install 'xcffib>=0.3.2')
              - The pip cache is cleared (remove ~/.cache/pip, if it exists)
              - Reinstall cairocffi, either:

                  pip install --no-deps --ignore-installed cairocffi

                or

                  pip uninstall cairocffi && pip install cairocffi
            """))

            sys.exit(1)
        install.finalize_options(self)


long_description = """
A pure-Python tiling window manager.

Features
========

    * Simple, small and extensible. It's easy to write your own layouts,
      widgets and commands.
    * Configured in Python.
    * Command shell that allows all aspects of Qtile to be managed and
      inspected.
    * Complete remote scriptability - write scripts to set up workspaces,
      manipulate windows, update status bar widgets and more.
    * Qtile's remote scriptability makes it one of the most thoroughly
      unit-tested window managers around.
""" if '_cffi_backend' in sys.builtin_module_names: import _cffi_backend requires_cffi = "cffi==" + _cffi_backend.__version__ else: requires_cffi = "cffi>=1.1.0" # PyPy < 2.6 compatibility if requires_cffi.startswith("cffi==0."): cffi_args = dict( zip_safe=False ) else: cffi_args = dict(cffi_modules=[ 'libqtile/ffi_build.py:pango_ffi', 'libqtile/ffi_build.py:xcursors_ffi' ]) dependencies = ['xcffib>=0.3.2', 'cairocffi>=0.7', 'six>=1.4.1', requires_cffi] if sys.version_info >= (3, 4): pass elif sys.version_info >= (3, 3): dependencies.append('asyncio') else: dependencies.append('trollius') setup( name="qtile", version="0.10.6", description="A pure-Python tiling window manager.", long_description=long_description, classifiers=[ "Intended Audience :: End Users/Desktop", "License :: OSI Approved :: MIT License", "Development Status :: 3 - Alpha", "Programming Language :: Python", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Operating System :: Unix", "Topic :: Desktop Environment :: Window Managers", ], keywords="qtile tiling window manager", author="Aldo Cortesi", author_email="[email protected]", maintainer="Tycho Andersen", maintainer_email="[email protected]", url="http://qtile.org", license="MIT", install_requires=dependencies, setup_requires=dependencies, extras_require={ 'ipython': ["ipykernel", "jupyter_console"], }, packages=['libqtile', 'libqtile.interactive', 'libqtile.layout', 'libqtile.scripts', 'libqtile.widget', 'libqtile.resources' ], package_data={'libqtile.resources': ['battery-icons/*.png']}, entry_points={ 'console_scripts': [ 'qtile = libqtile.scripts.qtile:main', 'qtile-run = libqtile.scripts.qtile_run:main', 'qtile-top = libqtile.scripts.qtile_top:main', 'qshell = libqtile.scripts.qshell:main', ] }, scripts=[ 'bin/iqshell', ], data_files=[ ('share/man/man1', ['resources/qtile.1', 'resources/qshell.1'])], cmdclass={'install': CheckCairoXcb}, **cffi_args )<|fim▁end|>
# in the Software without restriction, including without limitation the rights
<|file_name|>test_octodns_provider_dnsimple.py<|end_file_name|><|fim▁begin|>#
#
#

from __future__ import absolute_import, division, print_function, \
    unicode_literals

from mock import Mock, call
from os.path import dirname, join
from requests import HTTPError
from requests_mock import ANY, mock as requests_mock
from unittest import TestCase

from octodns.record import Record
from octodns.provider.dnsimple import DnsimpleClientNotFound, DnsimpleProvider
from octodns.provider.yaml import YamlProvider
from octodns.zone import Zone


class TestDnsimpleProvider(TestCase):
    expected = Zone('unit.tests.', [])
    source = YamlProvider('test', join(dirname(__file__), 'config'))
    source.populate(expected)

    # Our test suite differs a bit, add our NS and remove the simple one
    expected.add_record(Record.new(expected, 'under', {
        'ttl': 3600,
        'type': 'NS',
        'values': [
            'ns1.unit.tests.',
            'ns2.unit.tests.',
        ]
    }))
    for record in list(expected.records):
        if record.name == 'sub' and record._type == 'NS':
            expected._remove_record(record)
            break

    def test_populate(self):
        provider = DnsimpleProvider('test', 'token', 42)

        # Bad auth
        with requests_mock() as mock:
            mock.get(ANY, status_code=401,
                     text='{"message": "Authentication failed"}')

            with self.assertRaises(Exception) as ctx:
                zone = Zone('unit.tests.', [])
                provider.populate(zone)
            self.assertEquals('Unauthorized', ctx.exception.message)

        # General error
        with requests_mock() as mock:
            mock.get(ANY, status_code=502, text='Things caught fire')

            with self.assertRaises(HTTPError) as ctx:
                zone = Zone('unit.tests.', [])
                provider.populate(zone)
            self.assertEquals(502, ctx.exception.response.status_code)

        # Non-existent zone doesn't populate anything
        with requests_mock() as mock:
            mock.get(ANY, status_code=404,
                     text='{"message": "Domain `foo.bar` not found"}')

            zone = Zone('unit.tests.', [])
            provider.populate(zone)
            self.assertEquals(set(), zone.records)

        # No diffs == no changes
        with requests_mock() as mock:
            base = 'https://api.dnsimple.com/v2/42/zones/unit.tests/' \
                'records?page='
            with open('tests/fixtures/dnsimple-page-1.json') as fh:
                mock.get('{}{}'.format(base, 1), text=fh.read())
            with open('tests/fixtures/dnsimple-page-2.json') as fh:
                mock.get('{}{}'.format(base, 2), text=fh.read())

            zone = Zone('unit.tests.', [])
            provider.populate(zone)
            self.assertEquals(14, len(zone.records))
            changes = self.expected.changes(zone, provider)
            self.assertEquals(0, len(changes))

        # 2nd populate makes no network calls/all from cache
        again = Zone('unit.tests.', [])
        provider.populate(again)
        self.assertEquals(14, len(again.records))

        # bust the cache
        del provider._zone_records[zone.name]

        # test handling of invalid content
        with requests_mock() as mock:
            with open('tests/fixtures/dnsimple-invalid-content.json') as fh:
                mock.get(ANY, text=fh.read())

            zone = Zone('unit.tests.', [])
            provider.populate(zone)
            self.assertEquals(set([
                Record.new(zone, '', {
                    'ttl': 3600,
                    'type': 'SSHFP',
                    'values': []
                }),
                Record.new(zone, '_srv._tcp', {
                    'ttl': 600,
                    'type': 'SRV',
                    'values': []
                }),
                Record.new(zone, 'naptr', {
                    'ttl': 600,
                    'type': 'NAPTR',
                    'values': []
                }),
            ]), zone.records)

    def test_apply(self):
        provider = DnsimpleProvider('test', 'token', 42)

        resp = Mock()
        resp.json = Mock()
        provider._client._request = Mock(return_value=resp)

        # Non-existent domain, create everything
        resp.json.side_effect = [
            DnsimpleClientNotFound,  # no zone in populate
            DnsimpleClientNotFound,  # no domain during apply
        ]
        plan = provider.plan(self.expected)

        # No root NS, no ignored
        n = len(self.expected.records) - 2
        self.assertEquals(n, len(plan.changes))
self.assertEquals(n, provider.apply(plan)) provider._client._request.assert_has_calls([ # created the domain call('POST', '/domains', data={'name': 'unit.tests'}), # created at least one of the record with expected data call('POST', '/zones/unit.tests/records', data={ 'content': '20 30 foo-1.unit.tests.', 'priority': 10, 'type': 'SRV', 'name': '_srv._tcp', 'ttl': 600 }), ]) # expected number of total calls self.assertEquals(26, provider._client._request.call_count) provider._client._request.reset_mock() # delete 1 and update 1 provider._client.records = Mock(return_value=[ { 'id': 11189897, 'name': 'www', 'content': '1.2.3.4', 'ttl': 300, 'type': 'A', }, { 'id': 11189898, 'name': 'www', 'content': '2.2.3.4', 'ttl': 300, 'type': 'A', }, { 'id': 11189899, 'name': 'ttl', 'content': '3.2.3.4', 'ttl': 600, 'type': 'A', } ]) # Domain exists, we don't care about return resp.json.side_effect = ['{}'] wanted = Zone('unit.tests.', []) wanted.add_record(Record.new(wanted, 'ttl', { 'ttl': 300, 'type': 'A', 'value': '3.2.3.4' })) plan = provider.plan(wanted) self.assertEquals(2, len(plan.changes)) self.assertEquals(2, provider.apply(plan)) # recreate for update, and deletes for the 2 parts of the other provider._client._request.assert_has_calls([ call('POST', '/zones/unit.tests/records', data={ 'content': '3.2.3.4', 'type': 'A', 'name': 'ttl', 'ttl': 300 }),<|fim▁hole|> call('DELETE', '/zones/unit.tests/records/11189898') ], any_order=True)<|fim▁end|>
call('DELETE', '/zones/unit.tests/records/11189899'), call('DELETE', '/zones/unit.tests/records/11189897'),
<|file_name|>test_base.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import print_function import re import sys from datetime import datetime, timedelta import numpy as np import pandas as pd import pandas.compat as compat import pandas.core.common as com import pandas.util.testing as tm from pandas import (Series, Index, DatetimeIndex, TimedeltaIndex, PeriodIndex, Timedelta) from pandas.compat import u, StringIO from pandas.core.base import (FrozenList, FrozenNDArray, PandasDelegate, NoNewAttributesMixin) from pandas.tseries.base import DatetimeIndexOpsMixin from pandas.util.testing import (assertRaisesRegexp, assertIsInstance) class CheckStringMixin(object): def test_string_methods_dont_fail(self): repr(self.container) str(self.container) bytes(self.container) if not compat.PY3: unicode(self.container) # noqa def test_tricky_container(self): if not hasattr(self, 'unicode_container'): raise nose.SkipTest('Need unicode_container to test with this') repr(self.unicode_container) str(self.unicode_container) bytes(self.unicode_container) if not compat.PY3: unicode(self.unicode_container) # noqa class CheckImmutable(object): mutable_regex = re.compile('does not support mutable operations') def check_mutable_error(self, *args, **kwargs): # pass whatever functions you normally would to assertRaises (after the # Exception kind) assertRaisesRegexp(TypeError, self.mutable_regex, *args, **kwargs) def test_no_mutable_funcs(self): def setitem(): self.container[0] = 5 self.check_mutable_error(setitem) def setslice(): self.container[1:2] = 3 self.check_mutable_error(setslice) def delitem(): del self.container[0] self.check_mutable_error(delitem) def delslice(): del self.container[0:3] self.check_mutable_error(delslice) mutable_methods = getattr(self, "mutable_methods", []) for meth in mutable_methods: self.check_mutable_error(getattr(self.container, meth)) def test_slicing_maintains_type(self): result = self.container[1:2] expected = self.lst[1:2] self.check_result(result, expected) def check_result(self, result, expected, klass=None): klass = klass or self.klass assertIsInstance(result, klass) self.assertEqual(result, expected) class TestFrozenList(CheckImmutable, CheckStringMixin, tm.TestCase): mutable_methods = ('extend', 'pop', 'remove', 'insert') unicode_container = FrozenList([u("\u05d0"), u("\u05d1"), "c"]) def setUp(self): self.lst = [1, 2, 3, 4, 5] self.container = FrozenList(self.lst) self.klass = FrozenList def test_add(self): result = self.container + (1, 2, 3) expected = FrozenList(self.lst + [1, 2, 3]) self.check_result(result, expected) result = (1, 2, 3) + self.container expected = FrozenList([1, 2, 3] + self.lst) self.check_result(result, expected) def test_inplace(self): q = r = self.container q += [5] self.check_result(q, self.lst + [5]) # other shouldn't be mutated self.check_result(r, self.lst) class TestFrozenNDArray(CheckImmutable, CheckStringMixin, tm.TestCase): mutable_methods = ('put', 'itemset', 'fill') unicode_container = FrozenNDArray([u("\u05d0"), u("\u05d1"), "c"]) def setUp(self): self.lst = [3, 5, 7, -2] self.container = FrozenNDArray(self.lst) self.klass = FrozenNDArray def test_shallow_copying(self): original = self.container.copy() assertIsInstance(self.container.view(), FrozenNDArray) self.assertFalse(isinstance( self.container.view(np.ndarray), FrozenNDArray)) self.assertIsNot(self.container.view(), self.container) self.assert_numpy_array_equal(self.container, original) # shallow copy should be the same too 
assertIsInstance(self.container._shallow_copy(), FrozenNDArray)

        # setting should not be allowed
        def testit(container):
            container[0] = 16

        self.check_mutable_error(testit, self.container)

    def test_values(self):
        original = self.container.view(np.ndarray).copy()
        n = original[0] + 15
        vals = self.container.values()
        self.assert_numpy_array_equal(original, vals)
        self.assertIsNot(original, vals)
        vals[0] = n
        self.assert_numpy_array_equal(self.container, original)
        self.assertEqual(vals[0], n)


class TestPandasDelegate(tm.TestCase):

    def setUp(self):
        pass

    def test_invalida_delgation(self):
        # these show that in order for the delegation to work
        # the _delegate_* methods need to be overridden to not raise a TypeError

        class Delegator(object):
            _properties = ['foo']
            _methods = ['bar']

            def _set_foo(self, value):
                self.foo = value

            def _get_foo(self):
                return self.foo

            foo = property(_get_foo, _set_foo, doc="foo property")

            def bar(self, *args, **kwargs):
                """ a test bar method """
                pass

        class Delegate(PandasDelegate):

            def __init__(self, obj):
                self.obj = obj

        Delegate._add_delegate_accessors(delegate=Delegator,
                                         accessors=Delegator._properties,
                                         typ='property')
        Delegate._add_delegate_accessors(delegate=Delegator,
                                         accessors=Delegator._methods,
                                         typ='method')

        delegate = Delegate(Delegator())

        def f():
            delegate.foo

        self.assertRaises(TypeError, f)

        def f():
            delegate.foo = 5

        self.assertRaises(TypeError, f)

        def f():
            delegate.foo()

        self.assertRaises(TypeError, f)


class Ops(tm.TestCase):

    def _allow_na_ops(self, obj):
        """Whether to skip test cases including NaN"""
        if (isinstance(obj, Index) and
                (obj.is_boolean() or not obj._can_hold_na)):
            # don't test boolean / int64 index
            return False
        return True

    def setUp(self):
        self.bool_index = tm.makeBoolIndex(10, name='a')
        self.int_index = tm.makeIntIndex(10, name='a')
        self.float_index = tm.makeFloatIndex(10, name='a')
        self.dt_index = tm.makeDateIndex(10, name='a')
        self.dt_tz_index = tm.makeDateIndex(10, name='a').tz_localize(
            tz='US/Eastern')
        self.period_index = tm.makePeriodIndex(10, name='a')
        self.string_index = tm.makeStringIndex(10, name='a')
        self.unicode_index = tm.makeUnicodeIndex(10, name='a')

        arr = np.random.randn(10)
        self.int_series = Series(arr, index=self.int_index, name='a')
        self.float_series = Series(arr, index=self.float_index, name='a')
        self.dt_series = Series(arr, index=self.dt_index, name='a')
        self.dt_tz_series = self.dt_tz_index.to_series(keep_tz=True)
        self.period_series = Series(arr, index=self.period_index, name='a')
        self.string_series = Series(arr, index=self.string_index, name='a')

        types = ['bool', 'int', 'float', 'dt', 'dt_tz', 'period', 'string',
                 'unicode']
        fmts = ["{0}_{1}".format(t, f)
                for t in types for f in ['index', 'series']]
        self.objs = [getattr(self, f)
                     for f in fmts if getattr(self, f, None) is not None]

    def check_ops_properties(self, props, filter=None, ignore_failures=False):
        for op in props:
            for o in self.is_valid_objs:

                # if a filter, skip if it doesn't match
                if filter is not None:
                    filt = o.index if isinstance(o, Series) else o
                    if not filter(filt):
                        continue

                try:
                    if isinstance(o, Series):
                        expected = Series(
                            getattr(o.index, op), index=o.index, name='a')
                    else:
                        expected = getattr(o, op)
                except (AttributeError):
                    if ignore_failures:
                        continue

                result = getattr(o, op)

                # these could be series, arrays or scalars
                if isinstance(result, Series) and isinstance(expected, Series):
                    tm.assert_series_equal(result, expected)
                elif isinstance(result, Index) and isinstance(expected, Index):
                    tm.assert_index_equal(result, expected)
                elif isinstance(result, np.ndarray) and
isinstance(expected, np.ndarray):
                    self.assert_numpy_array_equal(result, expected)
                else:
                    self.assertEqual(result, expected)

            # freq raises AttributeError on an Int64Index because it's not
            # defined; we mostly care about Series here anyhow
            if not ignore_failures:
                for o in self.not_valid_objs:

                    # an object that is datetimelike will raise a TypeError,
                    # otherwise an AttributeError
                    if issubclass(type(o), DatetimeIndexOpsMixin):
                        self.assertRaises(TypeError, lambda: getattr(o, op))
                    else:
                        self.assertRaises(AttributeError,
                                          lambda: getattr(o, op))

    def test_binary_ops_docs(self):
        from pandas import DataFrame, Panel
        op_map = {'add': '+',
                  'sub': '-',
                  'mul': '*',
                  'mod': '%',
                  'pow': '**',
                  'truediv': '/',
                  'floordiv': '//'}
        for op_name in ['add', 'sub', 'mul', 'mod', 'pow', 'truediv',
                        'floordiv']:
            for klass in [Series, DataFrame, Panel]:
                operand1 = klass.__name__.lower()
                operand2 = 'other'
                op = op_map[op_name]
                expected_str = ' '.join([operand1, op, operand2])
                self.assertTrue(expected_str in getattr(klass,
                                                        op_name).__doc__)

                # reverse version of the binary ops
                expected_str = ' '.join([operand2, op, operand1])
                self.assertTrue(expected_str in getattr(klass, 'r' +
                                                        op_name).__doc__)


class TestIndexOps(Ops):

    def setUp(self):
        super(TestIndexOps, self).setUp()
        self.is_valid_objs = [o for o in self.objs if o._allow_index_ops]
        self.not_valid_objs = [o for o in self.objs if not o._allow_index_ops]

    def test_none_comparison(self):

        # bug brought up by #1079
        # changed from TypeError in 0.17.0
        for o in self.is_valid_objs:
            if isinstance(o, Series):

                o[0] = np.nan

                # noinspection PyComparisonWithNone
                result = o == None  # noqa
                self.assertFalse(result.iat[0])
                self.assertFalse(result.iat[1])

                # noinspection PyComparisonWithNone
                result = o != None  # noqa
                self.assertTrue(result.iat[0])
                self.assertTrue(result.iat[1])

                result = None == o  # noqa
                self.assertFalse(result.iat[0])
                self.assertFalse(result.iat[1])

                # this fails for numpy < 1.9
                # and oddly for *some* platforms
                # result = None != o  # noqa
                # self.assertTrue(result.iat[0])
                # self.assertTrue(result.iat[1])

                result = None > o
                self.assertFalse(result.iat[0])
                self.assertFalse(result.iat[1])

                result = o < None
                self.assertFalse(result.iat[0])
                self.assertFalse(result.iat[1])

    def test_ndarray_compat_properties(self):

        for o in self.objs:

            # check that we work
            for p in ['shape', 'dtype', 'flags', 'T', 'strides', 'itemsize',
                      'nbytes']:
                self.assertIsNotNone(getattr(o, p, None))
            self.assertTrue(hasattr(o, 'base'))

            # if we have a datetimelike dtype then needs a view to work
            # but the user is responsible for that
            try:
                self.assertIsNotNone(o.data)
            except ValueError:
                pass

            self.assertRaises(ValueError, o.item)  # len > 1
            self.assertEqual(o.ndim, 1)
            self.assertEqual(o.size, len(o))

        self.assertEqual(Index([1]).item(), 1)
        self.assertEqual(Series([1]).item(), 1)

    def test_ops(self):
        for op in ['max', 'min']:
            for o in self.objs:
                result = getattr(o, op)()
                if not isinstance(o, PeriodIndex):
                    expected = getattr(o.values, op)()
                else:
                    expected = pd.Period(ordinal=getattr(o.values, op)(),
                                         freq=o.freq)
                try:
                    self.assertEqual(result, expected)
                except TypeError:
                    # comparing tz-aware series with np.array results in
                    # TypeError
                    expected = expected.astype('M8[ns]').astype('int64')
                    self.assertEqual(result.value, expected)

    def test_nanops(self):
        # GH 7261
        for op in ['max', 'min']:
            for klass in [Index, Series]:

                obj = klass([np.nan, 2.0])
                self.assertEqual(getattr(obj, op)(), 2.0)

                obj = klass([np.nan])
                self.assertTrue(pd.isnull(getattr(obj, op)()))

                obj = klass([])
                self.assertTrue(pd.isnull(getattr(obj, op)()))

                obj =
klass([pd.NaT, datetime(2011, 11, 1)]) # check DatetimeIndex monotonic path self.assertEqual(getattr(obj, op)(), datetime(2011, 11, 1)) obj = klass([pd.NaT, datetime(2011, 11, 1), pd.NaT]) # check DatetimeIndex non-monotonic path self.assertEqual(getattr(obj, op)(), datetime(2011, 11, 1)) # argmin/max obj = Index(np.arange(5, dtype='int64')) self.assertEqual(obj.argmin(), 0) self.assertEqual(obj.argmax(), 4) obj = Index([np.nan, 1, np.nan, 2]) self.assertEqual(obj.argmin(), 1) self.assertEqual(obj.argmax(), 3) obj = Index([np.nan]) self.assertEqual(obj.argmin(), -1) self.assertEqual(obj.argmax(), -1) obj = Index([pd.NaT, datetime(2011, 11, 1), datetime(2011, 11, 2), pd.NaT]) self.assertEqual(obj.argmin(), 1) self.assertEqual(obj.argmax(), 2) obj = Index([pd.NaT]) self.assertEqual(obj.argmin(), -1) self.assertEqual(obj.argmax(), -1) def test_value_counts_unique_nunique(self): for o in self.objs: klass = type(o) values = o.values # create repeated values, 'n'th element is repeated by n+1 times if isinstance(o, PeriodIndex): # freq must be specified because repeat makes freq ambiguous # resets name from Index expected_index = pd.Index(o[::-1]) expected_index.name = None # attach name to klass o = o.repeat(range(1, len(o) + 1)) o.name = 'a' elif isinstance(o, DatetimeIndex): # resets name from Index expected_index = pd.Index(o[::-1]) expected_index.name = None # attach name to klass o = o.repeat(range(1, len(o) + 1)) o.name = 'a' # don't test boolean elif isinstance(o, Index) and o.is_boolean(): continue elif isinstance(o, Index): expected_index = pd.Index(values[::-1]) expected_index.name = None o = o.repeat(range(1, len(o) + 1)) o.name = 'a' else: expected_index = pd.Index(values[::-1]) idx = o.index.repeat(range(1, len(o) + 1)) o = klass( np.repeat(values, range(1, len(o) + 1)), index=idx, name='a') expected_s = Series( range(10, 0, - 1), index=expected_index, dtype='int64', name='a') result = o.value_counts() tm.assert_series_equal(result, expected_s) self.assertTrue(result.index.name is None) self.assertEqual(result.name, 'a') result = o.unique() if isinstance(o, (DatetimeIndex, PeriodIndex)): self.assertTrue(isinstance(result, o.__class__)) self.assertEqual(result.name, o.name) self.assertEqual(result.freq, o.freq) self.assert_numpy_array_equal(result, values) self.assertEqual(o.nunique(), len(np.unique(o.values))) for null_obj in [np.nan, None]: for o in self.objs: klass = type(o) values = o.values if not self._allow_na_ops(o): continue # special assign to the numpy array if com.is_datetimetz(o): if isinstance(o, DatetimeIndex): v = o.asi8 v[0:2] = pd.tslib.iNaT values = o._shallow_copy(v) else: o = o.copy() o[0:2] = pd.tslib.iNaT values = o.values elif o.values.dtype == 'datetime64[ns]' or isinstance( o, PeriodIndex): values[0:2] = pd.tslib.iNaT else: values[0:2] = null_obj # create repeated values, 'n'th element is repeated by n+1 # times if isinstance(o, PeriodIndex): # freq must be specified because repeat makes freq # ambiguous # resets name from Index expected_index = pd.Index(o, name=None) # attach name to klass o = klass( np.repeat(values, range( 1, len(o) + 1)), freq=o.freq, name='a') elif isinstance(o, Index): expected_index = pd.Index(values, name=None) o = klass( np.repeat(values, range(1, len(o) + 1)), name='a') else: expected_index = pd.Index(values, name=None) idx = np.repeat(o.index.values, range(1, len(o) + 1)) o = klass( np.repeat(values, range( 1, len(o) + 1)), index=idx, name='a') expected_s_na = Series(list(range(10, 2, -1)) + [3], index=expected_index[9:0:-1], 
dtype='int64', name='a') expected_s = Series(list(range(10, 2, -1)), index=expected_index[9:1:-1], dtype='int64', name='a') result_s_na = o.value_counts(dropna=False) tm.assert_series_equal(result_s_na, expected_s_na) self.assertTrue(result_s_na.index.name is None) self.assertEqual(result_s_na.name, 'a') result_s = o.value_counts() tm.assert_series_equal(o.value_counts(), expected_s) self.assertTrue(result_s.index.name is None) self.assertEqual(result_s.name, 'a') # numpy_array_equal cannot compare arrays includes nan result = o.unique() self.assert_numpy_array_equal(result[1:], values[2:]) if isinstance(o, (DatetimeIndex, PeriodIndex)): self.assertTrue(result.asi8[0] == pd.tslib.iNaT) else: self.assertTrue(pd.isnull(result[0])) self.assertEqual(o.nunique(), 8) self.assertEqual(o.nunique(dropna=False), 9) def test_value_counts_inferred(self): klasses = [Index, Series] for klass in klasses: s_values = ['a', 'b', 'b', 'b', 'b', 'c', 'd', 'd', 'a', 'a'] s = klass(s_values) expected = Series([4, 3, 2, 1], index=['b', 'a', 'd', 'c']) tm.assert_series_equal(s.value_counts(), expected) self.assert_numpy_array_equal(s.unique(), np.unique(s_values)) self.assertEqual(s.nunique(), 4) # don't sort, have to sort after the fact as not sorting is # platform-dep hist = s.value_counts(sort=False).sort_values() expected = Series([3, 1, 4, 2], index=list('acbd')).sort_values() tm.assert_series_equal(hist, expected) # sort ascending hist = s.value_counts(ascending=True) expected = Series([1, 2, 3, 4], index=list('cdab')) tm.assert_series_equal(hist, expected) # relative histogram. hist = s.value_counts(normalize=True) expected = Series([.4, .3, .2, .1], index=['b', 'a', 'd', 'c']) tm.assert_series_equal(hist, expected) # bins self.assertRaises(TypeError, lambda bins: s.value_counts(bins=bins), 1) s1 = Series([1, 1, 2, 3]) res1 = s1.value_counts(bins=1) exp1 = Series({0.998: 4}) tm.assert_series_equal(res1, exp1) res1n = s1.value_counts(bins=1, normalize=True) exp1n = Series({0.998: 1.0}) tm.assert_series_equal(res1n, exp1n) self.assert_numpy_array_equal(s1.unique(), np.array([1, 2, 3])) self.assertEqual(s1.nunique(), 3) res4 = s1.value_counts(bins=4) exp4 = Series({0.998: 2, 1.5: 1, 2.0: 0, 2.5: 1}, index=[0.998, 2.5, 1.5, 2.0]) tm.assert_series_equal(res4, exp4) res4n = s1.value_counts(bins=4, normalize=True) exp4n = Series( {0.998: 0.5, 1.5: 0.25, 2.0: 0.0, 2.5: 0.25}, index=[0.998, 2.5, 1.5, 2.0]) tm.assert_series_equal(res4n, exp4n) # handle NA's properly s_values = ['a', 'b', 'b', 'b', np.nan, np.nan, 'd', 'd', 'a', 'a', 'b'] s = klass(s_values) expected = Series([4, 3, 2], index=['b', 'a', 'd']) tm.assert_series_equal(s.value_counts(), expected) self.assert_numpy_array_equal(s.unique(), np.array( ['a', 'b', np.nan, 'd'], dtype='O')) self.assertEqual(s.nunique(), 3) s = klass({}) expected = Series([], dtype=np.int64) tm.assert_series_equal(s.value_counts(), expected, check_index_type=False) self.assert_numpy_array_equal(s.unique(), np.array([])) self.assertEqual(s.nunique(), 0) # GH 3002, datetime64[ns] # don't test names though txt = "\n".join(['xxyyzz20100101PIE', 'xxyyzz20100101GUM', 'xxyyzz20100101EGG', 'xxyyww20090101EGG', 'foofoo20080909PIE', 'foofoo20080909GUM']) f = StringIO(txt) df = pd.read_fwf(f, widths=[6, 8, 3], names=["person_id", "dt", "food"], parse_dates=["dt"]) s = klass(df['dt'].copy()) s.name = None idx = pd.to_datetime( ['2010-01-01 00:00:00Z', '2008-09-09 00:00:00Z', '2009-01-01 00:00:00X']) expected_s = Series([3, 2, 1], index=idx) tm.assert_series_equal(s.value_counts(), 
expected_s) expected = np.array(['2010-01-01 00:00:00Z', '2009-01-01 00:00:00Z', '2008-09-09 00:00:00Z'], dtype='datetime64[ns]') if isinstance(s, DatetimeIndex): expected = DatetimeIndex(expected) self.assertTrue(s.unique().equals(expected)) else: self.assert_numpy_array_equal(s.unique(), expected) self.assertEqual(s.nunique(), 3) # with NaT s = df['dt'].copy() s = klass([v for v in s.values] + [pd.NaT]) result = s.value_counts() self.assertEqual(result.index.dtype, 'datetime64[ns]') tm.assert_series_equal(result, expected_s) result = s.value_counts(dropna=False) expected_s[pd.NaT] = 1 tm.assert_series_equal(result, expected_s) unique = s.unique() self.assertEqual(unique.dtype, 'datetime64[ns]') # numpy_array_equal cannot compare pd.NaT self.assert_numpy_array_equal(unique[:3], expected) self.assertTrue(unique[3] is pd.NaT or unique[3].astype('int64') == pd.tslib.iNaT) self.assertEqual(s.nunique(), 3) self.assertEqual(s.nunique(dropna=False), 4) # timedelta64[ns] td = df.dt - df.dt + timedelta(1) td = klass(td, name='dt') result = td.value_counts() expected_s = Series([6], index=[Timedelta('1day')], name='dt') tm.assert_series_equal(result, expected_s) expected = TimedeltaIndex(['1 days']) if isinstance(td, TimedeltaIndex): self.assertTrue(td.unique().equals(expected)) else: self.assert_numpy_array_equal(td.unique(), expected.values) td2 = timedelta(1) + (df.dt - df.dt) td2 = klass(td2, name='dt') result2 = td2.value_counts() tm.assert_series_equal(result2, expected_s) def test_factorize(self): for o in self.objs: if isinstance(o, Index) and o.is_boolean(): exp_arr = np.array([0, 1] + [0] * 8) exp_uniques = o exp_uniques = Index([False, True]) else: exp_arr = np.array(range(len(o))) exp_uniques = o labels, uniques = o.factorize() self.assert_numpy_array_equal(labels, exp_arr) if isinstance(o, Series): expected = Index(o.values) self.assert_numpy_array_equal(uniques, expected) else: self.assertTrue(uniques.equals(exp_uniques)) for o in self.objs: # don't test boolean if isinstance(o, Index) and o.is_boolean(): continue # sort by value, and create duplicates if isinstance(o, Series): o = o.sort_values() n = o.iloc[5:].append(o) else: indexer = o.argsort() o = o.take(indexer) n = o[5:].append(o) exp_arr = np.array([5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9]) labels, uniques = n.factorize(sort=True) self.assert_numpy_array_equal(labels, exp_arr) if isinstance(o, Series): expected = Index(o.values) self.assert_numpy_array_equal(uniques, expected) else: self.assertTrue(uniques.equals(o)) exp_arr = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4]) labels, uniques = n.factorize(sort=False) self.assert_numpy_array_equal(labels, exp_arr) if isinstance(o, Series): expected = Index(np.concatenate([o.values[5:10], o.values[:5] ])) self.assert_numpy_array_equal(uniques, expected) else: expected = o[5:].append(o[:5]) self.assertTrue(uniques.equals(expected)) def test_duplicated_drop_duplicates(self): # GH 4060 for original in self.objs: if isinstance(original, Index): # special case if original.is_boolean(): result = original.drop_duplicates() expected = Index([False, True], name='a') tm.assert_index_equal(result, expected) continue # original doesn't have duplicates expected = np.array([False] * len(original), dtype=bool) duplicated = original.duplicated() tm.assert_numpy_array_equal(duplicated, expected) self.assertTrue(duplicated.dtype == bool) result = original.drop_duplicates() tm.assert_index_equal(result, original)<|fim▁hole|> self.assertFalse(result is original) # has_duplicates 
self.assertFalse(original.has_duplicates) # create repeated values, 3rd and 5th values are duplicated idx = original[list(range(len(original))) + [5, 3]] expected = np.array([False] * len(original) + [True, True], dtype=bool) duplicated = idx.duplicated() tm.assert_numpy_array_equal(duplicated, expected) self.assertTrue(duplicated.dtype == bool) tm.assert_index_equal(idx.drop_duplicates(), original) base = [False] * len(idx) base[3] = True base[5] = True expected = np.array(base) duplicated = idx.duplicated(keep='last') tm.assert_numpy_array_equal(duplicated, expected) self.assertTrue(duplicated.dtype == bool) result = idx.drop_duplicates(keep='last') tm.assert_index_equal(result, idx[~expected]) # deprecate take_last with tm.assert_produces_warning(FutureWarning): duplicated = idx.duplicated(take_last=True) tm.assert_numpy_array_equal(duplicated, expected) self.assertTrue(duplicated.dtype == bool) with tm.assert_produces_warning(FutureWarning): result = idx.drop_duplicates(take_last=True) tm.assert_index_equal(result, idx[~expected]) base = [False] * len(original) + [True, True] base[3] = True base[5] = True expected = np.array(base) duplicated = idx.duplicated(keep=False) tm.assert_numpy_array_equal(duplicated, expected) self.assertTrue(duplicated.dtype == bool) result = idx.drop_duplicates(keep=False) tm.assert_index_equal(result, idx[~expected]) with tm.assertRaisesRegexp( TypeError, "drop_duplicates\(\) got an unexpected " "keyword argument"): idx.drop_duplicates(inplace=True) else: expected = Series([False] * len(original), index=original.index, name='a') tm.assert_series_equal(original.duplicated(), expected) result = original.drop_duplicates() tm.assert_series_equal(result, original) self.assertFalse(result is original) idx = original.index[list(range(len(original))) + [5, 3]] values = original._values[list(range(len(original))) + [5, 3]] s = Series(values, index=idx, name='a') expected = Series([False] * len(original) + [True, True], index=idx, name='a') tm.assert_series_equal(s.duplicated(), expected) tm.assert_series_equal(s.drop_duplicates(), original) base = [False] * len(idx) base[3] = True base[5] = True expected = Series(base, index=idx, name='a') tm.assert_series_equal(s.duplicated(keep='last'), expected) tm.assert_series_equal(s.drop_duplicates(keep='last'), s[~np.array(base)]) # deprecate take_last with tm.assert_produces_warning(FutureWarning): tm.assert_series_equal( s.duplicated(take_last=True), expected) with tm.assert_produces_warning(FutureWarning): tm.assert_series_equal(s.drop_duplicates(take_last=True), s[~np.array(base)]) base = [False] * len(original) + [True, True] base[3] = True base[5] = True expected = Series(base, index=idx, name='a') tm.assert_series_equal(s.duplicated(keep=False), expected) tm.assert_series_equal(s.drop_duplicates(keep=False), s[~np.array(base)]) s.drop_duplicates(inplace=True) tm.assert_series_equal(s, original) def test_fillna(self): # # GH 11343 # though Index.fillna and Series.fillna has separate impl, # test here to confirm these works as the same def get_fill_value(obj): if isinstance(obj, pd.tseries.base.DatetimeIndexOpsMixin): return obj.asobject.values[0] else: return obj.values[0] for o in self.objs: klass = type(o) values = o.values # values will not be changed result = o.fillna(get_fill_value(o)) if isinstance(o, Index): self.assert_index_equal(o, result) else: self.assert_series_equal(o, result) # check shallow_copied self.assertFalse(o is result) for null_obj in [np.nan, None]: for o in self.objs: klass = type(o) values = 
o.values.copy() if not self._allow_na_ops(o): continue # value for filling fill_value = get_fill_value(o) # special assign to the numpy array if o.values.dtype == 'datetime64[ns]' or isinstance( o, PeriodIndex): values[0:2] = pd.tslib.iNaT else: values[0:2] = null_obj if isinstance(o, PeriodIndex): # freq must be specified because repeat makes freq # ambiguous expected = [fill_value.ordinal] * 2 + list(values[2:]) expected = klass(ordinal=expected, freq=o.freq) o = klass(ordinal=values, freq=o.freq) else: expected = [fill_value] * 2 + list(values[2:]) expected = klass(expected) o = klass(values) result = o.fillna(fill_value) if isinstance(o, Index): self.assert_index_equal(result, expected) else: self.assert_series_equal(result, expected) # check shallow_copied self.assertFalse(o is result) def test_memory_usage(self): for o in self.objs: res = o.memory_usage() res_deep = o.memory_usage(deep=True) if (com.is_object_dtype(o) or (isinstance(o, Series) and com.is_object_dtype(o.index))): # if there are objects, only deep will pick them up self.assertTrue(res_deep > res) else: self.assertEqual(res, res_deep) if isinstance(o, Series): self.assertEqual( (o.memory_usage(index=False) + o.index.memory_usage()), o.memory_usage(index=True) ) # sys.getsizeof will call the .memory_usage with # deep=True, and add on some GC overhead diff = res_deep - sys.getsizeof(o) self.assertTrue(abs(diff) < 100) class TestFloat64HashTable(tm.TestCase): def test_lookup_nan(self): from pandas.hashtable import Float64HashTable xs = np.array([2.718, 3.14, np.nan, -7, 5, 2, 3]) m = Float64HashTable() m.map_locations(xs) self.assert_numpy_array_equal(m.lookup(xs), np.arange(len(xs))) class TestNoNewAttributesMixin(tm.TestCase): def test_mixin(self): class T(NoNewAttributesMixin): pass t = T() self.assertFalse(hasattr(t, "__frozen")) t.a = "test" self.assertEqual(t.a, "test") t._freeze() # self.assertTrue("__frozen" not in dir(t)) self.assertIs(getattr(t, "__frozen"), True) def f(): t.b = "test" self.assertRaises(AttributeError, f) self.assertFalse(hasattr(t, "b")) if __name__ == '__main__': import nose nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'], # '--with-coverage', '--cover-package=pandas.core'], exit=False)<|fim▁end|>
<|file_name|>layout.rs<|end_file_name|><|fim▁begin|>// This file was generated by gir (b7f5189) from gir-files (71d73f0) // DO NOT EDIT use Adjustment; use Container; use Scrollable; use Widget; use ffi; use gdk; use glib::Value; use glib::object::Downcast; use glib::object::IsA; use glib::translate::*; use gobject_ffi; use std::mem; glib_wrapper! { pub struct Layout(Object<ffi::GtkLayout>): Container, Widget, Scrollable; match fn { get_type => || ffi::gtk_layout_get_type(), } } impl Layout { pub fn new(hadjustment: Option<&Adjustment>, vadjustment: Option<&Adjustment>) -> Layout { assert_initialized_main_thread!(); unsafe { Widget::from_glib_none(ffi::gtk_layout_new(hadjustment.to_glib_none().0, vadjustment.to_glib_none().0)).downcast_unchecked() } } pub fn get_bin_window(&self) -> Option<gdk::Window> { unsafe { from_glib_none(ffi::gtk_layout_get_bin_window(self.to_glib_none().0)) } } pub fn get_size(&self) -> (u32, u32) { unsafe { let mut width = mem::uninitialized(); let mut height = mem::uninitialized(); ffi::gtk_layout_get_size(self.to_glib_none().0, &mut width, &mut height); (width, height) } } pub fn move_<T: IsA<Widget>>(&self, child_widget: &T, x: i32, y: i32) { unsafe { ffi::gtk_layout_move(self.to_glib_none().0, child_widget.to_glib_none().0, x, y); } } pub fn put<T: IsA<Widget>>(&self, child_widget: &T, x: i32, y: i32) { unsafe { ffi::gtk_layout_put(self.to_glib_none().0, child_widget.to_glib_none().0, x, y); } } pub fn set_size(&self, width: u32, height: u32) { unsafe { ffi::gtk_layout_set_size(self.to_glib_none().0, width, height); } } pub fn get_property_height(&self) -> u32 { let mut value = Value::from(&0u32); unsafe { gobject_ffi::g_object_get_property(self.to_glib_none().0, "height".to_glib_none().0, value.to_glib_none_mut().0); } value.get().unwrap() } pub fn set_property_height(&self, height: u32) {<|fim▁hole|> } } pub fn get_property_width(&self) -> u32 { let mut value = Value::from(&0u32); unsafe { gobject_ffi::g_object_get_property(self.to_glib_none().0, "width".to_glib_none().0, value.to_glib_none_mut().0); } value.get().unwrap() } pub fn set_property_width(&self, width: u32) { unsafe { gobject_ffi::g_object_set_property(self.to_glib_none().0, "width".to_glib_none().0, Value::from(&width).to_glib_none().0); } } pub fn get_child_x<T: IsA<Widget>>(&self, item: &T) -> i32 { let mut value = Value::from(&0); unsafe { ffi::gtk_container_child_get_property(self.to_glib_none().0, item.to_glib_none().0, "x".to_glib_none().0, value.to_glib_none_mut().0); } value.get().unwrap() } pub fn set_child_x<T: IsA<Widget>>(&self, item: &T, x: i32) { unsafe { ffi::gtk_container_child_set_property(self.to_glib_none().0, item.to_glib_none().0, "x".to_glib_none().0, Value::from(&x).to_glib_none().0); } } pub fn get_child_y<T: IsA<Widget>>(&self, item: &T) -> i32 { let mut value = Value::from(&0); unsafe { ffi::gtk_container_child_get_property(self.to_glib_none().0, item.to_glib_none().0, "y".to_glib_none().0, value.to_glib_none_mut().0); } value.get().unwrap() } pub fn set_child_y<T: IsA<Widget>>(&self, item: &T, y: i32) { unsafe { ffi::gtk_container_child_set_property(self.to_glib_none().0, item.to_glib_none().0, "y".to_glib_none().0, Value::from(&y).to_glib_none().0); } } }<|fim▁end|>
unsafe { gobject_ffi::g_object_set_property(self.to_glib_none().0, "height".to_glib_none().0, Value::from(&height).to_glib_none().0);
<|file_name|>bitcoin_cs.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="cs" version="2.0"> <defaultcodec>UTF-8</defaultcodec> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About taobaocoin</source> <translation>O taobaocoinu</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;taobaocoin&lt;/b&gt; version</source> <translation>&lt;b&gt;taobaocoin&lt;/b&gt; verze</translation> </message> <message> <location line="+57"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation> Tohle je experimentální program. Šířen pod licencí MIT/X11, viz přiložený soubor COPYING nebo http://www.opensource.org/licenses/mit-license.php. Tento produkt zahrnuje programy vyvinuté OpenSSL Projektem pro použití v OpenSSL Toolkitu (http://www.openssl.org/) a kryptografický program od Erika Younga ([email protected]) a program UPnP od Thomase Bernarda.</translation> </message> <message> <location filename="../aboutdialog.cpp" line="+14"/> <source>Copyright</source> <translation>Copyright</translation> </message> <message> <location line="+0"/> <source>The taobaocoin developers</source> <translation>Vývojáři taobaocoinu</translation> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Adresář</translation> </message> <message> <location line="+19"/> <source>Double-click to edit address or label</source> <translation>Dvojklikem myši začneš upravovat označení adresy</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Vytvoř novou adresu</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Zkopíruj aktuálně vybranou adresu do systémové schránky</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation>Nová &amp;adresa</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+63"/> <source>These are your taobaocoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>Tohle jsou tvé taobaocoinové adresy pro příjem plateb. 
Můžeš dát pokaždé každému plátci novou adresu, abys věděl, kdo ti kdy kolik platil.</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>&amp;Copy Address</source> <translation>&amp;Kopíruj adresu</translation> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>Zobraz &amp;QR kód</translation><|fim▁hole|> <location line="+11"/> <source>Sign a message to prove you own a taobaocoin address</source> <translation>Podepiš zprávu, čímž prokážeš, že jsi vlastníkem taobaocoinové adresy</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>Po&amp;depiš zprávu</translation> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation>Smaž zvolenou adresu ze seznamu</translation> </message> <message> <location line="+27"/> <source>Export the data in the current tab to a file</source> <translation>Exportuj data z tohoto panelu do souboru</translation> </message> <message> <location line="+3"/> <source>&amp;Export</source> <translation>&amp;Export</translation> </message> <message> <location line="-44"/> <source>Verify a message to ensure it was signed with a specified taobaocoin address</source> <translation>Ověř zprávu, aby ses ujistil, že byla podepsána danou taobaocoinovou adresou</translation> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation>&amp;Ověř zprávu</translation> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>S&amp;maž</translation> </message> <message> <location filename="../addressbookpage.cpp" line="-5"/> <source>These are your taobaocoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source> <translation>Tohle jsou tvé taobaocoinové adresy pro posílání plateb. 
Před odesláním mincí si vždy zkontroluj částku a cílovou adresu.</translation> </message> <message> <location line="+13"/> <source>Copy &amp;Label</source> <translation>Kopíruj &amp;označení</translation> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation>&amp;Uprav</translation> </message> <message> <location line="+1"/> <source>Send &amp;Coins</source> <translation>Pošli min&amp;ce</translation> </message> <message> <location line="+260"/> <source>Export Address Book Data</source> <translation>Exportuj data adresáře</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>CSV formát (*.csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>Chyba při exportu</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Nemohu zapisovat do souboru %1.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>Označení</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Adresa</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(bez označení)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>Změna hesla</translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Zadej platné heslo</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Zadej nové heslo</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Totéž heslo ještě jednou</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+33"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Zadej nové heslo k peněžence.&lt;br/&gt;Použij &lt;b&gt;alespoň 10 náhodných znaků&lt;/b&gt; nebo &lt;b&gt;alespoň osm slov&lt;/b&gt;.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Zašifruj peněženku</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>K provedení této operace musíš zadat heslo k peněžence, aby se mohla odemknout.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Odemkni peněženku</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>K provedení této operace musíš zadat heslo k peněžence, aby se mohla dešifrovat.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Dešifruj peněženku</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Změň heslo</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Zadej staré a nové heslo k peněžence.</translation> </message> <message> <location 
line="+46"/> <source>Confirm wallet encryption</source> <translation>Potvrď zašifrování peněženky</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR LITECOINS&lt;/b&gt;!</source> <translation>Varování: Pokud si zašifruješ peněženku a ztratíš či zapomeneš heslo, &lt;b&gt;PŘIJDEŠ O VŠECHNY LITECOINY&lt;/b&gt;!</translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Jsi si jistý, že chceš peněženku zašifrovat?</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>DŮLEŽITÉ: Všechny předchozí zálohy peněženky by měly být nahrazeny nově vygenerovanou, zašifrovanou peněženkou. Z bezpečnostních důvodů budou předchozí zálohy nešifrované peněženky nepoužitelné, jakmile začneš používat novou zašifrovanou peněženku.</translation> </message> <message> <location line="+100"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>Upozornění: Caps Lock je zapnutý!</translation> </message> <message> <location line="-130"/> <location line="+58"/> <source>Wallet encrypted</source> <translation>Peněženka je zašifrována</translation> </message> <message> <location line="-56"/> <source>taobaocoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your taobaocoins from being stolen by malware infecting your computer.</source> <translation>taobaocoin se teď ukončí, aby dokončil zašifrování. Pamatuj však, že pouhé zašifrování peněženky úplně nezabraňuje krádeži tvých taobaocoinů malwarem, kterým se může počítač nakazit.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+42"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>Zašifrování peněženky selhalo</translation> </message> <message> <location line="-54"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Zašifrování peněženky selhalo kvůli vnitřní chybě. 
Tvá peněženka tedy nebyla zašifrována.</translation> </message> <message> <location line="+7"/> <location line="+48"/> <source>The supplied passphrases do not match.</source> <translation>Zadaná hesla nejsou shodná.</translation> </message> <message> <location line="-37"/> <source>Wallet unlock failed</source> <translation>Odemčení peněženky selhalo</translation> </message> <message> <location line="+1"/> <location line="+11"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Nezadal jsi správné heslo pro dešifrování peněženky.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>Dešifrování peněženky selhalo</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>Heslo k peněžence bylo v pořádku změněno.</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+233"/> <source>Sign &amp;message...</source> <translation>Po&amp;depiš zprávu...</translation> </message> <message> <location line="+280"/> <source>Synchronizing with network...</source> <translation>Synchronizuji se se sítí...</translation> </message> <message> <location line="-349"/> <source>&amp;Overview</source> <translation>&amp;Přehled</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>Zobraz celkový přehled peněženky</translation> </message> <message> <location line="+20"/> <source>&amp;Transactions</source> <translation>&amp;Transakce</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Procházej historii transakcí</translation> </message> <message> <location line="+7"/> <source>Edit the list of stored addresses and labels</source> <translation>Uprav seznam uložených adres a jejich označení</translation> </message> <message> <location line="-14"/> <source>Show the list of addresses for receiving payments</source> <translation>Zobraz seznam adres pro příjem plateb</translation> </message> <message> <location line="+31"/> <source>E&amp;xit</source> <translation>&amp;Konec</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Ukonči aplikaci</translation> </message> <message> <location line="+4"/> <source>Show information about taobaocoin</source> <translation>Zobraz informace o taobaocoinu</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>O &amp;Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Zobraz informace o Qt</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>&amp;Možnosti...</translation> </message> <message> <location line="+6"/> <source>&amp;Encrypt Wallet...</source> <translation>Zaši&amp;fruj peněženku...</translation> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>&amp;Zazálohuj peněženku...</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>Změň &amp;heslo...</translation> </message> <message> <location line="+285"/> <source>Importing blocks from disk...</source> <translation>Importuji bloky z disku...</translation> </message> <message> <location line="+3"/> <source>Reindexing blocks on 
disk...</source> <translation>Vytvářím nový index bloků na disku...</translation> </message> <message> <location line="-347"/> <source>Send coins to a taobaocoin address</source> <translation>Pošli mince na taobaocoinovou adresu</translation> </message> <message> <location line="+49"/> <source>Modify configuration options for taobaocoin</source> <translation>Uprav nastavení taobaocoinu</translation> </message> <message> <location line="+9"/> <source>Backup wallet to another location</source> <translation>Zazálohuj peněženku na jiné místo</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>Změň heslo k šifrování peněženky</translation> </message> <message> <location line="+6"/> <source>&amp;Debug window</source> <translation>&amp;Ladicí okno</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>Otevři ladicí a diagnostickou konzoli</translation> </message> <message> <location line="-4"/> <source>&amp;Verify message...</source> <translation>&amp;Ověř zprávu...</translation> </message> <message> <location line="-165"/> <location line="+530"/> <source>taobaocoin</source> <translation>taobaocoin</translation> </message> <message> <location line="-530"/> <source>Wallet</source> <translation>Peněženka</translation> </message> <message> <location line="+101"/> <source>&amp;Send</source> <translation>&amp;Pošli</translation> </message> <message> <location line="+7"/> <source>&amp;Receive</source> <translation>Při&amp;jmi</translation> </message> <message> <location line="+14"/> <source>&amp;Addresses</source> <translation>&amp;Adresy</translation> </message> <message> <location line="+22"/> <source>&amp;About taobaocoin</source> <translation>O &amp;taobaocoinu</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation>&amp;Zobraz/Skryj</translation> </message> <message> <location line="+1"/> <source>Show or hide the main Window</source> <translation>Zobraz nebo skryj hlavní okno</translation> </message> <message> <location line="+3"/> <source>Encrypt the private keys that belong to your wallet</source> <translation>Zašifruj soukromé klíče ve své peněžence</translation> </message> <message> <location line="+7"/> <source>Sign messages with your taobaocoin addresses to prove you own them</source> <translation>Podepiš zprávy svými taobaocoinovými adresami, čímž prokážeš, že jsi jejich vlastníkem</translation> </message> <message> <location line="+2"/> <source>Verify messages to ensure they were signed with specified taobaocoin addresses</source> <translation>Ověř zprávy, aby ses ujistil, že byly podepsány danými taobaocoinovými adresami</translation> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation>&amp;Soubor</translation> </message> <message> <location line="+7"/> <source>&amp;Settings</source> <translation>&amp;Nastavení</translation> </message> <message> <location line="+6"/> <source>&amp;Help</source> <translation>Ná&amp;pověda</translation> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> <translation>Panel s listy</translation> </message> <message> <location line="+17"/> <location line="+10"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> <message> <location line="+47"/> <source>taobaocoin client</source> <translation>taobaocoin klient</translation> </message> <message numerus="yes"> <location line="+141"/> 
<source>%n active connection(s) to taobaocoin network</source> <translation><numerusform>%n aktivní spojení do taobaocoinové sítě</numerusform><numerusform>%n aktivní spojení do taobaocoinové sítě</numerusform><numerusform>%n aktivních spojení do taobaocoinové sítě</numerusform></translation> </message> <message> <location line="+22"/> <source>No block source available...</source> <translation>Není dostupný žádný zdroj bloků...</translation> </message> <message> <location line="+12"/> <source>Processed %1 of %2 (estimated) blocks of transaction history.</source> <translation>Zpracováno %1 z přibližně %2 bloků transakční historie.</translation> </message> <message> <location line="+4"/> <source>Processed %1 blocks of transaction history.</source> <translation>Zpracováno %1 bloků transakční historie.</translation> </message> <message numerus="yes"> <location line="+20"/> <source>%n hour(s)</source> <translation><numerusform>hodinu</numerusform><numerusform>%n hodiny</numerusform><numerusform>%n hodin</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation><numerusform>den</numerusform><numerusform>%n dny</numerusform><numerusform>%n dnů</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n week(s)</source> <translation><numerusform>týden</numerusform><numerusform>%n týdny</numerusform><numerusform>%n týdnů</numerusform></translation> </message> <message> <location line="+4"/> <source>%1 behind</source> <translation>Stahuji ještě bloky transakcí za poslední %1</translation> </message> <message> <location line="+14"/> <source>Last received block was generated %1 ago.</source> <translation>Poslední stažený blok byl vygenerován %1 zpátky.</translation> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation>Následné transakce ještě nebudou vidět.</translation> </message> <message> <location line="+22"/> <source>Error</source> <translation>Chyba</translation> </message> <message> <location line="+3"/> <source>Warning</source> <translation>Upozornění</translation> </message> <message> <location line="+3"/> <source>Information</source> <translation>Informace</translation> </message> <message> <location line="+70"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation>Tahle transakce přesahuje velikostní limit. I tak ji ale můžeš poslat, pokud za ni zaplatíš poplatek %1, který půjde uzlům, které tvou transakci zpracují, a navíc tak podpoříš síť. 
Chceš zaplatit poplatek?</translation> </message> <message> <location line="-140"/> <source>Up to date</source> <translation>Aktuální</translation> </message> <message> <location line="+31"/> <source>Catching up...</source> <translation>Stahuji...</translation> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation>Potvrď transakční poplatek</translation> </message> <message> <location line="+8"/> <source>Sent transaction</source> <translation>Odeslaná transakce</translation> </message> <message> <location line="+0"/> <source>Incoming transaction</source> <translation>Příchozí transakce</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Datum: %1 Částka: %2 Typ: %3 Adresa: %4 </translation> </message> <message> <location line="+33"/> <location line="+23"/> <source>URI handling</source> <translation>Zpracování URI</translation> </message> <message> <location line="-23"/> <location line="+23"/> <source>URI can not be parsed! This can be caused by an invalid taobaocoin address or malformed URI parameters.</source> <translation>Nepodařilo se analyzovat URI! Důvodem může být neplatná taobaocoinová adresa nebo poškozené parametry URI.</translation> </message> <message> <location line="+17"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>Peněženka je &lt;b&gt;zašifrovaná&lt;/b&gt; a momentálně &lt;b&gt;odemčená&lt;/b&gt;</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>Peněženka je &lt;b&gt;zašifrovaná&lt;/b&gt; a momentálně &lt;b&gt;zamčená&lt;/b&gt;</translation> </message> <message> <location filename="../bitcoin.cpp" line="+111"/> <source>A fatal error occurred. taobaocoin can no longer continue safely and will quit.</source> <translation>Stala se fatální chyba. taobaocoin nemůže bezpečně pokračovat v činnosti, a proto skončí.</translation> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+104"/> <source>Network Alert</source> <translation>Upozornění sítě</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Uprav adresu</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Označení</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>Označení spojené s tímto záznamem v adresáři</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Adresa</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation>Adresa spojená s tímto záznamem v adresáři. 
Lze upravovat jen pro odesílací adresy.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation>Nová přijímací adresa</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Nová odesílací adresa</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Uprav přijímací adresu</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Uprav odesílací adresu</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>Zadaná adresa &quot;%1&quot; už v adresáři je.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid taobaocoin address.</source> <translation>Zadaná adresa &quot;%1&quot; není platná taobaocoinová adresa.</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>Nemohu odemknout peněženku.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Nepodařilo se mi vygenerovat nový klíč.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+424"/> <location line="+12"/> <source>taobaocoin-Qt</source> <translation>taobaocoin-Qt</translation> </message> <message> <location line="-12"/> <source>version</source> <translation>verze</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>Užití:</translation> </message> <message> <location line="+1"/> <source>command-line options</source> <translation>možnosti příkazové řádky</translation> </message> <message> <location line="+4"/> <source>UI options</source> <translation>Možnosti UI</translation> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>Nastavit jazyk, například &quot;de_DE&quot; (výchozí: systémové nastavení)</translation> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>Nastartovat minimalizovaně</translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation>Zobrazit startovací obrazovku (výchozí: 1)</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Možnosti</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>&amp;Hlavní</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source> <translation>Dobrovolný transakční poplatek za každý započatý kB dopomáhá k rychlému zpracování tvých transakcí. 
Většina transakcí má do 1 kB.</translation> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>Platit &amp;transakční poplatek</translation> </message> <message> <location line="+31"/> <source>Automatically start taobaocoin after logging in to the system.</source> <translation>Automaticky spustí taobaocoin po přihlášení do systému.</translation> </message> <message> <location line="+3"/> <source>&amp;Start taobaocoin on system login</source> <translation>S&amp;pustit taobaocoin po přihlášení do systému</translation> </message> <message> <location line="+35"/> <source>Reset all client options to default.</source> <translation>Vrátí všechny volby na výchozí hodnoty.</translation> </message> <message> <location line="+3"/> <source>&amp;Reset Options</source> <translation>&amp;Obnovit nastavení</translation> </message> <message> <location line="+13"/> <source>&amp;Network</source> <translation>&amp;Síť</translation> </message> <message> <location line="+6"/> <source>Automatically open the taobaocoin client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Automaticky otevře potřebný port na routeru. Tohle funguje jen za předpokladu, že tvůj router podporuje UPnP a že je UPnP povolené.</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>Namapovat port přes &amp;UPnP</translation> </message> <message> <location line="+7"/> <source>Connect to the taobaocoin network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation>Připojí se do taobaocoinové sítě přes SOCKS proxy (např. když se připojuje přes Tor).</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation>&amp;Připojit přes SOCKS proxy:</translation> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation>&amp;IP adresa proxy:</translation> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation>IP adresa proxy (např. 127.0.0.1)</translation> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation>Por&amp;t:</translation> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Port proxy (např. 9050)</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation>&amp;Verze SOCKS:</translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 5)</source> <translation>Verze SOCKS proxy (např. 5)</translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation>O&amp;kno</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Po minimalizaci okna zobrazí pouze ikonu v panelu.</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;Minimalizovávat do ikony v panelu</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Zavřením se aplikace minimalizuje. 
Pokud je tato volba zaškrtnuta, tak se aplikace ukončí pouze zvolením Konec v menu.</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>Za&amp;vřením minimalizovat</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>Zobr&amp;azení</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation>&amp;Jazyk uživatelského rozhraní:</translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. This setting will take effect after restarting taobaocoin.</source> <translation>Tady lze nastavit jazyk uživatelského rozhraní. Nastavení se projeví až po restartování taobaocoinu.</translation> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>J&amp;ednotka pro částky: </translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Zvol výchozí podjednotku, která se bude zobrazovat v programu a při posílání mincí.</translation> </message> <message> <location line="+9"/> <source>Whether to show taobaocoin addresses in the transaction list or not.</source> <translation>Zda ukazovat taobaocoinové adresy ve výpisu transakcí nebo ne.</translation> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>Ukazo&amp;vat adresy ve výpisu transakcí</translation> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation>&amp;Budiž</translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>&amp;Zrušit</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation>&amp;Uložit</translation> </message> <message> <location filename="../optionsdialog.cpp" line="+53"/> <source>default</source> <translation>výchozí</translation> </message> <message> <location line="+130"/> <source>Confirm options reset</source> <translation>Potvrzení obnovení nastavení</translation> </message> <message> <location line="+1"/> <source>Some settings may require a client restart to take effect.</source> <translation>Některá nastavení mohou vyžadovat restart klienta, aby se mohly projevit.</translation> </message> <message> <location line="+0"/> <source>Do you want to proceed?</source> <translation>Chceš pokračovat?</translation> </message> <message> <location line="+42"/> <location line="+9"/> <source>Warning</source> <translation>Upozornění</translation> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting taobaocoin.</source> <translation>Nastavení se projeví až po restartování taobaocoinu.</translation> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation>Zadaná adresa proxy je neplatná.</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Formulář</translation> </message> <message> <location line="+50"/> <location line="+166"/> <source>The displayed information may be out of date. 
Your wallet automatically synchronizes with the taobaocoin network after a connection is established, but this process has not completed yet.</source> <translation>Zobrazené informace nemusí být aktuální. Tvá peněženka se automaticky sesynchronizuje s taobaocoinovou sítí, jakmile se s ní spojí. Zatím ale ještě není synchronizace dokončena.</translation> </message> <message> <location line="-124"/> <source>Balance:</source> <translation>Stav účtu:</translation> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>Nepotvrzeno:</translation> </message> <message> <location line="-78"/> <source>Wallet</source> <translation>Peněženka</translation> </message> <message> <location line="+107"/> <source>Immature:</source> <translation>Nedozráno:</translation> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation>Vytěžené mince, které ještě nejsou zralé</translation> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Poslední transakce&lt;/b&gt;</translation> </message> <message> <location line="-101"/> <source>Your current balance</source> <translation>Aktuální stav tvého účtu</translation> </message> <message> <location line="+29"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation>Celkem z transakcí, které ještě nejsou potvrzené a které se ještě nezapočítávají do celkového stavu účtu</translation> </message> <message> <location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation>nesynchronizováno</translation> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start taobaocoin: click-to-pay handler</source> <translation>Nemůžu spustit taobaocoin: obsluha click-to-pay</translation> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation>QR kód</translation> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation>Požadovat platbu</translation> </message> <message> <location line="+56"/> <source>Amount:</source> <translation>Částka:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>Označení:</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>Zpráva:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>&amp;Ulož jako...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation>Chyba při kódování URI do QR kódu.</translation> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation>Zadaná částka je neplatná, překontroluj ji prosím.</translation> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>Výsledná URI je příliš dlouhá, zkus zkrátit text označení / zprávy.</translation> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation>Ulož QR kód</translation> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation>PNG obrázky 
(*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Název klienta</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+339"/> <source>N/A</source> <translation>N/A</translation> </message> <message> <location line="-217"/> <source>Client version</source> <translation>Verze klienta</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>&amp;Informace</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation>Používaná verze OpenSSL</translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation>Čas spuštění</translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>Síť</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>Počet spojení</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation>V testnetu</translation> </message> <message> <location line="+23"/> <source>Block chain</source> <translation>Řetězec bloků</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Aktuální počet bloků</translation> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation>Odhad celkového počtu bloků</translation> </message> <message> <location line="+23"/> <source>Last block time</source> <translation>Čas posledního bloku</translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>&amp;Otevřít</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation>Argumenty z příkazové řádky</translation> </message> <message> <location line="+7"/> <source>Show the taobaocoin-Qt help message to get a list with possible taobaocoin command-line options.</source> <translation>Seznam parametrů taobaocoinu pro příkazovou řádku získáš v nápovědě taobaocoinu Qt.</translation> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation>&amp;Zobrazit</translation> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>&amp;Konzole</translation> </message> <message> <location line="-260"/> <source>Build date</source> <translation>Datum kompilace</translation> </message> <message> <location line="-104"/> <source>taobaocoin - Debug window</source> <translation>taobaocoin - ladicí okno</translation> </message> <message> <location line="+25"/> <source>taobaocoin Core</source> <translation>Jádro taobaocoinu</translation> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation>Soubor s ladicími záznamy</translation> </message> <message> <location line="+7"/> <source>Open the taobaocoin debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation>Otevři soubor s ladicími záznamy taobaocoinu z aktuálního datového adresáře. 
U velkých logů to může pár vteřin zabrat.</translation> </message> <message> <location line="+102"/> <source>Clear console</source> <translation>Vyčistit konzoli</translation> </message> <message> <location filename="../rpcconsole.cpp" line="-30"/> <source>Welcome to the taobaocoin RPC console.</source> <translation>Vítej v taobaocoinové RPC konzoli.</translation> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>V historii se pohybuješ šipkami nahoru a dolů a pomocí &lt;b&gt;Ctrl-L&lt;/b&gt; čistíš obrazovku.</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Napsáním &lt;b&gt;help&lt;/b&gt; si vypíšeš přehled dostupných příkazů.</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+124"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Pošli mince</translation> </message> <message> <location line="+50"/> <source>Send to multiple recipients at once</source> <translation>Pošli více příjemcům naráz</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>Při&amp;dej příjemce</translation> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation>Smaž všechny transakční formuláře</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>Všechno s&amp;maž</translation> </message> <message> <location line="+22"/> <source>Balance:</source> <translation>Stav účtu:</translation> </message> <message> <location line="+10"/> <source>123.456 BTC</source> <translation>123.456 BTC</translation> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Potvrď odeslání</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>P&amp;ošli</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-59"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; pro %2 (%3)</translation> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation>Potvrď odeslání mincí</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>Jsi si jistý, že chceš poslat %1?</translation> </message> <message> <location line="+0"/> <source> and </source> <translation> a </translation> </message> <message> <location line="+23"/> <source>The recipient address is not valid, please recheck.</source> <translation>Adresa příjemce je neplatná, překontroluj ji prosím.</translation> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>Odesílaná částka musí být větší než 0.</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>Částka překračuje stav účtu.</translation> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>Celková částka při připočítání poplatku %1 překročí stav 
účtu.</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Zaznamenána duplikovaná adresa; každá adresa může být v odesílané platbě pouze jednou.</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation>Chyba: Vytvoření transakce selhalo!</translation> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Chyba: Transakce byla odmítnuta. Tohle může nastat, pokud nějaké mince z tvé peněženky už jednou byly utraceny, například pokud používáš kopii souboru wallet.dat a mince byly utraceny v druhé kopii, ale nebyly označeny jako utracené v této.</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>Formulář</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>Čás&amp;tka:</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>&amp;Komu:</translation> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Adresa příjemce (např. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation>Zadej označení této adresy; obojí se ti pak uloží do adresáře</translation> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation>O&amp;značení:</translation> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation>Vyber adresu z adresáře</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Vlož adresu ze schránky</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>Smaž tohoto příjemce</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a taobaocoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Zadej taobaocoinovou adresu (např. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation>Podpisy - podepsat/ověřit zprávu</translation> </message> <message> <location line="+13"/> <source>&amp;Sign Message</source> <translation>&amp;Podepiš zprávu</translation> </message> <message> <location line="+6"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. 
Only sign fully-detailed statements you agree to.</source> <translation>Podepsáním zprávy svými adresami můžeš prokázat, že je skutečně vlastníš. Buď opatrný a nepodepisuj nic vágního; například při phishingových útocích můžeš být lákán, abys něco takového podepsal. Podepisuj pouze zcela úplná a detailní prohlášení, se kterými souhlasíš.</translation> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Adresa, kterou se zpráva podepíše (např. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+10"/> <location line="+213"/> <source>Choose an address from the address book</source> <translation>Vyber adresu z adresáře</translation> </message> <message> <location line="-203"/> <location line="+213"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-203"/> <source>Paste address from clipboard</source> <translation>Vlož adresu ze schránky</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>Sem vepiš zprávu, kterou chceš podepsat</translation> </message> <message> <location line="+7"/> <source>Signature</source> <translation>Podpis</translation> </message> <message> <location line="+27"/> <source>Copy the current signature to the system clipboard</source> <translation>Zkopíruj aktuálně vybraný podpis do systémové schránky</translation> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this taobaocoin address</source> <translation>Podepiš zprávu, čímž prokážeš, že jsi vlastníkem této taobaocoinové adresy</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>Po&amp;depiš zprávu</translation> </message> <message> <location line="+14"/> <source>Reset all sign message fields</source> <translation>Vymaž všechna pole formuláře pro podepsání zprávy</translation> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>Všechno &amp;smaž</translation> </message> <message> <location line="-87"/> <source>&amp;Verify Message</source> <translation>&amp;Ověř zprávu</translation> </message> <message> <location line="+6"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>K ověření podpisu zprávy zadej podepisující adresu, zprávu (ověř si, že správně kopíruješ zalomení řádků, mezery, tabulátory apod.) a podpis. Dávej pozor na to, abys nezkopíroval do podpisu víc, než co je v samotné podepsané zprávě, abys nebyl napálen man-in-the-middle útokem.</translation> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Adresa, kterou je zpráva podepsána (např. 
Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified taobaocoin address</source> <translation>Ověř zprávu, aby ses ujistil, že byla podepsána danou taobaocoinovou adresou</translation> </message> <message> <location line="+3"/> <source>Verify &amp;Message</source> <translation>O&amp;věř zprávu</translation> </message> <message> <location line="+14"/> <source>Reset all verify message fields</source> <translation>Vymaž všechna pole formuláře pro ověření zprávy</translation> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a taobaocoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Zadej taobaocoinovou adresu (např. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>Kliknutím na &quot;Podepiš zprávu&quot; vygeneruješ podpis</translation> </message> <message> <location line="+3"/> <source>Enter taobaocoin signature</source> <translation>Vlož taobaocoinový podpis</translation> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation>Zadaná adresa je neplatná.</translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>Zkontroluj ji prosím a zkus to pak znovu.</translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation>Zadaná adresa nepasuje ke klíči.</translation> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation>Odemčení peněženky bylo zrušeno.</translation> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation>Soukromý klíč pro zadanou adresu není dostupný.</translation> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation>Podepisování zprávy selhalo.</translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation>Zpráva podepsána.</translation> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation>Podpis nejde dekódovat.</translation> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation>Zkontroluj ho prosím a zkus to pak znovu.</translation> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation>Podpis se neshoduje s hašem zprávy.</translation> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation>Ověřování zprávy selhalo.</translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation>Zpráva ověřena.</translation> </message> </context> <context> <name>SplashScreen</name> <message> <location filename="../splashscreen.cpp" line="+22"/> <source>The taobaocoin developers</source> <translation>Vývojáři taobaocoinu</translation> </message> <message> <location line="+1"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> </context> <context> 
<name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+20"/> <source>Open until %1</source> <translation>Otevřeno do %1</translation> </message> <message> <location line="+6"/> <source>%1/offline</source> <translation>%1/offline</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/nepotvrzeno</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 potvrzení</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>Stav</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation><numerusform>, rozesláno přes 1 uzel</numerusform><numerusform>, rozesláno přes %n uzly</numerusform><numerusform>, rozesláno přes %n uzlů</numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Datum</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation>Zdroj</translation> </message> <message> <location line="+0"/> <source>Generated</source> <translation>Vygenerováno</translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation>Od</translation> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation>Pro</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation>vlastní adresa</translation> </message> <message> <location line="-2"/> <source>label</source> <translation>označení</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation>Příjem</translation> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation><numerusform>dozraje po jednom bloku</numerusform><numerusform>dozraje po %n blocích</numerusform><numerusform>dozraje po %n blocích</numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>neakceptováno</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>Výdaj</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>Transakční poplatek</translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>Čistá částka</translation> </message> <message> <location line="+6"/> <source>Message</source> <translation>Zpráva</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>Komentář</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>ID transakce</translation> </message> <message> <location line="+3"/> <source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Vygenerované mince musí čekat 120 bloků, než mohou být utraceny. 
Když jsi vygeneroval tenhle blok, tak byl rozposlán do sítě, aby byl přidán do řetězce bloků. Pokud se mu nepodaří dostat se do řetězce, změní se na &quot;neakceptovaný&quot; a nepůjde utratit. To se občas může stát, pokud jiný uzel vygeneruje blok zhruba ve stejném okamžiku jako ty.</translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation>Ladicí informace</translation> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>Transakce</translation> </message> <message> <location line="+3"/> <source>Inputs</source> <translation>Vstupy</translation> </message> <message> <location line="+23"/> <source>Amount</source> <translation>Částka</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>true</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>false</translation> </message> <message> <location line="-209"/> <source>, has not been successfully broadcast yet</source> <translation>, ještě nebylo rozesláno</translation> </message> <message numerus="yes"> <location line="-35"/> <source>Open for %n more block(s)</source> <translation><numerusform>Otevřeno pro 1 další blok</numerusform><numerusform>Otevřeno pro %n další bloky</numerusform><numerusform>Otevřeno pro %n dalších bloků</numerusform></translation> </message> <message> <location line="+70"/> <source>unknown</source> <translation>neznámo</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Detaily transakce</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>Toto okno zobrazuje detailní popis transakce</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+225"/> <source>Date</source> <translation>Datum</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>Typ</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Adresa</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Částka</translation> </message> <message numerus="yes"> <location line="+57"/> <source>Open for %n more block(s)</source> <translation><numerusform>Otevřeno pro 1 další blok</numerusform><numerusform>Otevřeno pro %n další bloky</numerusform><numerusform>Otevřeno pro %n dalších bloků</numerusform></translation> </message> <message> <location line="+3"/> <source>Open until %1</source> <translation>Otevřeno do %1</translation> </message> <message> <location line="+3"/> <source>Offline (%1 confirmations)</source> <translation>Offline (%1 potvrzení)</translation> </message> <message> <location line="+3"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation>Nepotvrzeno (%1 z %2 potvrzení)</translation> </message> <message> <location line="+3"/> <source>Confirmed (%1 confirmations)</source> <translation>Potvrzeno (%1 potvrzení)</translation> </message> <message numerus="yes"> <location line="+8"/> <source>Mined balance will be available when it matures in %n more block(s)</source> <translation><numerusform>Vytěžené mince budou použitelné po dozrání, tj. po jednom bloku</numerusform><numerusform>Vytěžené mince budou použitelné po dozrání, tj. 
po %n blocích</numerusform><numerusform>Vytěžené mince budou použitelné po dozrání, tj. po %n blocích</numerusform></translation> </message> <message> <location line="+5"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Tento blok nedostal žádný jiný uzel a pravděpodobně nebude akceptován!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Vygenerováno, ale neakceptováno</translation> </message> <message> <location line="+43"/> <source>Received with</source> <translation>Přijato do</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>Přijato od</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Posláno na</translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>Platba sama sobě</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Vytěženo</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(n/a)</translation> </message> <message> <location line="+199"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Stav transakce. Najetím myši na toto políčko si zobrazíš počet potvrzení.</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Datum a čas přijetí transakce.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Druh transakce.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Cílová adresa transakce.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Částka odečtená z nebo přičtená k účtu.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+52"/> <location line="+16"/> <source>All</source> <translation>Vše</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>Dnes</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>Tento týden</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>Tento měsíc</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>Minulý měsíc</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Letos</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>Rozsah...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>Přijato</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Posláno</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>Sám sobě</translation> </message> <message> <location line="+1"/> <source>Mined</source> <translation>Vytěženo</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Ostatní</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Zadej adresu nebo označení pro její vyhledání</translation> 
</message> <message> <location line="+7"/> <source>Min amount</source> <translation>Minimální částka</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Kopíruj adresu</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Kopíruj její označení</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Kopíruj částku</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation>Kopíruj ID transakce</translation> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>Uprav označení</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>Zobraz detaily transakce</translation> </message> <message> <location line="+139"/> <source>Export Transaction Data</source> <translation>Exportuj transakční data</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>CSV formát (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>Potvrzeno</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>Datum</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Typ</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Označení</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Adresa</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>Částka</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>ID</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>Chyba při exportu</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Nemohu zapisovat do souboru %1.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Rozsah:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>až</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+193"/> <source>Send Coins</source> <translation>Pošli mince</translation> </message> </context> <context> <name>WalletView</name> <message> <location filename="../walletview.cpp" line="+42"/> <source>&amp;Export</source> <translation>&amp;Export</translation> </message> <message> <location line="+1"/> <source>Export the data in the current tab to a file</source> <translation>Exportuj data z tohoto panelu do souboru</translation> </message> <message> <location line="+193"/> <source>Backup Wallet</source> <translation>Záloha peněženky</translation> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation>Data peněženky (*.dat)</translation> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation>Zálohování selhalo</translation> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation>Při ukládání peněženky na nové místo se přihodila nějaká chyba.</translation> </message> <message> <location line="+4"/> <source>Backup Successful</source> <translation>Úspěšně zazálohováno</translation> 
</message> <message> <location line="+0"/> <source>The wallet data was successfully saved to the new location.</source> <translation>Data z peněženky byla v pořádku uložena na nové místo.</translation> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+94"/> <source>taobaocoin version</source> <translation>Verze taobaocoinu</translation> </message> <message> <location line="+102"/> <source>Usage:</source> <translation>Užití:</translation> </message> <message> <location line="-29"/> <source>Send command to -server or taobaocoind</source> <translation>Poslat příkaz pro -server nebo taobaocoind</translation> </message> <message> <location line="-23"/> <source>List commands</source> <translation>Výpis příkazů</translation> </message> <message> <location line="-12"/> <source>Get help for a command</source> <translation>Získat nápovědu pro příkaz</translation> </message> <message> <location line="+24"/> <source>Options:</source> <translation>Možnosti:</translation> </message> <message> <location line="+24"/> <source>Specify configuration file (default: taobaocoin.conf)</source> <translation>Konfigurační soubor (výchozí: taobaocoin.conf)</translation> </message> <message> <location line="+3"/> <source>Specify pid file (default: taobaocoind.pid)</source> <translation>PID soubor (výchozí: taobaocoind.pid)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Adresář pro data</translation> </message> <message> <location line="-9"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Nastavit velikost databázové vyrovnávací paměti v megabajtech (výchozí: 25)</translation> </message> <message> <location line="-28"/> <source>Listen for connections on &lt;port&gt; (default: 9333 or testnet: 19333)</source> <translation>Čekat na spojení na &lt;portu&gt; (výchozí: 9333 nebo testnet: 19333)</translation> </message> <message> <location line="+5"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Povolit nejvýše &lt;n&gt; připojení k uzlům (výchozí: 125)</translation> </message> <message> <location line="-48"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Připojit se k uzlu, získat adresy jeho protějšků a odpojit se</translation> </message> <message> <location line="+82"/> <source>Specify your own public address</source> <translation>Specifikuj svou veřejnou adresu</translation> </message> <message> <location line="+3"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Práh pro odpojování zlobivých uzlů (výchozí: 100)</translation> </message> <message> <location line="-134"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Doba ve vteřinách, po kterou se nebudou moci zlobivé uzly znovu připojit (výchozí: 86400)</translation> </message> <message> <location line="-29"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation>Při nastavování naslouchacího RPC portu %i pro IPv4 nastala chyba: %s</translation> </message> <message> <location line="+27"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 9332 or testnet: 19332)</source> <translation>Čekat na JSON RPC spojení na &lt;portu&gt; (výchozí: 9332 nebo testnet: 19332)</translation> </message> <message> <location line="+37"/> <source>Accept command line 
and JSON-RPC commands</source> <translation>Akceptovat příkazy z příkazové řádky a přes JSON-RPC</translation> </message> <message> <location line="+76"/> <source>Run in the background as a daemon and accept commands</source> <translation>Běžet na pozadí jako démon a akceptovat příkazy</translation> </message> <message> <location line="+37"/> <source>Use the test network</source> <translation>Použít testovací síť (testnet)</translation> </message> <message> <location line="-112"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>Přijímat spojení zvenčí (výchozí: 1, pokud není zadáno -proxy nebo -connect)</translation> </message> <message> <location line="-80"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=taobaocoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;taobaocoin Alert&quot; [email protected] </source> <translation>%s, musíš nastavit rpcpassword v konfiguračním souboru: %s Je vhodné použít následující náhodné heslo: rpcuser=taobaocoinrpc rpcpassword=%s (není potřeba si ho pamatovat) rpcuser a rpcpassword NESMÍ být stejné. Pokud konfigurační soubor ještě neexistuje, vytvoř ho tak, aby ho mohl číst pouze vlastník. Je také doporučeno si nastavit alertnotify, abys byl upozorněn na případné problémy; například: alertnotify=echo %%s | mail -s &quot;taobaocoin Alert&quot; [email protected] </translation> </message> <message> <location line="+17"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation>Při nastavování naslouchacího RPC portu %u pro IPv6 nastala chyba, vracím se k IPv4: %s</translation> </message> <message> <location line="+3"/> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation>Poslouchat na zadané adrese. Pro zápis IPv6 adresy použij notaci [adresa]:port</translation> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. taobaocoin is probably already running.</source> <translation>Nedaří se mi získat zámek na datový adresář %s. taobaocoin pravděpodobně už jednou běží.</translation> </message> <message> <location line="+3"/> <source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Chyba: Transakce byla odmítnuta! 
Tohle může nastat, pokud nějaké mince z tvé peněženky už jednou byly utraceny, například pokud používáš kopii souboru wallet.dat a mince byly utraceny v druhé kopii, ale nebyly označeny jako utracené v této.</translation> </message> <message> <location line="+4"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation>Chyba: Tahle transakce vyžaduje transakční poplatek nejméně %s kvůli velikosti zasílané částky, komplexnosti nebo použití nedávno přijatých mincí!</translation> </message> <message> <location line="+3"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation>Spustit příkaz po přijetí relevantního hlášení (%s se v příkazu nahradí za zprávu)</translation> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation>Spustit příkaz, když se objeví transakce týkající se peněženky (%s se v příkazu nahradí za TxID)</translation> </message> <message> <location line="+11"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation>Nastavit maximální velikost prioritních/nízkopoplatkových transakcí v bajtech (výchozí: 27000)</translation> </message> <message> <location line="+6"/> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation>Tohle je testovací verze – používej ji jen na vlastní riziko, ale rozhodně ji nepoužívej k těžbě nebo pro obchodní aplikace</translation> </message> <message> <location line="+5"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Upozornění: -paytxfee je nastaveno velmi vysoko! Toto je transakční poplatek, který zaplatíš za každou poslanou transakci.</translation> </message> <message> <location line="+3"/> <source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source> <translation>Upozornění: Zobrazené transakce nemusí být správné! Možná potřebuješ aktualizovat nebo ostatní uzly potřebují aktualizovat.</translation> </message> <message> <location line="+3"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong taobaocoin will not work properly.</source> <translation>Upozornění: Zkontroluj, že máš v počítači správně nastavený datum a čas! Pokud jsou nastaveny špatně, taobaocoin nebude fungovat správně.</translation> </message> <message> <location line="+3"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Upozornění: nastala chyba při čtení souboru wallet.dat! Všechny klíče se přečetly správně, ale data o transakcích nebo záznamy v adresáři mohou chybět či být nesprávné.</translation> </message> <message> <location line="+3"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Upozornění: soubor wallet.dat je poškozený, data jsou však zachráněna! Původní soubor wallet.dat je uložený jako wallet.{timestamp}.bak v %s. 
Pokud je stav tvého účtu nebo transakce nesprávné, zřejmě bys měl obnovit zálohu.</translation> </message> <message> <location line="+14"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Pokusit se zachránit soukromé klíče z poškozeného souboru wallet.dat</translation> </message> <message> <location line="+2"/> <source>Block creation options:</source> <translation>Možnosti vytvoření bloku:</translation> </message> <message> <location line="+5"/> <source>Connect only to the specified node(s)</source> <translation>Připojit se pouze k zadanému uzlu (příp. zadaným uzlům)</translation> </message> <message> <location line="+3"/> <source>Corrupted block database detected</source> <translation>Bylo zjištěno poškození databáze bloků</translation> </message> <message> <location line="+1"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>Zjistit vlastní IP adresu (výchozí: 1, pokud naslouchá a není zadáno -externalip)</translation> </message> <message> <location line="+1"/> <source>Do you want to rebuild the block database now?</source> <translation>Chceš přestavět databázi bloků hned teď?</translation> </message> <message> <location line="+2"/> <source>Error initializing block database</source> <translation>Chyba při zakládání databáze bloků</translation> </message> <message> <location line="+1"/> <source>Error initializing wallet database environment %s!</source> <translation>Chyba při vytváření databázového prostředí %s pro peněženku!</translation> </message> <message> <location line="+1"/> <source>Error loading block database</source> <translation>Chyba při načítání databáze bloků</translation> </message> <message> <location line="+4"/> <source>Error opening block database</source> <translation>Chyba při otevírání databáze bloků</translation> </message> <message> <location line="+2"/> <source>Error: Disk space is low!</source> <translation>Problém: Na disku je málo místa!</translation> </message> <message> <location line="+1"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation>Chyba: Peněženka je zamčená, nemohu vytvořit transakci!</translation> </message> <message> <location line="+1"/> <source>Error: system error: </source> <translation>Chyba: systémová chyba: </translation> </message> <message> <location line="+1"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>Nepodařilo se naslouchat na žádném portu. 
Použij -listen=0, pokud to byl tvůj záměr.</translation> </message> <message> <location line="+1"/> <source>Failed to read block info</source> <translation>Nepodařilo se přečíst informace o bloku</translation> </message> <message> <location line="+1"/> <source>Failed to read block</source> <translation>Nepodařilo se přečíst blok</translation> </message> <message> <location line="+1"/> <source>Failed to sync block index</source> <translation>Nepodařilo se sesynchronizovat index bloků</translation> </message> <message> <location line="+1"/> <source>Failed to write block index</source> <translation>Nepodařilo se zapsat index bloků</translation> </message> <message> <location line="+1"/> <source>Failed to write block info</source> <translation>Nepodařilo se zapsat informace o bloku</translation> </message> <message> <location line="+1"/> <source>Failed to write block</source> <translation>Nepodařilo se zapsat blok</translation> </message> <message> <location line="+1"/> <source>Failed to write file info</source> <translation>Nepodařilo se zapsat informace o souboru</translation> </message> <message> <location line="+1"/> <source>Failed to write to coin database</source> <translation>Selhal zápis do databáze mincí</translation> </message> <message> <location line="+1"/> <source>Failed to write transaction index</source> <translation>Nepodařilo se zapsat index transakcí</translation> </message> <message> <location line="+1"/> <source>Failed to write undo data</source> <translation>Nepodařilo se zapsat data o vracení změn</translation> </message> <message> <location line="+2"/> <source>Find peers using DNS lookup (default: 1 unless -connect)</source> <translation>Hledat uzly přes DNS (výchozí: 1, pokud není zadáno -connect)</translation> </message> <message> <location line="+1"/> <source>Generate coins (default: 0)</source> <translation>Generovat mince (výchozí: 0)</translation> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 288, 0 = all)</source> <translation>Kolik bloků při startu zkontrolovat (výchozí: 288, 0 = všechny)</translation> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-4, default: 3)</source> <translation>Jak moc důkladná má být verifikace bloků (0-4, výchozí: 3)</translation> </message> <message> <location line="+19"/> <source>Not enough file descriptors available.</source> <translation>Je nedostatek deskriptorů souborů.</translation> </message> <message> <location line="+8"/> <source>Rebuild block chain index from current blk000??.dat files</source> <translation>Znovu vytvořit index řetězce bloků z aktuálních blk000??.dat souborů</translation> </message> <message> <location line="+16"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation>Nastavení počtu vláken pro servisní RPC volání (výchozí: 4)</translation> </message> <message> <location line="+26"/> <source>Verifying blocks...</source> <translation>Ověřuji bloky...</translation> </message> <message> <location line="+1"/> <source>Verifying wallet...</source> <translation>Kontroluji peněženku...</translation> </message> <message> <location line="-69"/> <source>Imports blocks from external blk000??.dat file</source> <translation>Importovat bloky z externího souboru blk000??.dat</translation> </message> <message> <location line="-76"/> <source>Set the number of script verification threads (up to 16, 0 = auto, &lt;0 = leave that many cores free, default: 0)</source> <translation>Nastavení počtu 
vláken pro verifikaci skriptů (max. 16, 0 = automaticky, &lt;0 = nechat daný počet jader volný, výchozí: 0)</translation> </message> <message> <location line="+77"/> <source>Information</source> <translation>Informace</translation> </message> <message> <location line="+3"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation>Neplatná -tor adresa: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Neplatná částka pro -minrelaytxfee=&lt;částka&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Neplatná částka pro -mintxfee=&lt;částka&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+8"/> <source>Maintain a full transaction index (default: 0)</source> <translation>Spravovat úplný index transakcí (výchozí: 0)</translation> </message> <message> <location line="+2"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>Maximální velikost přijímacího bufferu pro každé spojení, &lt;n&gt;*1000 bajtů (výchozí: 5000)</translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>Maximální velikost odesílacího bufferu pro každé spojení, &lt;n&gt;*1000 bajtů (výchozí: 1000)</translation> </message> <message> <location line="+2"/> <source>Only accept block chain matching built-in checkpoints (default: 1)</source> <translation>Uznávat pouze řetěz bloků, který odpovídá vnitřním kontrolním bodům (výchozí: 1)</translation> </message> <message> <location line="+1"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation>Připojit se pouze k uzlům v &lt;net&gt; síti (IPv4, IPv6 nebo Tor)</translation> </message> <message> <location line="+2"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation>Tisknout speciální ladicí informace. 
Implikuje použití všech -debug* voleb</translation> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation>Tisknout speciální ladicí informace o síti</translation> </message> <message> <location line="+2"/> <source>Prepend debug output with timestamp</source> <translation>Připojit před ladicí výstup časové razítko</translation> </message> <message> <location line="+5"/> <source>SSL options: (see the taobaocoin Wiki for SSL setup instructions)</source> <translation>Možnosti SSL: (viz instrukce nastavení SSL v taobaocoin Wiki)</translation> </message> <message> <location line="+1"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation>Zvol verzi socks proxy (4-5, výchozí: 5)</translation> </message> <message> <location line="+3"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Posílat stopovací/ladicí informace do konzole místo do souboru debug.log</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation>Posílat stopovací/ladicí informace do debuggeru</translation> </message> <message> <location line="+5"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation>Nastavit maximální velikost bloku v bajtech (výchozí: 250000)</translation> </message> <message> <location line="+1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation>Nastavit minimální velikost bloku v bajtech (výchozí: 0)</translation> </message> <message> <location line="+2"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Při spuštění klienta zmenšit soubor debug.log (výchozí: 1, pokud není zadáno -debug)</translation> </message> <message> <location line="+1"/> <source>Signing transaction failed</source> <translation>Podepisování transakce selhalo</translation> </message> <message> <location line="+2"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>Zadej časový limit spojení v milisekundách (výchozí: 5000)</translation> </message> <message> <location line="+4"/> <source>System error: </source> <translation>Systémová chyba: </translation> </message> <message> <location line="+4"/> <source>Transaction amount too small</source> <translation>Částka v transakci je příliš malá</translation> </message> <message> <location line="+1"/> <source>Transaction amounts must be positive</source> <translation>Částky v transakci musí být kladné</translation> </message> <message> <location line="+1"/> <source>Transaction too large</source> <translation>Transakce je příliš velká</translation> </message> <message> <location line="+7"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>Použít UPnP k namapování naslouchacího portu (výchozí: 0)</translation> </message> <message> <location line="+1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Použít UPnP k namapování naslouchacího portu (výchozí: 1, pokud naslouchá)</translation> </message> <message> <location line="+1"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation>Použít proxy k připojení ke skrytým službám (výchozí: stejné jako -proxy)</translation> </message> <message> <location line="+2"/> <source>Username for JSON-RPC connections</source> <translation>Uživatelské jméno pro JSON-RPC spojení</translation> </message> <message> <location 
line="+4"/> <source>Warning</source> <translation>Upozornění</translation> </message> <message> <location line="+1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Upozornění: tahle verze je zastaralá, měl bys ji aktualizovat!</translation> </message> <message> <location line="+1"/> <source>You need to rebuild the databases using -reindex to change -txindex</source> <translation>Je třeba přestavět databázi použitím -reindex, aby bylo možné změnit -txindex</translation> </message> <message> <location line="+1"/> <source>wallet.dat corrupt, salvage failed</source> <translation>Soubor wallet.dat je poškozen, jeho záchrana se nezdařila</translation> </message> <message> <location line="-50"/> <source>Password for JSON-RPC connections</source> <translation>Heslo pro JSON-RPC spojení</translation> </message> <message> <location line="-67"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Povolit JSON-RPC spojení ze specifikované IP adresy</translation> </message> <message> <location line="+76"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Posílat příkazy uzlu běžícím na &lt;ip&gt; (výchozí: 127.0.0.1)</translation> </message> <message> <location line="-120"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Spustit příkaz, když se změní nejlepší blok (%s se v příkazu nahradí hashem bloku)</translation> </message> <message> <location line="+147"/> <source>Upgrade wallet to latest format</source> <translation>Převést peněženku na nejnovější formát</translation> </message> <message> <location line="-21"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Nastavit zásobník klíčů na velikost &lt;n&gt; (výchozí: 100)</translation> </message> <message> <location line="-12"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Přeskenovat řetězec bloků na chybějící transakce tvé pěněženky</translation> </message> <message> <location line="+35"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Použít OpenSSL (https) pro JSON-RPC spojení</translation> </message> <message> <location line="-26"/> <source>Server certificate file (default: server.cert)</source> <translation>Soubor se serverovým certifikátem (výchozí: server.cert)</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Soubor se serverovým soukromým klíčem (výchozí: server.pem)</translation> </message> <message> <location line="-151"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>Akceptovatelné šifry (výchozí: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation> </message> <message> <location line="+165"/> <source>This help message</source> <translation>Tato nápověda</translation> </message> <message> <location line="+6"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>Nedaří se mi připojit na %s na tomhle počítači (operace bind vrátila chybu %d, %s)</translation> </message> <message> <location line="-91"/> <source>Connect through socks proxy</source> <translation>Připojit se přes socks proxy</translation> </message> <message> <location line="-10"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Povolit DNS dotazy pro -addnode (přidání 
uzlu), -seednode a -connect (připojení)</translation> </message> <message> <location line="+55"/> <source>Loading addresses...</source> <translation>Načítám adresy...</translation> </message> <message> <location line="-35"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Chyba při načítání wallet.dat: peněženka je poškozená</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat: Wallet requires newer version of taobaocoin</source> <translation>Chyba při načítání wallet.dat: peněženka vyžaduje novější verzi taobaocoinu</translation> </message> <message> <location line="+93"/> <source>Wallet needed to be rewritten: restart taobaocoin to complete</source> <translation>Soubor s peněženkou potřeboval přepsat: restartuj taobaocoin, aby se operace dokončila</translation> </message> <message> <location line="-95"/> <source>Error loading wallet.dat</source> <translation>Chyba při načítání wallet.dat</translation> </message> <message> <location line="+28"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Neplatná -proxy adresa: &apos;%s&apos;</translation> </message> <message> <location line="+56"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>V -onlynet byla uvedena neznámá síť: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation>V -socks byla požadována neznámá verze proxy: %i</translation> </message> <message> <location line="-96"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation>Nemohu přeložit -bind adresu: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation>Nemohu přeložit -externalip adresu: &apos;%s&apos;</translation> </message> <message> <location line="+44"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Neplatná částka pro -paytxfee=&lt;částka&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount</source> <translation>Neplatná částka</translation> </message> <message> <location line="-6"/> <source>Insufficient funds</source> <translation>Nedostatek prostředků</translation> </message> <message> <location line="+10"/> <source>Loading block index...</source> <translation>Načítám index bloků...</translation> </message> <message> <location line="-57"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Přidat uzel, ke kterému se připojit a snažit se spojení udržet</translation> </message> <message> <location line="-25"/> <source>Unable to bind to %s on this computer. taobaocoin is probably already running.</source> <translation>Nedaří se mi připojit na %s na tomhle počítači. 
taobaocoin už pravděpodobně jednou běží.</translation> </message> <message> <location line="+64"/> <source>Fee per KB to add to transactions you send</source> <translation>Poplatek za kB, který se přidá ke každé odeslané transakci</translation> </message> <message> <location line="+19"/> <source>Loading wallet...</source> <translation>Načítám peněženku...</translation> </message> <message> <location line="-52"/> <source>Cannot downgrade wallet</source> <translation>Nemohu převést peněženku do staršího formátu</translation> </message> <message> <location line="+3"/> <source>Cannot write default address</source> <translation>Nemohu napsat výchozí adresu</translation> </message> <message> <location line="+64"/> <source>Rescanning...</source> <translation>Přeskenovávám...</translation> </message> <message> <location line="-57"/> <source>Done loading</source> <translation>Načítání dokončeno</translation> </message> <message> <location line="+82"/> <source>To use the %s option</source> <translation>K použití volby %s</translation> </message> <message> <location line="-74"/> <source>Error</source> <translation>Chyba</translation> </message> <message> <location line="-31"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>Musíš nastavit rpcpassword=&lt;heslo&gt; v konfiguračním souboru: %s Pokud konfigurační soubor ještě neexistuje, vytvoř ho tak, aby ho mohl číst pouze vlastník.</translation> </message> </context> </TS><|fim▁end|>
</message> <message>
<|file_name|>buildCts.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # Copyright (C) 2009 The Android Open Source Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Module for generating CTS test descriptions and test plans.""" import glob import os import re import shutil import subprocess import sys import xml.dom.minidom as dom from cts import tools from multiprocessing import Pool def GetSubDirectories(root): """Return all directories under the given root directory.""" return [x for x in os.listdir(root) if os.path.isdir(os.path.join(root, x))] def GetMakeFileVars(makefile_path): """Extracts variable definitions from the given make file. Args: makefile_path: Path to the make file. Returns: A dictionary mapping variable names to their assigned value. """ result = {} pattern = re.compile(r'^\s*([^:#=\s]+)\s*:=\s*(.*?[^\\])$', re.MULTILINE + re.DOTALL) stream = open(makefile_path, 'r') content = stream.read() for match in pattern.finditer(content): result[match.group(1)] = match.group(2) stream.close() return result class CtsBuilder(object): """Main class for generating test descriptions and test plans.""" def __init__(self, argv): """Initialize the CtsBuilder from command line arguments.""" if len(argv) != 6: print 'Usage: %s <testRoot> <ctsOutputDir> <tempDir> <androidRootDir> <docletPath>' % argv[0] print '' print 'testRoot: Directory under which to search for CTS tests.' print 'ctsOutputDir: Directory in which the CTS repository should be created.' print 'tempDir: Directory to use for storing temporary files.' print 'androidRootDir: Root directory of the Android source tree.' print 'docletPath: Class path where the DescriptionGenerator doclet can be found.' 
sys.exit(1) self.test_root = sys.argv[1] self.out_dir = sys.argv[2] self.temp_dir = sys.argv[3] self.android_root = sys.argv[4] self.doclet_path = sys.argv[5] self.test_repository = os.path.join(self.out_dir, 'repository/testcases') self.plan_repository = os.path.join(self.out_dir, 'repository/plans') self.definedplans_repository = os.path.join(self.android_root, 'cts/tests/plans') def GenerateTestDescriptions(self): """Generate test descriptions for all packages.""" pool = Pool(processes=2) # generate test descriptions for android tests results = [] pool.close() pool.join() return sum(map(lambda result: result.get(), results)) def __WritePlan(self, plan, plan_name): print 'Generating test plan %s' % plan_name plan.Write(os.path.join(self.plan_repository, plan_name + '.xml')) def GenerateTestPlans(self): """Generate default test plans.""" # TODO: Instead of hard-coding the plans here, use a configuration file, # such as test_defs.xml packages = [] descriptions = sorted(glob.glob(os.path.join(self.test_repository, '*.xml'))) for description in descriptions: doc = tools.XmlFile(description) packages.append(doc.GetAttr('TestPackage', 'appPackageName')) # sort the list to give the same sequence based on name packages.sort() plan = tools.TestPlan(packages) plan.Exclude('android\.performance.*') self.__WritePlan(plan, 'CTS') self.__WritePlan(plan, 'CTS-TF') plan = tools.TestPlan(packages) plan.Exclude('android\.performance.*') plan.Exclude('android\.media\.cts\.StreamingMediaPlayerTest.*') # Test plan to not include media streaming tests self.__WritePlan(plan, 'CTS-No-Media-Stream') plan = tools.TestPlan(packages) plan.Exclude('android\.performance.*') self.__WritePlan(plan, 'SDK') plan.Exclude(r'android\.signature') plan.Exclude(r'android\.core.*') self.__WritePlan(plan, 'Android') plan = tools.TestPlan(packages) plan.Include(r'android\.core\.tests.*') plan.Exclude(r'android\.core\.tests\.libcore.\package.\harmony*') self.__WritePlan(plan, 'Java') # TODO: remove this once the tests are fixed and merged into Java plan above. plan = tools.TestPlan(packages) plan.Include(r'android\.core\.tests\.libcore.\package.\harmony*') self.__WritePlan(plan, 'Harmony') plan = tools.TestPlan(packages) plan.Include(r'android\.core\.vm-tests-tf') self.__WritePlan(plan, 'VM-TF') plan = tools.TestPlan(packages) plan.Include(r'android\.tests\.appsecurity') self.__WritePlan(plan, 'AppSecurity') # hard-coded white list for PDK plan plan.Exclude('.*') plan.Include('android\.aadb') plan.Include('android\.bluetooth') plan.Include('android\.graphics.*') plan.Include('android\.hardware') plan.Include('android\.media') plan.Exclude('android\.mediastress') plan.Include('android\.net') plan.Include('android\.opengl.*') plan.Include('android\.renderscript') plan.Include('android\.telephony') plan.Include('android\.nativemedia.*') plan.Include('com\.android\.cts\..*')#TODO(stuartscott): Should PDK have all these? 
self.__WritePlan(plan, 'PDK') flaky_tests = BuildCtsFlakyTestList() # CTS Stable plan plan = tools.TestPlan(packages) plan.Exclude(r'com\.android\.cts\.browserbench') for package, test_list in flaky_tests.iteritems(): plan.ExcludeTests(package, test_list) self.__WritePlan(plan, 'CTS-stable') # CTS Flaky plan - list of tests known to be flaky in lab environment plan = tools.TestPlan(packages) plan.Exclude('.*') plan.Include(r'com\.android\.cts\.browserbench') for package, test_list in flaky_tests.iteritems(): plan.Include(package+'$') plan.IncludeTests(package, test_list) self.__WritePlan(plan, 'CTS-flaky') small_tests = BuildAospSmallSizeTestList() medium_tests = BuildAospMediumSizeTestList() new_test_packages = BuildCtsVettedNewPackagesList() # CTS - sub plan for public, small size tests plan = tools.TestPlan(packages) plan.Exclude('.*') for package, test_list in small_tests.iteritems(): plan.Include(package+'$') plan.Exclude(r'com\.android\.cts\.browserbench') for package, test_list in flaky_tests.iteritems(): plan.ExcludeTests(package, test_list) self.__WritePlan(plan, 'CTS-kitkat-small') # CTS - sub plan for public, medium size tests plan = tools.TestPlan(packages) plan.Exclude('.*') for package, test_list in medium_tests.iteritems(): plan.Include(package+'$') plan.Exclude(r'com\.android\.cts\.browserbench') for package, test_list in flaky_tests.iteritems(): plan.ExcludeTests(package, test_list) self.__WritePlan(plan, 'CTS-kitkat-medium') # CTS - sub plan for hardware tests which is public, large plan = tools.TestPlan(packages) plan.Exclude('.*') plan.Include(r'android\.hardware$') plan.Exclude(r'com\.android\.cts\.browserbench') for package, test_list in flaky_tests.iteritems(): plan.ExcludeTests(package, test_list) self.__WritePlan(plan, 'CTS-hardware')<|fim▁hole|> # CTS - sub plan for media tests which is public, large plan = tools.TestPlan(packages) plan.Exclude('.*') plan.Include(r'android\.media$') plan.Include(r'android\.view$') plan.Exclude(r'com\.android\.cts\.browserbench') for package, test_list in flaky_tests.iteritems(): plan.ExcludeTests(package, test_list) self.__WritePlan(plan, 'CTS-media') # CTS - sub plan for mediastress tests which is public, large plan = tools.TestPlan(packages) plan.Exclude('.*') plan.Include(r'android\.mediastress$') plan.Exclude(r'com\.android\.cts\.browserbench') for package, test_list in flaky_tests.iteritems(): plan.ExcludeTests(package, test_list) self.__WritePlan(plan, 'CTS-mediastress') # CTS - sub plan for new tests that are vetted for L launch plan = tools.TestPlan(packages) plan.Exclude('.*') for package, test_list in new_test_packages.iteritems(): plan.Include(package+'$') plan.Exclude(r'com\.android\.cts\.browserbench') for package, test_list in flaky_tests.iteritems(): plan.ExcludeTests(package, test_list) self.__WritePlan(plan, 'CTS-l-tests') # CTS - sub plan for new test packages added for staging plan = tools.TestPlan(packages) for package, test_list in small_tests.iteritems(): plan.Exclude(package+'$') for package, test_list in medium_tests.iteritems(): plan.Exclude(package+'$') for package, tests_list in new_test_packages.iteritems(): plan.Exclude(package+'$') plan.Exclude(r'android\.hardware$') plan.Exclude(r'android\.media$') plan.Exclude(r'android\.view$') plan.Exclude(r'android\.mediastress$') plan.Exclude(r'com\.android\.cts\.browserbench') for package, test_list in flaky_tests.iteritems(): plan.ExcludeTests(package, test_list) self.__WritePlan(plan, 'CTS-staging') plan = tools.TestPlan(packages) plan.Exclude('.*') 
plan.Include(r'com\.drawelements\.') self.__WritePlan(plan, 'CTS-DEQP') plan = tools.TestPlan(packages) plan.Exclude('.*') plan.Include(r'android\.webgl') self.__WritePlan(plan, 'CTS-webview') def BuildAospMediumSizeTestList(): """ Construct a defaultdict that lists package names of medium tests already published to aosp. """ return { 'android.app' : [], 'android.core.tests.libcore.package.libcore' : [], 'android.core.tests.libcore.package.org' : [], 'android.core.vm-tests-tf' : [], 'android.dpi' : [], 'android.host.security' : [], 'android.net' : [], 'android.os' : [], 'android.permission2' : [], 'android.security' : [], 'android.telephony' : [], 'android.webkit' : [], 'android.widget' : [], 'com.android.cts.browserbench' : []} def BuildAospSmallSizeTestList(): """ Construct a defaultdict that lists package names of small tests already published to aosp. """ return { 'android.aadb' : [], 'android.acceleration' : [], 'android.accessibility' : [], 'android.accessibilityservice' : [], 'android.accounts' : [], 'android.admin' : [], 'android.animation' : [], 'android.bionic' : [], 'android.bluetooth' : [], 'android.calendarcommon' : [], 'android.content' : [], 'android.core.tests.libcore.package.com' : [], 'android.core.tests.libcore.package.conscrypt' : [], 'android.core.tests.libcore.package.dalvik' : [], 'android.core.tests.libcore.package.sun' : [], 'android.core.tests.libcore.package.tests' : [], 'android.database' : [], 'android.dreams' : [], 'android.drm' : [], 'android.effect' : [], 'android.gesture' : [], 'android.graphics' : [], 'android.graphics2' : [], 'android.jni' : [], 'android.keystore' : [], 'android.location' : [], 'android.nativemedia.sl' : [], 'android.nativemedia.xa' : [], 'android.nativeopengl' : [], 'android.ndef' : [], 'android.opengl' : [], 'android.openglperf' : [], 'android.permission' : [], 'android.preference' : [], 'android.preference2' : [], 'android.provider' : [], 'android.renderscript' : [], 'android.rscpp' : [], 'android.rsg' : [], 'android.sax' : [], 'android.signature' : [], 'android.speech' : [], 'android.tests.appsecurity' : [], 'android.text' : [], 'android.textureview' : [], 'android.theme' : [], 'android.usb' : [], 'android.util' : [], 'com.android.cts.dram' : [], 'com.android.cts.filesystemperf' : [], 'com.android.cts.jank' : [], 'com.android.cts.opengl' : [], 'com.android.cts.simplecpu' : [], 'com.android.cts.ui' : [], 'com.android.cts.uihost' : [], 'com.android.cts.videoperf' : [], 'zzz.android.monkey' : []} def BuildCtsVettedNewPackagesList(): """ Construct a defaultdict that maps package names that are vetted for L. 
""" return { 'android.JobScheduler' : [], 'android.core.tests.libcore.package.harmony_annotation' : [], 'android.core.tests.libcore.package.harmony_beans' : [], 'android.core.tests.libcore.package.harmony_java_io' : [], 'android.core.tests.libcore.package.harmony_java_lang' : [], 'android.core.tests.libcore.package.harmony_java_math' : [], 'android.core.tests.libcore.package.harmony_java_net' : [], 'android.core.tests.libcore.package.harmony_java_nio' : [], 'android.core.tests.libcore.package.harmony_java_util' : [], 'android.core.tests.libcore.package.harmony_java_text' : [], 'android.core.tests.libcore.package.harmony_javax_security' : [], 'android.core.tests.libcore.package.harmony_logging' : [], 'android.core.tests.libcore.package.harmony_prefs' : [], 'android.core.tests.libcore.package.harmony_sql' : [], 'android.core.tests.libcore.package.jsr166' : [], 'android.core.tests.libcore.package.okhttp' : [], 'android.display' : [], 'android.host.theme' : [], 'android.jdwp' : [], 'android.location2' : [], 'android.print' : [], 'android.renderscriptlegacy' : [], 'android.signature' : [], 'android.tv' : [], 'android.uiautomation' : [], 'android.uirendering' : [], 'android.webgl' : [], 'com.drawelements.deqp.gles3' : [], 'com.drawelements.deqp.gles31' : []} def BuildCtsFlakyTestList(): """ Construct a defaultdict that maps package name to a list of tests that are known to be flaky in the lab or not passing on userdebug builds. """ return { 'android.app' : [ 'cts.ActivityManagerTest#testIsRunningInTestHarness',], 'android.dpi' : [ 'cts.DefaultManifestAttributesSdkTest#testPackageHasExpectedSdkVersion',], 'android.hardware' : [ 'cts.CameraTest#testVideoSnapshot', 'cts.CameraGLTest#testCameraToSurfaceTextureMetadata', 'cts.CameraGLTest#testSetPreviewTextureBothCallbacks', 'cts.CameraGLTest#testSetPreviewTexturePreviewCallback',], 'android.media' : [ 'cts.DecoderTest#testCodecResetsH264WithSurface', 'cts.StreamingMediaPlayerTest#testHLS',], 'android.net' : [ 'cts.ConnectivityManagerTest#testStartUsingNetworkFeature_enableHipri', 'cts.DnsTest#testDnsWorks', 'cts.SSLCertificateSocketFactoryTest#testCreateSocket', 'cts.SSLCertificateSocketFactoryTest#test_createSocket_bind', 'cts.SSLCertificateSocketFactoryTest#test_createSocket_simple', 'cts.SSLCertificateSocketFactoryTest#test_createSocket_wrapping', 'cts.TrafficStatsTest#testTrafficStatsForLocalhost', 'wifi.cts.NsdManagerTest#testAndroidTestCaseSetupProperly',], 'android.os' : [ 'cts.BuildVersionTest#testReleaseVersion', 'cts.BuildTest#testIsSecureUserBuild',], 'android.security' : [ 'cts.BannedFilesTest#testNoSu', 'cts.BannedFilesTest#testNoSuInPath', 'cts.ListeningPortsTest#testNoRemotelyAccessibleListeningUdp6Ports', 'cts.ListeningPortsTest#testNoRemotelyAccessibleListeningUdpPorts', 'cts.PackageSignatureTest#testPackageSignatures', 'cts.SELinuxDomainTest#testSuDomain', 'cts.SELinuxHostTest#testAllEnforcing',], 'android.webkit' : [ 'cts.WebViewClientTest#testOnUnhandledKeyEvent',], 'com.android.cts.filesystemperf' : [ 'RandomRWTest#testRandomRead', 'RandomRWTest#testRandomUpdate',], '' : []} def LogGenerateDescription(name): print 'Generating test description for package %s' % name if __name__ == '__main__': builder = CtsBuilder(sys.argv) result = builder.GenerateTestDescriptions() if result != 0: sys.exit(result) builder.GenerateTestPlans()<|fim▁end|>
<|file_name|>add-ebook.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core'; import {Router, ActivatedRoute, Params} from '@angular/router'; import { FlashMessagesService } from 'angular2-flash-messages'; import { LearnService } from '../../services/learn.service'; import { AuthService } from '../../services/auth.service'; import { MainService } from '../../services/main.service'; @Component({ selector: 'app-add-ebook', templateUrl: './add-ebook.component.html', styleUrls: ['./add-ebook.component.css'] }) export class AddEbookComponent implements OnInit { subtopic: String; topic: String; name: String; description: String; serverAddress: String; constructor( private router: Router, private activatedRoute: ActivatedRoute, private flashMessagesService: FlashMessagesService, private learnService: LearnService, private authService: AuthService, private mainService: MainService,<|fim▁hole|> ngOnInit() { this.serverAddress = this.mainService.getServerAddress(); this.activatedRoute.queryParams.subscribe((params: Params) => { this.subtopic = params['subtopic']; this.topic = params['topic']; }); } }<|fim▁end|>
) { }
<|file_name|>4Sum.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2012 Binson Zhang. All rights reserved. * * @author Binson Zhang <[email protected]> * @date 2012-12-11 */ #include <iostream> #include <vector> #include <algorithm> using namespace std; class Solution { public: struct TwoSum { int sum; size_t a; size_t b; TwoSum(int sum = 0, size_t a = 0, size_t b = 0): sum(sum), a(a), b(b) {} bool operator<(const TwoSum& other) const { if (sum == other.sum) { if (a == other.a) { return b < other.b; } return a < other.a; } return sum < other.sum; } }; struct SumABCmp { bool operator()(const TwoSum& lhs, const TwoSum& rhs) const { if (lhs.sum == rhs.sum) { if (lhs.a == rhs.a) { return lhs.b < rhs.b; } return lhs.a < rhs.a; } return lhs.sum < rhs.sum; } } sumabcmp; struct SumCmp { bool operator()(const TwoSum& lhs, const TwoSum& rhs) const { return lhs.sum < rhs.sum; } } sumcmp; vector<vector<int> > fourSum(vector<int> &num, int target) { sort(num.begin(), num.end()); size_t n = num.size(); vector<TwoSum> sums; for (size_t i = 0; i < n; ++i) { for (size_t j = i + 1; j < n; ++j) { sums.push_back(TwoSum(num[i] + num[j], i, j)); } } sort(sums.begin(), sums.end(), sumabcmp); vector<vector<int> > vecs; TwoSum val; for (size_t i = 0; i < n; ++i) { if (i > 0 && num[i - 1] == num[i]) continue; for (size_t j = i + 1; j + 2 < n; ++j) { if (j > i + 1 && num[j - 1] == num[j]) continue; val.sum = target - (num[i] + num[j]); pair<vector<TwoSum>::iterator,vector<TwoSum>::iterator> bounds = equal_range(sums.begin(), sums.end(), val, sumcmp); bool prv = false; int prv_third = 0; for (vector<TwoSum>::iterator it = bounds.first; it != bounds.second; ++it) { if (it->a <= j) continue; if (!prv) { prv = true; } else if (prv_third == num[it->a]) { continue; } prv_third = num[it->a]; vecs.push_back(vector<int>()); vector<int>& v = vecs.back(); v.push_back(num[i]); v.push_back(num[j]); v.push_back(num[it->a]); v.push_back(num[it->b]); } } } return vecs; } }; #include "util.h" int main(int argc, char **argv) { std::cout << "------" << argv[0] << "------" << std::endl; // int arr[] = {1, 0, -1, 0, -2, 2}; // int arr[] = {-2, -1, 0, 0, 1, 2}; // int arr[] = {0}; int arr[] = {0, 0, 0, 0}; int sz = sizeof(arr) / sizeof(arr[0]); vector<int> num(arr, arr + sz); int target = 0; <|fim▁hole|> Solution s; vector<vector<int> > vecs = s.fourSum(num, target); std::cout << "Output:\n"; Output(vecs); return 0; }<|fim▁end|>
std::cout << "Input:\n"; Output(num);
<|file_name|>utils.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use rustc::front::map as ast_map; use rustc::lint::{LateContext, LintContext}; use rustc::middle::def; use rustc::middle::def_id::DefId; use rustc_front::hir; use syntax::ast; use syntax::attr::mark_used; use syntax::codemap::{ExpnFormat, Span}; use syntax::ptr::P; /// Matches a type with a provided string, and returns its type parameters if successful /// /// Try not to use this for types defined in crates you own, use match_lang_ty instead (for lint passes) pub fn match_ty_unwrap<'a>(ty: &'a ast::Ty, segments: &[&str]) -> Option<&'a [P<ast::Ty>]> { match ty.node { ast::TyPath(_, ast::Path { segments: ref seg, .. }) => { // So hir::Path isn't the full path, just the tokens that were provided. // I could muck around with the maps and find the full path // however the more efficient way is to simply reverse the iterators and zip them // which will compare them in reverse until one of them runs out of segments if seg.iter().rev().zip(segments.iter().rev()).all(|(a, b)| a.identifier.name.as_str() == *b) { match seg.last() { Some(&ast::PathSegment { parameters: ast::PathParameters::AngleBracketed(ref a), .. }) => { Some(&a.types) } _ => None } } else { None } }, _ => None } } /// Checks if a type has a #[servo_lang = "str"] attribute pub fn match_lang_ty(cx: &LateContext, ty: &hir::Ty, value: &str) -> bool { match ty.node { hir::TyPath(..) => {}, _ => return false, } let def = match cx.tcx.def_map.borrow().get(&ty.id) { Some(&def::PathResolution { base_def: def, .. }) => def, _ => return false, }; if let def::Def::PrimTy(_) = def { return false; } match_lang_did(cx, def.def_id(), value) } pub fn match_lang_did(cx: &LateContext, did: DefId, value: &str) -> bool { cx.tcx.get_attrs(did).iter().any(|attr| { match attr.node.value.node { ast::MetaNameValue(ref name, ref val) if &**name == "servo_lang" => { match val.node { ast::LitStr(ref v, _) if &**v == value => { mark_used(attr); true }, _ => false, } } _ => false, } }) } // Determines if a block is in an unsafe context so that an unhelpful // lint can be aborted. pub fn unsafe_context(map: &ast_map::Map, id: ast::NodeId) -> bool { match map.find(map.get_parent(id)) { Some(ast_map::NodeImplItem(itm)) => { match itm.node { hir::ImplItemKind::Method(ref sig, _) => sig.unsafety == hir::Unsafety::Unsafe, _ => false } },<|fim▁hole|> hir::Unsafety::Unsafe => true, _ => false, }, _ => false, } } _ => false // There are probably a couple of other unsafe cases we don't care to lint, those will need // to be added. } } /// check if a DefId's path matches the given absolute type path /// usage e.g. with /// `match_def_path(cx, id, &["core", "option", "Option"])` pub fn match_def_path(cx: &LateContext, def_id: DefId, path: &[&str]) -> bool { cx.tcx.with_path(def_id, |iter| iter.map(|elem| elem.name()) .zip(path.iter()).all(|(nm, p)| &nm.as_str() == p)) } pub fn in_derive_expn(cx: &LateContext, span: Span) -> bool { cx.sess().codemap().with_expn_info(span.expn_id, |info| { if let Some(i) = info { if let ExpnFormat::MacroAttribute(n) = i.callee.format { if n.as_str().contains("derive") { true } else { false } } else { false } } else { false } }) }<|fim▁end|>
Some(ast_map::NodeItem(itm)) => { match itm.node { hir::ItemFn(_, style, _, _, _, _) => match style {
<|file_name|>agent_manager.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4 # # Copyright 2013 New Dream Network, LLC (DreamHost) # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # @author: Mark McClain, DreamHost import weakref from oslo.config import cfg from neutron.agent.common import config from neutron.agent import rpc as agent_rpc from neutron.common import constants from neutron import context from neutron.openstack.common import importutils from neutron.openstack.common import log as logging from neutron.openstack.common import loopingcall from neutron.openstack.common import periodic_task from neutron.services.loadbalancer.drivers.haproxy import ( agent_api, plugin_driver ) LOG = logging.getLogger(__name__) NS_PREFIX = 'qlbaas-' OPTS = [ cfg.StrOpt( 'device_driver', default=('neutron.services.loadbalancer.drivers' '.haproxy.namespace_driver.HaproxyNSDriver'), help=_('The driver used to manage the loadbalancing device'), ), cfg.StrOpt( 'loadbalancer_state_path', default='$state_path/lbaas', help=_('Location to store config and state files'), ), cfg.StrOpt( 'interface_driver', help=_('The driver used to manage the virtual interface') ), cfg.StrOpt( 'user_group', default='nogroup', help=_('The user group'), ), ] class LogicalDeviceCache(object): """Manage a cache of known devices.""" class Device(object): """Inner classes used to hold values for weakref lookups.""" def __init__(self, port_id, pool_id): self.port_id = port_id self.pool_id = pool_id def __eq__(self, other): return self.__dict__ == other.__dict__ def __hash__(self): return hash((self.port_id, self.pool_id)) def __init__(self): self.devices = set() self.port_lookup = weakref.WeakValueDictionary() self.pool_lookup = weakref.WeakValueDictionary() def put(self, device): port_id = device['vip']['port_id'] pool_id = device['pool']['id'] d = self.Device(device['vip']['port_id'], device['pool']['id']) if d not in self.devices: self.devices.add(d) self.port_lookup[port_id] = d self.pool_lookup[pool_id] = d def remove(self, device): if not isinstance(device, self.Device): device = self.Device( device['vip']['port_id'], device['pool']['id'] ) if device in self.devices: self.devices.remove(device) def remove_by_pool_id(self, pool_id): d = self.pool_lookup.get(pool_id) if d: self.devices.remove(d) def get_by_pool_id(self, pool_id): return self.pool_lookup.get(pool_id) def get_by_port_id(self, port_id): return self.port_lookup.get(port_id) def get_pool_ids(self): return self.pool_lookup.keys() class LbaasAgentManager(periodic_task.PeriodicTasks): # history # 1.0 Initial version # 1.1 Support agent_updated call RPC_API_VERSION = '1.1' def __init__(self, conf): self.conf = conf try: vif_driver = importutils.import_object(conf.interface_driver, conf) except ImportError: # the driver is optional msg = _('Error importing interface driver: %s') raise SystemExit(msg % conf.interface_driver) vif_driver = None try: self.driver = importutils.import_object( conf.device_driver, config.get_root_helper(self.conf), 
conf.loadbalancer_state_path, vif_driver, self._vip_plug_callback ) except ImportError: msg = _('Error importing loadbalancer device driver: %s') raise SystemExit(msg % conf.device_driver) self.agent_state = { 'binary': 'neutron-loadbalancer-agent', 'host': conf.host, 'topic': plugin_driver.TOPIC_LOADBALANCER_AGENT, 'configurations': {'device_driver': conf.device_driver, 'interface_driver': conf.interface_driver}, 'agent_type': constants.AGENT_TYPE_LOADBALANCER, 'start_flag': True} self.admin_state_up = True self.context = context.get_admin_context_without_session() self._setup_rpc() self.needs_resync = False self.cache = LogicalDeviceCache() <|fim▁hole|> plugin_driver.TOPIC_PROCESS_ON_HOST, self.context, self.conf.host ) self.state_rpc = agent_rpc.PluginReportStateAPI( plugin_driver.TOPIC_PROCESS_ON_HOST) report_interval = self.conf.AGENT.report_interval if report_interval: heartbeat = loopingcall.FixedIntervalLoopingCall( self._report_state) heartbeat.start(interval=report_interval) def _report_state(self): try: device_count = len(self.cache.devices) self.agent_state['configurations']['devices'] = device_count self.state_rpc.report_state(self.context, self.agent_state) self.agent_state.pop('start_flag', None) except Exception: LOG.exception("Failed reporting state!") def initialize_service_hook(self, started_by): self.sync_state() @periodic_task.periodic_task def periodic_resync(self, context): if self.needs_resync: self.needs_resync = False self.sync_state() @periodic_task.periodic_task(spacing=6) def collect_stats(self, context): for pool_id in self.cache.get_pool_ids(): try: stats = self.driver.get_stats(pool_id) if stats: self.plugin_rpc.update_pool_stats(pool_id, stats) except Exception: LOG.exception(_('Error updating stats')) self.needs_resync = True def _vip_plug_callback(self, action, port): if action == 'plug': self.plugin_rpc.plug_vip_port(port['id']) elif action == 'unplug': self.plugin_rpc.unplug_vip_port(port['id']) def sync_state(self): known_devices = set(self.cache.get_pool_ids()) try: ready_logical_devices = set(self.plugin_rpc.get_ready_devices()) for deleted_id in known_devices - ready_logical_devices: self.destroy_device(deleted_id) for pool_id in ready_logical_devices: self.refresh_device(pool_id) except Exception: LOG.exception(_('Unable to retrieve ready devices')) self.needs_resync = True self.remove_orphans() def refresh_device(self, pool_id): try: logical_config = self.plugin_rpc.get_logical_device(pool_id) if self.driver.exists(pool_id): self.driver.update(logical_config) else: self.driver.create(logical_config) self.cache.put(logical_config) except Exception: LOG.exception(_('Unable to refresh device for pool: %s'), pool_id) self.needs_resync = True def destroy_device(self, pool_id): device = self.cache.get_by_pool_id(pool_id) if not device: return try: self.driver.destroy(pool_id) self.plugin_rpc.pool_destroyed(pool_id) except Exception: LOG.exception(_('Unable to destroy device for pool: %s'), pool_id) self.needs_resync = True self.cache.remove(device) def remove_orphans(self): try: self.driver.remove_orphans(self.cache.get_pool_ids()) except NotImplementedError: pass # Not all drivers will support this def reload_pool(self, context, pool_id=None, host=None): """Handle RPC cast from plugin to reload a pool.""" if pool_id: self.refresh_device(pool_id) def modify_pool(self, context, pool_id=None, host=None): """Handle RPC cast from plugin to modify a pool if known to agent.""" if self.cache.get_by_pool_id(pool_id): self.refresh_device(pool_id) def 
destroy_pool(self, context, pool_id=None, host=None): """Handle RPC cast from plugin to destroy a pool if known to agent.""" if self.cache.get_by_pool_id(pool_id): self.destroy_device(pool_id) def agent_updated(self, context, payload): """Handle the agent_updated notification event.""" if payload['admin_state_up'] != self.admin_state_up: self.admin_state_up = payload['admin_state_up'] if self.admin_state_up: self.needs_resync = True else: for pool_id in self.cache.get_pool_ids(): self.destroy_device(pool_id) LOG.info(_("agent_updated by server side %s!"), payload)<|fim▁end|>
def _setup_rpc(self): self.plugin_rpc = agent_api.LbaasAgentApi(
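
A quick sanity check for the LogicalDeviceCache in the record above — a standalone sketch, not part of the dataset; the device dict shape is inferred from the `put` method, and the names are invented for illustration:

# Hypothetical usage of LogicalDeviceCache from agent_manager.py above.
cache = LogicalDeviceCache()
device = {'vip': {'port_id': 'port-1'}, 'pool': {'id': 'pool-1'}}

cache.put(device)
cache.put(device)  # putting an equal device a second time is a no-op
assert len(cache.devices) == 1

# both weakref lookups resolve to the same cached Device entry
assert cache.get_by_pool_id('pool-1') is cache.get_by_port_id('port-1')

cache.remove_by_pool_id('pool-1')
assert len(cache.devices) == 0
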
<|file_name|>BleReceiveDataProvider.java<|end_file_name|><|fim▁begin|>package jp.co.thcomp.bluetoothhelper;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;

public class BleReceiveDataProvider extends BleDataProvider {
    public static final int AddPacketResultSuccess = -1;
    public static final int AddPacketResultAlreadyFinished = -2;
    public static final int AddPacketResultNoData = -3;

    private boolean mReceiveDataFinish = false;
    private byte[][] mReceiveDataArray;
    private int mLeftPacketCount = 0;
    private int mDataSize;
    private Short mReservedMessageId = null;
    private ArrayList<byte[]> mReservedPacketList = new ArrayList<>();

    /**
     * @param packetData
     * @return AddPacketResultAlreadyFinished: adding to an already completed message (add failed)
     * AddPacketResultSuccess: added successfully
     * 0-ShortMax: a different message is being added (add failed)
     */
    public int addPacket(byte[] packetData) {
        int ret = AddPacketResultSuccess;

        if (packetData != null && packetData.length > 0) {
            if (!mReceiveDataFinish) {
                ByteBuffer tempBufferForShort = ByteBuffer.allocate(Short.SIZE / Byte.SIZE);
                ByteBuffer tempBufferForInt = ByteBuffer.allocate(Integer.SIZE / Byte.SIZE);

                // bytes 0-1: message ID (capped at Short max; reflects the send order from the peripheral, but the value wraps around)
                tempBufferForShort.position(0);
                tempBufferForShort.put(packetData, 0, LengthMessageID);
                short messageId = tempBufferForShort.getShort(0);

                // bytes 2-5: packet size; set to a value no larger than the MTU size
                tempBufferForInt.position(0);
                tempBufferForInt.put(packetData, IndexPacketSize, LengthPacketSize);
<|fim▁hole|>

                // bytes 6-9: packet position; 0 marks the configuration packet, 1 or greater marks a data packet
                tempBufferForInt.position(0);
                tempBufferForInt.put(packetData, IndexPacketPosition, LengthPacketPosition);
                int packetPosition = tempBufferForInt.getInt(0);

                if (packetPosition == 0) {
                    if (mMessageId == null) {
                        boolean matchMessageId = true;

                        if (mReservedMessageId != null) {
                            // a message ID is already reserved, so reject configuration packets for any other message
                            if (messageId != mReservedMessageId) {
                                matchMessageId = false;
                            }
                        }

                        if (matchMessageId) {
                            mMessageId = messageId;

                            // configuration packet
                            // bytes 10-13: packet count, including the configuration packet (capped at Int max)
                            tempBufferForInt.position(0);
                            tempBufferForInt.put(packetData, IndexPacketCount, LengthPacketCount);
                            mLeftPacketCount = tempBufferForInt.getInt(0) - 1;
                            mReceiveDataArray = new byte[mLeftPacketCount][];

                            // bytes 14-17: data size (capped at Int max)
                            tempBufferForInt.position(0);
                            tempBufferForInt.put(packetData, IndexDataSize, LengthDataSize);
                            mDataSize = tempBufferForInt.getInt(0);

                            if (mReservedMessageId != null && mReservedPacketList.size() > 0) {
                                // replay the packets that were held back until now
                                for (byte[] reservedPacketData : mReservedPacketList) {
                                    addPacket(reservedPacketData);
                                }
                            }

                            mReservedMessageId = null;
                            mReservedPacketList.clear();
                        }
                    } else {
                        // a packet for a different message is being added, so return the newer message ID
                        ret = messageId;
                    }
                } else {
                    if (mMessageId == null) {
                        if (mReservedMessageId == null) {
                            mReservedMessageId = messageId;
                        }

                        if (mReservedMessageId == messageId) {
                            // no configuration packet yet, so hold this packet in the pending list
                            mReservedPacketList.add(packetData);
                        }
                    } else if (mMessageId == messageId) {
                        // data packet
                        if (mReceiveDataArray != null) {
                            mLeftPacketCount--;

                            // byte 10: flag telling whether a next packet exists; 0: no next packet, 1: next packet exists
                            tempBufferForInt.position(0);
                            tempBufferForInt.put(packetData, IndexExistNextPacket, LengthExistNextPacket);
                            int existNextPacket = tempBufferForInt.getInt(0);

                            // the rest of the packet holds the payload: the packet size from the header minus the header length
                            mReceiveDataArray[packetPosition - 1] = Arrays.copyOfRange(packetData, IndexDataStartPosition, packetSize);

                            if ((mLeftPacketCount == 0) || (existNextPacket == NotExistNextPacket)) {
                                // reset the remaining packet count to 0, then recount it from what has actually been received
                                mLeftPacketCount = 0;

                                for (int i 
= 0, size = mReceiveDataArray.length; i < size; i++) { if (mReceiveDataArray[i] == null) { mLeftPacketCount++; } } if (mLeftPacketCount == 0) { mReceiveDataFinish = true; } } } } else { ret = messageId; } } } else { ret = AddPacketResultAlreadyFinished; } } else { ret = AddPacketResultNoData; } return ret; } public boolean isCompleted() { return mReceiveDataFinish; } @Override public byte[] getData() { byte[] ret = null; if (mReceiveDataFinish) { if (mData == null) { ByteArrayOutputStream stream = new ByteArrayOutputStream(); try { for (int i = 0, size = mReceiveDataArray.length; i < size; i++) { stream.write(mReceiveDataArray[i]); } mData = stream.toByteArray(); } catch (IOException e) { e.printStackTrace(); } } ret = super.getData(); } return ret; } @Override public Short getMessageId() { if (mReservedMessageId != null && mMessageId == null) { return mReservedMessageId; } else { return super.getMessageId(); } } }<|fim▁end|>
int packetSize = tempBufferForInt.getInt(0);
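
The byte layout parsed by addPacket above can be reproduced in a few lines. This is a sketch under stated assumptions: the header offsets (2, 6, 10, 14) come from the inline comments rather than from the BleDataProvider base class, which is not shown, and Java's ByteBuffer defaults to big-endian:

import struct

# Hypothetical configuration packet matching the commented layout above.
MESSAGE_ID = 7       # bytes 0-1, big-endian short
HEADER_SIZE = 18     # 2 + 4 + 4 + 4 + 4 bytes
PACKET_COUNT = 3     # bytes 10-13 of a configuration packet
DATA_SIZE = 40       # bytes 14-17 of a configuration packet

# packet position 0 marks this as the configuration packet
config_packet = struct.pack('>hiiii',
                            MESSAGE_ID, HEADER_SIZE, 0,
                            PACKET_COUNT, DATA_SIZE)
assert len(config_packet) == HEADER_SIZE
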
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import os
import requests
import time
import math
import datetime
import random
import envoy
import jsonfield
import logging
import urllib

from collections import defaultdict
from magic_repr import make_repr

from hashlib import md5, sha1
from django.db import models
from django.db.models import Q
from django.utils import timezone
from django.conf import settings
from django.contrib.auth.models import AbstractBaseUser, UserManager as BaseUserManager
from django.core.cache import cache
#from south.modelsinspector import add_introspection_rules
from twiggy_goodies.threading import log

from allmychanges.validators import URLValidator
from allmychanges.downloaders.utils import normalize_url
from allmychanges.issues import calculate_issue_importance

from allmychanges.utils import (
    split_filenames,
    parse_search_list,
    get_one_or_none,
)
from allmychanges import chat
from allmychanges.downloaders import (
    get_downloader)
from allmychanges.utils import reverse
from allmychanges.tasks import (
    update_preview_task,
    update_changelog_task)
from allmychanges.exceptions import SynonymError


MARKUP_CHOICES = (
    ('markdown', 'markdown'),
    ('rest', 'rest'),
)

NAME_LENGTH = 80
NAMESPACE_LENGTH = 80
DESCRIPTION_LENGTH = 255
PROCESSING_STATUS_LENGTH = 40


# based on http://www.caktusgroup.com/blog/2013/08/07/migrating-custom-user-model-django/
from pytz import common_timezones
TIMEZONE_CHOICES = [(tz, tz) for tz in common_timezones]


class URLField(models.URLField):
    default_validators = [URLValidator()]

#add_introspection_rules([], ["^allmychanges\.models\.URLField"])


class UserManager(BaseUserManager):
    def _create_user(self, username, email=None,
                     password=None, **extra_fields):
        now = timezone.now()
        email = self.normalize_email(email)
        user = self.model(username=username, email=email,
                          last_login=now, date_joined=now, **extra_fields)
        user.set_password(password)
        user.save(using=self._db)
        return user

    def create(self, *args, **kwargs):
        email = kwargs.get('email')
        if email and self.filter(email=email).count() > 0:
            raise ValueError('User with email "{0}" already exists'.format(email))

        username = kwargs.get('username')
        url = settings.BASE_URL + reverse('admin-user-profile', username=username)
        chat.send(('New user <{url}|{username}> '
                   'with email "{email}" (from create)').format(
                       url=url,
                       username=username,
                       email=email))
        return super(UserManager, self).create(*args, **kwargs)

    def create_user(self, username, email=None, password=None, **extra_fields):
        if email and self.filter(email=email).count() > 0:
            raise ValueError('User with email "{0}" already exists'.format(email))

        url = settings.BASE_URL + reverse('admin-user-profile', username=username)
        chat.send(('New user <{url}|{username}> '
                   'with email "{email}" (from create_user)').format(
                       url=url,
                       username=username,
                       email=email))
        return self._create_user(username, email, password,
                                 **extra_fields)

    def active_users(self, interval):
        """Outputs only users who were active in the last `interval` days.
        """
        after = timezone.now() - datetime.timedelta(interval)
        queryset = self.all()
        queryset = queryset.filter(history_log__action__in=ACTIVE_USER_ACTIONS,
                                   history_log__created_at__gte=after).distinct()
        return queryset


SEND_DIGEST_CHOICES = (
    ('daily', 'Every day'),
    ('weekly', 'Every week (on Monday)'),
    ('never', 'Never'))

RSS_HASH_LENGH = 32


class User(AbstractBaseUser):
    """
    A fully featured User model with admin-compliant permissions that uses
    a full-length email field as the username.

    Email and password are required. 
Other fields are optional. """ username = models.CharField('user name', max_length=254, unique=True) email = models.EmailField('email address', max_length=254) email_is_valid = models.BooleanField(default=False)<|fim▁hole|> timezone = models.CharField(max_length=100, choices=TIMEZONE_CHOICES, default='UTC') changelogs = models.ManyToManyField('Changelog', through='ChangelogTrack', related_name='trackers') feed_versions = models.ManyToManyField('Version', through='FeedItem', related_name='users') feed_sent_id = models.IntegerField( default=0, help_text='Keeps position in feed items already sent in digest emails') last_digest_sent_at = models.DateTimeField( blank=True, null=True, help_text='Date when last email digest was sent') skips_changelogs = models.ManyToManyField('Changelog', through='ChangelogSkip', related_name='skipped_by') moderated_changelogs = models.ManyToManyField('Changelog', through='Moderator', related_name='moderators') # notification settings send_digest = models.CharField(max_length=100, choices=SEND_DIGEST_CHOICES, default='daily') slack_url = models.URLField(max_length=2000, default='', blank=True) webhook_url = models.URLField(max_length=2000, default='', blank=True) rss_hash = models.CharField(max_length=RSS_HASH_LENGH, unique=True, blank=True, null=True) custom_fields = jsonfield.JSONField( default={}, help_text='Custom fields such like "Location" or "SecondEmail".', blank=True) objects = UserManager() USERNAME_FIELD = 'username' REQUIRED_FIELDS = ['email'] class Meta: verbose_name = 'user' verbose_name_plural = 'users' __repr__ = make_repr('username', 'email') def get_avatar(self, size): # adorable_template = 'https://api.adorable.io/avatars/{size}/{hash}.png' robohash_template = 'https://robohash.org/{hash}.png?size={size}x{size}' if self.email: hash = md5(self.email.lower()).hexdigest() default = robohash_template.format(size=size, hash=hash) avatar_url = 'https://www.gravatar.com/avatar/{hash}?{opts}'.format( hash=hash, opts=urllib.urlencode( dict( s=str(size), d=default ) ) ) else: hash = md5(self.username).hexdigest() avatar_url = robohash_template.format(size=size, hash=hash) return avatar_url @property def is_superuser(self): return self.username in settings.SUPERUSERS def does_track(self, changelog): """Check if this user tracks given changelog.""" return self.changelogs.filter(pk=changelog.id).exists() def track(self, changelog): if not self.does_track(changelog): if changelog.namespace == 'web' and changelog.name == 'allmychanges': action = 'track-allmychanges' action_description = 'User tracked our project\'s changelog.' else: action = 'track' action_description = 'User tracked changelog:{0}'.format(changelog.id) UserHistoryLog.write(self, '', action, action_description) ChangelogTrack.objects.create( user=self, changelog=changelog) def untrack(self, changelog): if self.does_track(changelog): if changelog.namespace == 'web' and changelog.name == 'allmychanges': action = 'untrack-allmychanges' action_description = 'User untracked our project\'s changelog.' 
            else:
                action = 'untrack'
                action_description = 'User untracked changelog:{0}'.format(changelog.id)

            UserHistoryLog.write(self, '', action, action_description)
            ChangelogTrack.objects.filter(
                user=self,
                changelog=changelog).delete()

    def does_skip(self, changelog):
        """Check if this user skipped this changelog in the package selector."""
        return self.skips_changelogs.filter(pk=changelog.id).exists()

    def skip(self, changelog):
        if not self.does_skip(changelog):
            action = 'skip'
            action_description = 'User skipped changelog:{0}'.format(changelog.id)
            UserHistoryLog.write(self, '', action, action_description)
            ChangelogSkip.objects.create(
                user=self,
                changelog=changelog)

    def add_feed_item(self, version):
        if self.send_digest == 'never':
            return None
        return FeedItem.objects.create(user=self, version=version)

    def save(self, *args, **kwargs):
        if self.rss_hash is None:
            self.rss_hash = sha1(self.username + settings.SECRET_KEY).hexdigest()[:RSS_HASH_LENGH]
        return super(User, self).save(*args, **kwargs)


class Subscription(models.Model):
    email = models.EmailField()
    come_from = models.CharField(max_length=100)
    date_created = models.DateTimeField()

    def __unicode__(self):
        return self.email


class Downloadable(object):
    """Adds a download method, which uses the `source` attribute to update
    the `downloader` attribute if needed and then downloads the repository
    into a temporary directory.
    """
    def download(self, downloader,
                 report_back=lambda message, level=logging.INFO: None):
        """This method fetches the repository into a temporary directory
        and returns the path to this directory.

        It can report the download status using the `report_back` callback.
        Everything passed to `report_back` will be displayed to the end user
        in the processing log on the "Tune" page.
        """
        if isinstance(downloader, dict):
            params = downloader.get('params', {})
            downloader = downloader['name']
        else:
            params = {}

        params.update(self.downloader_settings or {})

        download = get_downloader(downloader)
        return download(self.source,
                        report_back=report_back,
                        **params)
    # A mixin to get/set ignore and check lists on a model.
    def get_ignore_list(self):
        """Returns a list with all filenames and directories to ignore
        when searching a changelog."""
        return split_filenames(self.ignore_list)

    def set_ignore_list(self, items):
        self.ignore_list = u'\n'.join(items)

    def get_search_list(self):
        """Returns a list with all filenames and directories to check
        when searching a changelog."""
        return parse_search_list(self.search_list)

    def set_search_list(self, items):
        def process(item):
            if isinstance(item, tuple) and item[1]:
                return u':'.join(item)
            else:
                return item
        self.search_list = u'\n'.join(map(process, items))


class ChangelogManager(models.Manager):
    def only_active(self):
        # active changelog is good and not paused
        queryset = self.good()
        return queryset.filter(paused_at=None)

    def good(self):
        # good changelog should have namespace, name, source and downloader
        return self.all().exclude(
            Q(name=None) | Q(namespace=None) | Q(downloader=None) | Q(source=''))

    def unsuccessful(self):
        return self.all().filter(
            Q(name=None) | Q(namespace=None) | Q(downloader=None) | Q(source=''))


class Changelog(Downloadable, models.Model):
    objects = ChangelogManager()

    source = URLField(db_index=True, blank=True)
    created_at = models.DateTimeField(auto_now_add=True)
    # TODO: remove
    processing_started_at = models.DateTimeField(blank=True, null=True)
    problem = models.CharField(max_length=1000,
                               help_text='Latest error message',
                               blank=True, null=True)
    # TODO: remove
    filename = models.CharField(max_length=1000,
                                help_text=('If changelog was discovered, then this '
                                           'field will store its filename'),
                                blank=True, null=True)
    updated_at = models.DateTimeField(blank=True, null=True)
    next_update_at = models.DateTimeField(default=timezone.now)
    paused_at = models.DateTimeField(blank=True, null=True)
    last_update_took = models.IntegerField(
        help_text=('Number of seconds required to '
                   'update this changelog last time'),
        default=0)
    ignore_list = models.CharField(max_length=1000,
                                   default='',
                                   help_text=('Comma-separated list of directories'
                                              ' and filenames to ignore searching'
                                              ' changelog.'),
                                   blank=True)
    # TODO: figure out why there are two fields here, check_list and search_list
    check_list = models.CharField(max_length=1000,
                                  default='',
                                  help_text=('Comma-separated list of directories'
                                             ' and filenames to search'
                                             ' changelog.'),
                                  blank=True)
    search_list = models.CharField(max_length=1000,
                                   default='',
                                   help_text=('Comma-separated list of directories'
                                              ' and filenames to search'
                                              ' changelog.'),
                                   blank=True)
    xslt = models.TextField(default='',
                            help_text=('XSLT transform to be applied to all html files.'),
                            blank=True)
    namespace = models.CharField(max_length=NAMESPACE_LENGTH, blank=True, null=True)
    name = models.CharField(max_length=NAME_LENGTH, blank=True, null=True)
    description = models.CharField(max_length=DESCRIPTION_LENGTH,
                                   blank=True, default='')
    downloader = models.CharField(max_length=20, blank=True, null=True)
    downloader_settings = jsonfield.JSONField(
        default={},
        help_text=('JSON with settings for selected downloader.'),
        blank=True)
    downloaders = jsonfield.JSONField(
        default=[],
        help_text=('JSON with guessed downloaders and their additional meta information.'),
        blank=True)
    status = models.CharField(max_length=40, default='created')
    processing_status = models.CharField(max_length=PROCESSING_STATUS_LENGTH)
    icon = models.CharField(max_length=1000, blank=True, null=True)

    class Meta:
        unique_together = ('namespace', 'name')

    def __unicode__(self):
        return u'Changelog from {0}'.format(self.source)

    __repr__ = make_repr('namespace', 'name', 'source')

    def latest_versions(self, limit):
        return self.versions.exclude(unreleased=True) \
                            .order_by('-order_idx')[:limit]

    def latest_version(self):
        versions = list(self.latest_versions(1))
        if versions:
            return versions[0]

    def get_display_name(self):
        return u'{0}/{1}'.format(
            self.namespace,
            self.name)

    @staticmethod
    def create_uniq_name(namespace, name):
        """Returns a name which is unique in the given namespace.
        The name is created by incrementing a value."""
        if namespace and name:
            base_name = name
            counter = 0
            while Changelog.objects.filter(
                    namespace=namespace,
                    name=name).exists():
                counter += 1
                name = '{0}{1}'.format(base_name, counter)
        return name

    @staticmethod
    def get_all_namespaces(like=None):
        queryset = Changelog.objects.all()
        if like is not None:
            queryset = queryset.filter(
                namespace__iexact=like
            )
        return list(queryset.values_list('namespace', flat=True).distinct())

    @staticmethod
    def normalize_namespaces():
        namespaces_usage = defaultdict(int)
        changelogs_with_namespaces = Changelog.objects.exclude(namespace=None)

        for namespace in changelogs_with_namespaces.values_list('namespace', flat=True):
            namespaces_usage[namespace] += 1

        def normalize(namespace):
            lowercased = namespace.lower()
            # here we process only capitalized namespaces
            if namespace == lowercased:
                return

            # if the lowercased name is not used at all
            if lowercased not in namespaces_usage:
                return

            lowercased_count = namespaces_usage[lowercased]
            this_count = namespaces_usage[namespace]

            if lowercased_count >= this_count:
                # if the number of occurrences is equal,
                # prefer the lowercased name
                Changelog.objects.filter(
                    namespace=namespace).update(
                        namespace=lowercased)
            else:
                Changelog.objects.filter(
                    namespace=lowercased).update(
                        namespace=namespace)

            del namespaces_usage[namespace]
            del namespaces_usage[lowercased]

        all_namespaces = namespaces_usage.keys()
        all_namespaces.sort()

        for namespace in all_namespaces:
            normalize(namespace)

    def save(self, *args, **kwargs):
        if self.id is None:
            # then the object was just created, and this is a good
            # time to fix its namespace
            existing_namespaces = Changelog.get_all_namespaces(like=self.namespace)
            if existing_namespaces:
                self.namespace = existing_namespaces[0]

        return super(Changelog, self).save(*args, **kwargs)

    def get_absolute_url(self):
        return reverse('project', namespace=self.namespace, name=self.name)

    def editable_by(self, user, light_user=None):
        light_moderators = set(self.light_moderators.values_list('light_user', flat=True))
        moderators = set(self.moderators.values_list('id', flat=True))

        if user.is_authenticated():
            # Any changelog could be edited by me
            if user.is_superuser:
                return True

            if moderators or light_moderators:
                return user.id in moderators
        else:
            if moderators or light_moderators:
                return light_user in light_moderators

        return True

    def is_unsuccessful(self):
        return self.name is None or \
            self.namespace is None or \
            self.downloader is None or \
            not self.source

    def is_moderator(self, user, light_user=None):
        light_moderators = set(self.light_moderators.values_list('light_user', flat=True))
        moderators = set(self.moderators.values_list('id', flat=True))

        if user.is_authenticated():
            return user.id in moderators
        else:
            return light_user in light_moderators

    def add_to_moderators(self, user, light_user=None):
        """Adds the user to moderators and returns 'normal' or 'light'
        if the user was actually added.
        If the user was already a moderator, returns None."""
        if not self.is_moderator(user, light_user):
            if user.is_authenticated():
                Moderator.objects.create(changelog=self, user=user)
                return 'normal'
            else:
                if light_user is not None:
                    self.light_moderators.create(light_user=light_user)
                    return 'light'

    def create_issue(self, type, comment='', related_versions=[]):
        joined_versions = u', '.join(related_versions)

        # for some types, only one issue at a time is allowed
        if type == 'lesser-version-count':
            if self.issues.filter(type=type,
                                  resolved_at=None,
                                  related_versions=joined_versions).count() > 0:
                return

        issue = self.issues.create(type=type,
                                   comment=comment.format(related_versions=joined_versions),
                                   related_versions=joined_versions)
        chat.send(u'New issue of type "{issue.type}" with comment: "{issue.comment}" was created for <https://allmychanges.com/issues/?namespace={issue.changelog.namespace}&name={issue.changelog.name}|{issue.changelog.namespace}/{issue.changelog.name}>'.format(
            issue=issue))

    def resolve_issues(self, type):
        self.issues.filter(type=type,
                           resolved_at=None).update(resolved_at=timezone.now())

    def create_preview(self, user, light_user, **params):
        params.setdefault('downloader', self.downloader)
        params.setdefault('downloader_settings', self.downloader_settings)
        params.setdefault('downloaders', self.downloaders)
        params.setdefault('source', self.source)
        params.setdefault('search_list', self.search_list)
        params.setdefault('ignore_list', self.ignore_list)
        params.setdefault('xslt', self.xslt)
        preview = self.previews.create(user=user,
                                       light_user=light_user,
                                       **params)
        # preview_test_task.delay(
        #     preview.id,
        #     ['Guessing downloaders',
        #      'Downloading using git',
        #      'Searching versions',
        #      'Nothing found',
        #      'Downloading from GitHub Review',
        #      'Searching versions',
        #      'Some results were found'])
        return preview

    def set_status(self, status, **kwargs):
        changed_fields = ['status', 'updated_at']
        if status == 'error':
            self.problem = kwargs.get('problem')
            changed_fields.append('problem')

        self.status = status
        self.updated_at = timezone.now()
        self.save(update_fields=changed_fields)

    def set_processing_status(self, status, level=logging.INFO):
        self.processing_status = status[:PROCESSING_STATUS_LENGTH]
        self.updated_at = timezone.now()
        self.save(update_fields=('processing_status',
                                 'updated_at'))

        key = 'preview-processing-status:{0}'.format(self.id)
        cache.set(key, status, 10 * 60)

    def get_processing_status(self):
        key = 'preview-processing-status:{0}'.format(self.id)
        result = cache.get(key, self.processing_status)
        return result

    def calc_next_update(self):
        """Returns the date and time when the next update should be scheduled.
""" hour = 60 * 60 min_update_interval = hour max_update_interval = 48 * hour num_trackers = self.trackers.count() # here we divide max interval on 2 because # on the last stage will add some randomness to # the resulting value time_to_next_update = (max_update_interval / 2) / math.log(max(math.e, num_trackers)) time_to_next_update = max(min_update_interval, time_to_next_update, 2 * self.last_update_took) # add some randomness time_to_next_update = random.randint( int(time_to_next_update * 0.8), int(time_to_next_update * 2.0)) # limit upper bound return timezone.now() + datetime.timedelta(0, time_to_next_update) def calc_next_update_if_error(self): # TODO: check and remove return timezone.now() + datetime.timedelta(0, 1 * 60 * 60) def schedule_update(self, async=True, full=False): with log.fields(changelog_name=self.name, changelog_namespace=self.namespace, async=async, full=full): log.info('Scheduling changelog update') self.set_status('processing') self.set_processing_status('Waiting in the queue') self.problem = None self.save() if full: self.versions.all().delete() if async: update_changelog_task.delay(self.id) else: update_changelog_task(self.id) def resume(self): self.paused_at = None self.next_update_at = timezone.now() # we don't need to save here, because this will be done in schedule_update self.schedule_update() def clean(self): super(Changelog, self).clean() self.source, _, _ = normalize_url(self.source, for_checkout=False) def update_description_from_source(self, fall_asleep_on_rate_limit=False): # right now this works only for github urls if 'github.com' not in self.source: return url, username, repo = normalize_url(self.source) url = 'https://api.github.com/repos/{0}/{1}'.format(username, repo) headers={'Authorization': 'token ' + settings.GITHUB_TOKEN} response = requests.get(url, headers=headers) if response.status_code == 200: data = response.json() self.description = data.get('description', '') self.save(update_fields=('description', )) if fall_asleep_on_rate_limit: remaining = int(response.headers['x-ratelimit-remaining']) if remaining == 1: to_sleep = int(response.headers['x-ratelimit-reset']) - time.time() + 10 print 'OK, now I need to sleep {0} seconds because of GitHub\'s rate limit.'.format(to_sleep) time.sleep(to_sleep) def add_synonym(self, synonym): """Just a shortcut.""" if self.synonyms.filter(source=synonym).count() == 0: # if this synonym already bound to some another project # then raise exception found = list(SourceSynonym.objects.filter(source=synonym)) if found: with log.fields(changelog_id=self.pk, another_changelog_id=found[0].changelog_id): raise SynonymError('Synonym already bound to a changelog') found = list(Changelog.objects.filter(source=synonym)) if found: with log.fields(changelog_id=self.pk, another_changelog_id=found[0].pk): raise SynonymError('Synonym matches a changelog\'s source') self.synonyms.create(source=synonym) def merge_into(self, to_ch): # move trackers to_ch_trackers = set(to_ch.trackers.values_list('id', flat=True)) for user in self.trackers.all(): if user.id not in to_ch_trackers: ChangelogTrack.objects.create(user=user, changelog=to_ch) action = 'moved-during-merge' action_description = 'User was moved from {0}/{1} to changelog:{2}'.format( self.namespace, self.name, to_ch.id) UserHistoryLog.write(user, '', action, action_description) # move issues for issue in self.issues.all(): issue.changelog = to_ch issue.save(update_fields=('changelog',)) # remove itself Changelog.objects.filter(pk=self.pk).delete() # add synonym 
        # add synonym
        to_ch.add_synonym(self.source)

    def set_tag(self, user, name, version_number):
        """Sets or updates a tag with `name` on the version.

        Returns 'updated' if the tag was updated, otherwise 'created'.
        """
        assert isinstance(version_number, basestring), \
            'Parameter "version_number" should be a string, not "{0}"'.format(
                type(version_number))

        params = dict(user=user,
                      name=name)
        existing_tag = self.tags.filter(
            **params)
        update = existing_tag.count() > 0
        if update:
            existing_tag.delete()

        version = get_one_or_none(self.versions, number=version_number)
        self.tags.create(version=version,
                         version_number=version_number,
                         **params)
        return 'updated' if update else 'created'

    def remove_tag(self, user, name):
        """Removes the tag with `name` from the version.
        """
        self.tags.filter(user=user, name=name).delete()


class SourceSynonym(models.Model):
    changelog = models.ForeignKey(Changelog, related_name='synonyms')
    created_at = models.DateTimeField(default=timezone.now)
    source = URLField(unique=True)


class ChangelogTrack(models.Model):
    user = models.ForeignKey(User)
    changelog = models.ForeignKey(Changelog)
    created_at = models.DateTimeField(default=timezone.now)


class ChangelogSkip(models.Model):
    user = models.ForeignKey(User)
    changelog = models.ForeignKey(Changelog)
    created_at = models.DateTimeField(default=timezone.now)


class Issue(models.Model):
    """Keeps track of any issues related to a changelog.
    """
    changelog = models.ForeignKey(Changelog,
                                  related_name='issues',
                                  blank=True,
                                  null=True)
    user = models.ForeignKey(User,
                             related_name='issues',
                             blank=True,
                             null=True)
    light_user = models.CharField(max_length=40, blank=True, null=True)
    type = models.CharField(max_length=40)
    comment = models.TextField()
    created_at = models.DateTimeField(auto_now_add=True)
    resolved_at = models.DateTimeField(blank=True, null=True)
    resolved_by = models.ForeignKey(User,
                                    related_name='resolved_issues',
                                    blank=True,
                                    null=True)
    related_versions = models.TextField(default='', blank=True,
                                        help_text='Comma-separated list of versions related to this issue')
    email = models.CharField(max_length=100, blank=True, null=True)
    page = models.CharField(max_length=100, blank=True, null=True)
    importance = models.IntegerField(db_index=True, blank=True, default=0)

    __repr__ = make_repr('changelog', 'type', 'comment',
                         'created_at', 'resolved_at')

    def save(self, *args, **kwargs):
        if not self.importance:
            self.importance = calculate_issue_importance(
                num_trackers=self.changelog.trackers.count() if self.changelog else 0,
                user=self.user,
                light_user=self.light_user)
        return super(Issue, self).save(*args, **kwargs)

    @staticmethod
    def merge(user, light_user):
        entries = Issue.objects.filter(user=None,
                                       light_user=light_user)
        if entries.count() > 0:
            with log.fields(username=user.username,
                            num_entries=entries.count(),
                            light_user=light_user):
                log.info('Merging issues')
                entries.update(user=user)

    def editable_by(self, user, light_user=None):
        return self.changelog.editable_by(user, light_user)

    def get_related_versions(self):
        response = [version.strip() for version in self.related_versions.split(',')]
        return filter(None, response)

    def get_related_deployments(self):
        return DeploymentHistory.objects \
                                .filter(deployed_at__lte=self.created_at) \
                                .order_by('-id')[:3]

    def resolve(self, user, notify=True):
        self.resolved_at = timezone.now()
        self.resolved_by = user
        self.save(update_fields=('resolved_at',
                                 'resolved_by'))

        if notify:
            chat.send((u'Issue <https://allmychanges.com{url}|#{issue_id}> '
                       u'for {namespace}/{name} was resolved by {username}.').format(
                           url=reverse('issue-detail', pk=self.id),
                           issue_id=self.id,
                           namespace=self.changelog.namespace,
                           name=self.changelog.name,
                           username=user.username))

        if self.type == 'auto-paused':
            changelog = self.changelog
            with log.fields(changelog_id=changelog.id):
                log.info('Resuming changelog updates')
                changelog.resume()

            if notify:
                chat.send(u'Autopaused package {namespace}/{name} was resumed by {username}.'.format(
                    namespace=changelog.namespace,
                    name=changelog.name,
                    username=user.username))


class IssueComment(models.Model):
    issue = models.ForeignKey(Issue, related_name='comments')
    user = models.ForeignKey(User,
                             blank=True,
                             null=True,
                             related_name='issue_comments')
    created_at = models.DateTimeField(default=timezone.now)
    message = models.TextField()


class DiscoveryHistory(models.Model):
    """Keeps a history of version discovery for a changelog.
    """
    changelog = models.ForeignKey(Changelog,
                                  related_name='discovery_history')
    discovered_versions = models.TextField()
    new_versions = models.TextField()
    num_discovered_versions = models.IntegerField()
    num_new_versions = models.IntegerField()
    created_at = models.DateTimeField(auto_now_add=True)

    __repr__ = make_repr('discovered_versions')


class LightModerator(models.Model):
    """These entries are created when an anonymous user adds another
    package into the system.

    When the user signs up, these entries should be transformed into
    Moderator entries.
    """
    changelog = models.ForeignKey(Changelog,
                                  related_name='light_moderators')
    light_user = models.CharField(max_length=40)
    created_at = models.DateTimeField(auto_now_add=True)

    @staticmethod
    def merge(user, light_user):
        entries = LightModerator.objects.filter(light_user=light_user)
        for entry in entries:
            with log.fields(username=user.username,
                            light_user=light_user):
                log.info('Transforming light moderator into a permanent one')
                Moderator.objects.create(
                    changelog=entry.changelog,
                    user=user,
                    from_light_user=light_user)
        entries.delete()

    @staticmethod
    def remove_stale_moderators():
        LightModerator.objects.filter(
            created_at__lte=timezone.now() - datetime.timedelta(1)).delete()


class Moderator(models.Model):
    changelog = models.ForeignKey(Changelog, related_name='+')
    user = models.ForeignKey(User, related_name='+')
    created_at = models.DateTimeField(auto_now_add=True)
    from_light_user = models.CharField(max_length=40, blank=True, null=True)


class Preview(Downloadable, models.Model):
    user = models.ForeignKey(settings.AUTH_USER_MODEL,
                             related_name='previews',
                             blank=True,
                             null=True)
    changelog = models.ForeignKey(Changelog,
                                  related_name='previews')
    light_user = models.CharField(max_length=40)
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(blank=True, null=True)
    source = models.URLField()
    ignore_list = models.CharField(max_length=1000,
                                   default='',
                                   help_text=('Comma-separated list of directories'
                                              ' and filenames to ignore searching'
                                              ' changelog.'),
                                   blank=True)
    # TODO: remove this field after migration on production
    check_list = models.CharField(max_length=1000,
                                  default='',
                                  help_text=('Comma-separated list of directories'
                                             ' and filenames to search'
                                             ' changelog.'),
                                  blank=True)
    search_list = models.CharField(max_length=1000,
                                   default='',
                                   help_text=('Comma-separated list of directories'
                                              ' and filenames to search'
                                              ' changelog.'),
                                   blank=True)
    xslt = models.TextField(default='',
                            help_text=('XSLT transform to be applied to all html files.'),
                            blank=True)
    problem = models.CharField(max_length=1000,
                               help_text='Latest error message',
                               blank=True, null=True)
    downloader = models.CharField(max_length=255, blank=True, null=True)
    downloader_settings = jsonfield.JSONField(
        default={},
        help_text=('JSON with settings for selected downloader.'),
        blank=True)
    downloaders = jsonfield.JSONField(
        default=[],
        help_text=('JSON with guessed downloaders and their additional meta information.'),
        blank=True)
    done = models.BooleanField(default=False)
    status = models.CharField(max_length=40, default='created')
    processing_status = models.CharField(max_length=40)

    log = jsonfield.JSONField(
        default=[],
        help_text=('JSON with a log of all operations applied during preview processing.'),
        blank=True)

    @property
    def namespace(self):
        return self.changelog.namespace

    @property
    def name(self):
        return self.changelog.name

    @property
    def description(self):
        return self.changelog.description

    def set_status(self, status, **kwargs):
        changed_fields = ['status', 'updated_at']
        if status == 'processing':
            self.versions.all().delete()
            self.updated_at = timezone.now()
            changed_fields.append('updated_at')
        elif status == 'error':
            self.problem = kwargs.get('problem')
            changed_fields.append('problem')

        self.status = status
        self.updated_at = timezone.now()
        self.save(update_fields=changed_fields)

    def set_processing_status(self, status, level=logging.INFO):
        self.log.append(status)
        self.processing_status = status[:PROCESSING_STATUS_LENGTH]
        self.updated_at = timezone.now()
        self.save(update_fields=('processing_status',
                                 'updated_at',
                                 'log'))

        key = 'preview-processing-status:{0}'.format(self.id)
        cache.set(key, status, 10 * 60)

    def get_processing_status(self):
        key = 'preview-processing-status:{0}'.format(self.id)
        result = cache.get(key, self.processing_status)
        return result

    def schedule_update(self):
        self.set_status('processing')
        self.set_processing_status('Waiting in the queue')
        self.versions.all().delete()
        update_preview_task.delay(self.pk)


class VersionManager(models.Manager):
    use_for_related_fields = True

    def create(self, *args, **kwargs):
        version = super(VersionManager, self).create(*args, **kwargs)
        changelog = kwargs.get('changelog')
        if changelog:
            version.associate_with_free_tags()
        return version

    def released(self):
        return self.exclude(unreleased=True)

    def unreleased(self):
        return self.filter(unreleased=True)


class Version(models.Model):
    changelog = models.ForeignKey(Changelog,
                                  related_name='versions',
                                  blank=True,
                                  null=True,
                                  on_delete=models.SET_NULL)
    preview = models.ForeignKey(Preview,
                                related_name='versions',
                                blank=True,
                                null=True,
                                on_delete=models.SET_NULL)
    date = models.DateField(blank=True, null=True)
    number = models.CharField(max_length=255)
    unreleased = models.BooleanField(default=False)
    discovered_at = models.DateTimeField(blank=True, null=True)
    last_seen_at = models.DateTimeField(blank=True, null=True)
    filename = models.CharField(max_length=1000,
                                help_text=('Source file where this version was found'),
                                blank=True, null=True)

    raw_text = models.TextField(blank=True, null=True)
    processed_text = models.TextField(blank=True, null=True)

    order_idx = models.IntegerField(blank=True, null=True,
                                    help_text=('This field is used to reorder versions '
                                               'according to their version numbers and to '
                                               'fetch them from the database efficiently.'))
    tweet_id = models.CharField(max_length=1000,
                                help_text=('Tweet id, or None if we have not tweeted about this version yet.'),
                                blank=True, null=True)

    objects = VersionManager()

    class Meta:
        get_latest_by = 'order_idx'
        ordering = ['-order_idx']

    def __unicode__(self):
        return self.number

    def get_absolute_url(self):
        return self.changelog.get_absolute_url() + '#' + self.number

    def post_tweet(self):
        if not settings.TWITTER_CREDS:
            return

        if self.unreleased:
            raise RuntimeError(
'Unable to tweet about an unreleased version')

        if self.tweet_id:
            return  # because we already posted a tweet

        ch = self.changelog
        image_url = settings.BASE_URL + ch.get_absolute_url() \
            + '?snap=1&version=' + self.number
        filename = sha1(image_url).hexdigest() + '.png'
        full_path = os.path.join(settings.SNAPSHOTS_ROOT, filename)

        result = envoy.run(
            '{root}/makescreenshot --width 590 --height 600 {url} {path}'.format(
                root=settings.PROJECT_ROOT,
                url=image_url,
                path=full_path))

        if result.status_code != 0:
            with log.fields(
                    status_code=result.status_code,
                    std_out=result.std_out,
                    std_err=result.std_err):
                log.error('Unable to make a screenshot')
                raise RuntimeError('Unable to make a screenshot')

        with open(full_path, 'rb') as f:
            from requests_oauthlib import OAuth1
            auth = OAuth1(*settings.TWITTER_CREDS)

            response = requests.post(
                'https://upload.twitter.com/1.1/media/upload.json',
                auth=auth,
                files={'media': ('screenshot.png', f.read(), 'image/png')})
            media_id = response.json()['media_id_string']

            url = settings.BASE_URL + self.get_absolute_url()
            text = '{number} of {namespace}/{name} was released: {url} #{namespace} #{name} #release'.format(
                number=self.number,
                namespace=ch.namespace,
                name=ch.name,
                url=url)

            response = requests.post(
                'https://api.twitter.com/1.1/statuses/update.json',
                auth=auth,
                data={'status': text,
                      'media_ids': media_id})

            if response.status_code == 200:
                self.tweet_id = response.json()['id_str']
                self.save(update_fields=('tweet_id',))

        return full_path

    def set_tag(self, user, name):
        """Convenience method to set a tag on just this version.
        """
        self.changelog.set_tag(user, name, self.number)

    def associate_with_free_tags(self):
        # associate free tags with this version
        for tag in self.changelog.tags.filter(version_number=self.number):
            tag.version = self
            tag.save(update_fields=('version',))


class Tag(models.Model):
    # this field shouldn't be blank or null,
    # but I had to make it so, because otherwise
    # DB migrations weren't possible
    changelog = models.ForeignKey(Changelog,
                                  blank=True,
                                  null=True,
                                  related_name='tags')
    # a tag may be tied to a version in the database,
    # but in some cases we may not have a parsed version
    # with the given number
    version = models.ForeignKey(Version,
                                blank=True,
                                null=True,
                                related_name='tags')
    user = models.ForeignKey(User, related_name='tags')

    # regex=ur'[a-z][a-z0-9-]*[a-z0-9]'
    name = models.CharField(max_length=40)
    # we have no restrictions on the format of this field;
    # it could be any string, even something like 'latest'
    version_number = models.CharField(max_length=40)
    created_at = models.DateTimeField(auto_now_add=True)

    class Meta:
        unique_together = ('changelog', 'user', 'name')

    def get_absolute_url(self):
        # the name shouldn't contain any unicode or non-ascii letters nor spaces;
        # otherwise, we'd need to encode to utf-8 and quote_plus it.
return self.changelog.get_absolute_url() + '#' + self.name __repr__ = make_repr('name', 'version_number') class FeedItem(models.Model): user = models.ForeignKey(User) version = models.ForeignKey(Version, related_name='feed_items') created_at = models.DateTimeField(auto_now_add=True) ACTIVE_USER_ACTIONS = ( u'landing-digest-view', u'landing-track', u'landing-ignore', u'login', u'profile-update', u'digest-view', u'package-view', u'package-create', u'package-edit', u'edit-digest-view', u'index-view', u'track', u'untrack', u'untrack-allmychanges', u'create-issue', u'email-digest-open', u'email-digest-click') class UserHistoryLog(models.Model): user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='history_log', blank=True, null=True) light_user = models.CharField(max_length=40) action = models.CharField(max_length=40) description = models.CharField(max_length=1000) created_at = models.DateTimeField(auto_now_add=True) @staticmethod def merge(user, light_user): entries = UserHistoryLog.objects.filter(user=None, light_user=light_user) if entries.count() > 0: with log.fields(username=user.username, num_entries=entries.count(), light_user=light_user): log.info('Merging user history logs') entries.update(user=user) @staticmethod def write(user, light_user, action, description): user = user if user is not None and user.is_authenticated() else None return UserHistoryLog.objects.create(user=user, light_user=light_user, action=action, description=description) class UserStateHistory(models.Model): user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='state_history') date = models.DateField() state = models.CharField(max_length=40) class DeploymentHistory(models.Model): hash = models.CharField(max_length=32, default='') description = models.TextField() deployed_at = models.DateTimeField(auto_now_add=True) __repr__ = make_repr('deployed_at', 'hash') class EmailVerificationCode(models.Model): user = models.OneToOneField(settings.AUTH_USER_MODEL, related_name='email_verification_code') hash = models.CharField(max_length=32, default='') deployed_at = models.DateTimeField(auto_now_add=True) @staticmethod def new_code_for(user): hash = md5(str(time.time()) + settings.SECRET_KEY).hexdigest() try: code = user.email_verification_code code.hash = hash code.save() except EmailVerificationCode.DoesNotExist: code = EmailVerificationCode.objects.create( user=user, hash=hash) return code AUTOCOMPLETE_TYPES = ( ('source', 'Source URL'), ('namespace', 'Namespace'), ('package', 'Package')) AUTOCOMPLETE_ORIGINS = ( ('app-store', 'App Store'), ('pypi', 'PyPi')) COMMON_WORDS = set('a,able,about,across,after,all,almost,also,am,among,an,and,any,are,as,at,be,because,been,but,by,can,cannot,could,dear,did,do,does,either,else,ever,every,for,from,get,got,had,has,have,he,her,hers,him,his,how,however,i,if,in,into,is,it,its,just,least,let,like,likely,may,me,might,most,must,my,neither,no,nor,not,of,off,often,on,only,or,other,our,own,rather,said,say,says,she,should,since,so,some,than,that,the,their,them,then,there,these,they,this,tis,to,too,twas,us,wants,was,we,were,what,when,where,which,while,who,whom,why,will,with,would,yet,you,your'.split(',')) class AutocompleteData(models.Model): origin = models.CharField(max_length=100, choices=AUTOCOMPLETE_ORIGINS) title = models.CharField(max_length=255) description = models.CharField(max_length=DESCRIPTION_LENGTH, default='') type = models.CharField(max_length=10, choices=AUTOCOMPLETE_TYPES) source = models.CharField(max_length=255, # we need this because MySQL will output 
a warning and break our migrations for greater lengths
                              blank=True, null=True,
                              db_index=True)
    icon = models.CharField(max_length=255, blank=True, null=True)
    changelog = models.ForeignKey(Changelog,
                                  blank=True,
                                  null=True,
                                  related_name='autocomplete')
    score = models.IntegerField(default=0,
                                help_text=('A value from 0 to infinity. '
                                           'Items with bigger values '
                                           'should appear at the top '
                                           'of the suggestions.'))

    __repr__ = make_repr('title')

    def save(self, *args, **kwargs):
        super(AutocompleteData, self).save(*args, **kwargs)
        if self.words.count() == 0:
            self.add_words()

    def add_words(self, db_name='default'):
        if db_name == 'default':
            data = self
        else:
            data = AutocompleteData.objects.using(db_name).get(pk=self.pk)

        words = data.title.split()
        words = (word.strip() for word in words)
        words = set(word.lower() for word in words if len(word) > 3)
        words -= COMMON_WORDS
        words.add(data.title.lower())

        words = [AutocompleteWord2.objects.using(db_name).get_or_create(word=word)[0]
                 for word in words]
        data.words2.add(*words)


class AutocompleteWord(models.Model):
    word = models.CharField(max_length=100, db_index=True)
    data = models.ForeignKey(AutocompleteData, related_name='words')

    __repr__ = make_repr('word')


class AutocompleteWord2(models.Model):
    word = models.CharField(max_length=100, unique=True)
    data_objects = models.ManyToManyField(
        AutocompleteData, related_name='words2')

    __repr__ = make_repr('word')


class AppStoreBatch(models.Model):
    """Identifies a separate processing batch.
    """
    created = models.DateTimeField(auto_now_add=True)

    __repr__ = make_repr()


class AppStoreUrl(models.Model):
    """This model is used when we are fetching data from the App Store
    for our autocomplete.

    Use the management command update_appstore_urls to populate this
    collection.
    """
    # we need this because MySQL will output a warning and break our
    # migrations for greater lengths
    source = models.CharField(max_length=255,
                              blank=True, null=True,
                              unique=True)
    autocomplete_data = models.OneToOneField(AutocompleteData,
                                             blank=True,
                                             null=True,
                                             related_name='appstore_url',
                                             on_delete=models.SET_NULL)
    batch = models.ForeignKey(AppStoreBatch,
                              blank=True, null=True,
                              related_name='urls',
                              on_delete=models.SET_NULL)
    rating = models.FloatField(blank=True, null=True)
    rating_count = models.IntegerField(blank=True, null=True)

    __repr__ = make_repr('source')


class MandrillMessage(models.Model):
    mid = models.CharField(max_length=32, help_text="Mandrill's ID", db_index=True)
    timestamp = models.IntegerField()
    email = models.EmailField()
    user = models.ForeignKey(settings.AUTH_USER_MODEL,
                             related_name='mandrill_messages',
                             on_delete=models.SET_NULL,
                             blank=True,
                             null=True)
    payload = models.TextField()

    __repr__ = make_repr('mid', 'email')<|fim▁end|>
date_joined = models.DateTimeField('date joined', default=timezone.now)
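
The interval math in Changelog.calc_next_update above is easier to see in isolation. Here is a standalone sketch of the same formula (the function name is invented; the constants mirror the method):

import math
import random

def seconds_to_next_update(num_trackers, last_update_took=0):
    hour = 60 * 60
    # more trackers -> shorter interval, floored at one hour
    interval = (48 * hour / 2) / math.log(max(math.e, num_trackers))
    interval = max(hour, interval, 2 * last_update_took)
    # stretch by a random factor between 0.8x and 2x
    return random.randint(int(interval * 0.8), int(interval * 2.0))

# a package with thousands of trackers updates roughly every few hours,
# while a package with a single tracker waits closer to a day or two
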
<|file_name|>binop.rs<|end_file_name|><|fim▁begin|>use std::ops::{Add, Sub, Mul, Div}; /// Binary numeric operators #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum BinOp { /// a + b Add, /// a - b Sub, /// b - a SubSwap, /// a * b Mul, /// a / b Div, /// b / a DivSwap, } impl BinOp { /// a `op` b pub fn eval<A, B, C>(&self, a: A, b: B) -> C where A: Add<B, Output=C>, A: Sub<B, Output=C>, B: Sub<A, Output=C>, A: Mul<B, Output=C>, A: Div<B, Output=C>, B: Div<A, Output=C> { match *self { BinOp::Add => a + b, BinOp::Sub => a - b, BinOp::SubSwap => b - a, BinOp::Mul => a * b, BinOp::Div => a / b, BinOp::DivSwap => b / a, } } /// (a `op` b) == (b `op.swap()` a) pub fn swap(&self) -> BinOp { match *self { BinOp::Add => BinOp::Add, BinOp::Sub => BinOp::SubSwap, BinOp::SubSwap => BinOp::Sub, BinOp::Mul => BinOp::Mul, BinOp::Div => BinOp::DivSwap, BinOp::DivSwap => BinOp::Div, } } /// ((a `op` b) `op.invert()` b) == a pub fn invert(&self) -> BinOp { match *self { BinOp::Add => BinOp::Sub, BinOp::Sub => BinOp::Add,<|fim▁hole|> } } }<|fim▁end|>
BinOp::SubSwap => BinOp::SubSwap, BinOp::Mul => BinOp::Div, BinOp::Div => BinOp::Mul, BinOp::DivSwap => BinOp::DivSwap,
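
The identities documented on swap and invert above can be property-checked outside Rust; here is a small Python rendering (the lookup tables mirror the match arms by hand, they are not generated from the Rust code):

import operator

OPS = {'Add': operator.add, 'Sub': operator.sub,
       'SubSwap': lambda a, b: b - a,
       'Mul': operator.mul, 'Div': operator.truediv,
       'DivSwap': lambda a, b: b / a}
SWAP = {'Add': 'Add', 'Sub': 'SubSwap', 'SubSwap': 'Sub',
        'Mul': 'Mul', 'Div': 'DivSwap', 'DivSwap': 'Div'}
INVERT = {'Add': 'Sub', 'Sub': 'Add', 'SubSwap': 'SubSwap',
          'Mul': 'Div', 'Div': 'Mul', 'DivSwap': 'DivSwap'}

a, b = 12.0, 3.0
for op in OPS:
    assert OPS[op](a, b) == OPS[SWAP[op]](b, a)    # (a op b) == (b op.swap() a)
    assert OPS[INVERT[op]](OPS[op](a, b), b) == a  # ((a op b) op.invert() b) == a
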
<|file_name|>TestValues.cpp<|end_file_name|><|fim▁begin|>// TestValues.cpp : Test XML encoding and decoding of XmlRpcValues. #include <stdlib.h> #include "XmlRpcValue.h" #include <assert.h> #include <iostream> using namespace XmlRpc; void testBoolean() { XmlRpcValue booleanFalse(false); XmlRpcValue booleanTrue(true); int offset = 0; XmlRpcValue booleanFalseXml("<value><boolean>0</boolean></value>", &offset); offset = 0; XmlRpcValue booleanTrueXml("<value><boolean>1</boolean></value>", &offset); assert(booleanFalse != booleanTrue); assert(booleanFalse == booleanFalseXml); assert(booleanFalse != booleanTrueXml); if (bool(booleanFalse)) assert(false); if ( ! bool(booleanTrue)) assert(false); } // Int void testInt() { XmlRpcValue int0(0); XmlRpcValue int1(1); XmlRpcValue int10(10); XmlRpcValue int_1(-1); int offset = 0; XmlRpcValue int0Xml("<value><int>0</int></value>", &offset); offset = 0; XmlRpcValue int9Xml("<value><i4>9</i4></value>", &offset); assert(int0 == int0Xml); assert(int(int10) - int(int1) == int(int9Xml)); assert(9 == int(int9Xml)); assert(int(int10) + int(int_1) == int(int9Xml)); } void testDouble() { // Double XmlRpcValue d(43.7); int offset = 0; XmlRpcValue dXml("<value><double>56.3</double></value>", &offset); assert(double(d) + double(dXml) == 100.0); // questionable practice... } void testString() { // String XmlRpcValue s("Now is the time <&"); char csxml[] = "<value><string>Now is the time &lt;&amp;</string></value>"; std::string ssxml = csxml; int offset = 0; XmlRpcValue vscXml(csxml, &offset); offset = 0; XmlRpcValue vssXml(ssxml, &offset); assert(s == vscXml); assert(s == vssXml); offset = 0; XmlRpcValue fromXml(vssXml.toXml(), &offset); assert(s == fromXml); // Empty or blank strings with no <string> tags std::string emptyStringXml("<value></value>"); offset = 0; XmlRpcValue emptyStringVal1(emptyStringXml, &offset); XmlRpcValue emptyStringVal2(""); assert(emptyStringVal1 == emptyStringVal2); emptyStringXml = "<value> </value>"; offset = 0; XmlRpcValue blankStringVal(emptyStringXml, &offset); assert(std::string(blankStringVal) == " "); } void testDateTime() { // DateTime int offset = 0; XmlRpcValue dateTime("<value><dateTime.iso8601>19040101T03:12:35</dateTime.iso8601></value>", &offset); struct tm &t = dateTime; assert(t.tm_year == 1904 && t.tm_min == 12); } void testArray(XmlRpcValue const& d) { // Array XmlRpcValue a; a.setSize(4); a[0] = 1; a[1] = std::string("two"); a[2] = 43.7; a[3] = "four"; assert(int(a[0]) == 1); assert(a[2] == d); char csaXml[] = "<value><array>\n" " <data>\n" " <value><i4>1</i4></value> \n" " <value> <string>two</string></value>\n" " <value><double>43.7</double></value>\n" " <value>four</value>\n" " </data>\n"<|fim▁hole|> "</array></value>"; int offset = 0; XmlRpcValue aXml(csaXml, &offset); assert(a == aXml); } void testStruct() { // Struct XmlRpcValue struct1; struct1["i4"] = 1; struct1["str"] = "two"; struct1["d"] = 43.7; XmlRpcValue a; a.setSize(4); a[0] = 1; a[1] = std::string("two"); a[2] = 43.7; a[3] = "four"; assert(struct1["d"] == a[2]); char csStructXml[] = "<value><struct>\n" " <member>\n" " <name>i4</name> \n" " <value><i4>1</i4></value> \n" " </member>\n" " <member>\n" " <name>d</name> \n" " <value><double>43.7</double></value>\n" " </member>\n" " <member>\n" " <name>str</name> \n" " <value> <string>two</string></value>\n" " </member>\n" "</struct></value>"; int offset = 0; XmlRpcValue structXml(csStructXml, &offset); assert(struct1 == structXml); XmlRpcValue astruct; astruct["array"] = a; assert(astruct["array"][2] == 
struct1["d"]); for (int i=0; i<10; i++) { XmlRpcValue Event; Event["Name"] = "string"; Event.clear(); const int NELMTS = 100; int ii; for (ii=0; ii< NELMTS; ++ii) { char buf[40]; sprintf(buf,"%d", ii); Event[std::string(buf)] = buf; } Event.clear(); for (ii=0; ii< NELMTS; ++ii) { char buf[40]; sprintf(buf,"%d", ii); if (ii != NELMTS/2) Event[std::string(buf)] = ii; else for (int jj=0; jj< NELMTS; ++jj) { char bufj[40]; sprintf(bufj,"%d", jj); Event[std::string(buf)][std::string(bufj)] = bufj; } } for (ii=0; ii< NELMTS; ++ii) { char buf[40]; sprintf(buf,"%d", ii); if (ii != NELMTS/2) assert(Event[std::string(buf)] == XmlRpcValue(ii)); else assert(Event[std::string(buf)].size() == NELMTS); } } } int main(int argc, char* argv[]) { testBoolean(); testInt(); testDouble(); testString(); testDateTime(); testArray(43.7); testStruct(); return 0; }<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import logging from mimeprovider.documenttype import get_default_document_types from mimeprovider.client import get_default_client from mimeprovider.exceptions import MimeException from mimeprovider.exceptions import MimeBadRequest from mimeprovider.mimerenderer import MimeRenderer from mimeprovider.validators import get_default_validator __all__ = ["MimeProvider"] __version__ = "0.1.5" log = logging.getLogger(__name__) def build_json_ref(request): def json_ref(route, document=None, **kw): ref = dict() ref["$ref"] = request.route_path(route, **kw) rel_default = None if document: rel_default = getattr(document, "object_type", document.__class__.__name__) else: rel_default = route ref["rel"] = kw.pop("rel_", rel_default) return ref return json_ref class MimeProvider(object): def __init__(self, documents=[], **kw): self.renderer_name = kw.get("renderer_name", "mime") self.attribute_name = kw.get("attribute_name", "mime_body") self.error_handler = kw.get("error_handler", None) self.set_default_renderer = kw.get("set_default_renderer", False) self.validator = kw.get("validator") if self.validator is None: self.validator = get_default_validator() types = kw.get("types") if types is None: types = get_default_document_types() if not types: raise ValueError("No document types specified") self.client = kw.get("client") if self.client is None: self.client = get_default_client() self.type_instances = [t() for t in types] self.mimeobjects = dict() self.mimetypes = dict(self._generate_base_mimetypes()) self.error_document_type = kw.get( "error_document_type", self.type_instances[0]) <|fim▁hole|> raise ValueError( ("Object does not have required 'object_type' " "attribute {0!r}").format(document)) def _generate_base_mimetypes(self): """ Generate the base mimetypes as described by non customized document types. """ for t in self.type_instances: if t.custom_mime: continue yield t.mime, (t, None, None) def _generate_document_mimetypes(self, documents): for t in self.type_instances: if not t.custom_mime: continue for o in documents: mimetype = t.mime.format(o=o) validator = None if hasattr(o, "schema"): validator = self.validator(o.schema) m_value = (mimetype, (t, o, validator)) o_value = (o, (t, mimetype, validator)) yield m_value, o_value def register(self, *documents): documents = list(documents) for document in documents: self._validate(document) generator = self._generate_document_mimetypes(documents) for (m, m_value), (o, o_value) in generator: self.mimeobjects.setdefault(o, []).append(o_value) if m not in self.mimetypes: self.mimetypes[m] = m_value continue _, cls, validator = self.mimetypes[m] _, new_cls, validator = m_value raise ValueError( "Conflicting handler for {0}, {1} and {2}".format( m, cls, new_cls)) def get_client(self, *args, **kw): return self.client(self.mimetypes, self.mimeobjects, *args, **kw) def get_mime_body(self, request): if not request.body or not request.content_type: return None result = self.mimetypes.get(request.content_type) if result is None: raise MimeBadRequest( "Unsupported Content-Type: " + request.content_type) document_type, cls, validator = result # the specific document does not support deserialization. 
if not hasattr(cls, "from_data"): raise MimeBadRequest( "Unsupported Content-Type: " + request.content_type) return document_type.parse(validator, cls, request.body) @property def renderer(self): if self.error_handler is None: raise ValueError("No 'error_handler' available") def setup_renderer(helper): return MimeRenderer(self.mimetypes, self.error_document_type, self.error_handler, validator=self.validator) return setup_renderer def add_config(self, config): config.add_renderer(self.renderer_name, self.renderer) if self.set_default_renderer: config.add_renderer(None, self.renderer) config.set_request_property(self.get_mime_body, self.attribute_name, reify=True) config.set_request_property(build_json_ref, "json_ref", reify=True) config.add_view(self.error_handler, context=MimeException, renderer=self.renderer_name)<|fim▁end|>
self.register(*documents) def _validate(self, document): if not hasattr(document, "object_type"):
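
A registration sketch for the MimeProvider record above; the StatusDocument class and its object_type value are invented for illustration, and add_config expects a Pyramid Configurator:

class StatusDocument(object):
    object_type = "status"   # required by _validate above

provider = MimeProvider(documents=[StatusDocument])

# inside a Pyramid app factory, wiring up the renderer and the
# mime_body request property would look roughly like:
#     provider.add_config(config)
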
<|file_name|>TimerPage.js<|end_file_name|><|fim▁begin|>var React = require('react'); var Animation = require('react-addons-css-transition-group'); var NBButton = require('./NBItems').NBButton; var NBTitle = require('./NBItems').NBTitle; var NBEmpty = require('./NBItems').NBEmpty; var Navbar = React.createClass({ render: function() { return ( <div className="navbar"> <div className="navbar-inner"> <NBButton path="/" icon="icon-back" position="left"/> <NBTitle text="Settings"/> <NBEmpty position="right"/> </div> </div> ) } }); var Button = React.createClass({ render: function() { return ( <div> </div> ) } });<|fim▁hole|> }, render: function() { return ( <div className="page"> <div className="page-content"> <Navbar/> <Animation transitionName={{ appear: "slideLeft-enter", leave: "slideLeft-leave" }} transitionEnterTimeout={1000} transitionLeaveTimeout={500} transitionAppearTimeout={500} transitionAppear={true} transitionLeave={true}> <div className="content-block"> Content some text alalala ashdihaish uasodj iioash iodhias </div> </Animation> </div> </div> ) } }); module.exports = PageContent;<|fim▁end|>
var PageContent = React.createClass({ componentWillMount: function() {
<|file_name|>interface.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The html5ever Project Developers. See the // COPYRIGHT file at the top-level directory of this distribution. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use tokenizer::states; use std::borrow::Cow; use string_cache::{Atom, QualName}; use tendril::StrTendril; pub use self::TagKind::{StartTag, EndTag}; pub use self::Token::{DoctypeToken, TagToken, CommentToken, CharacterTokens}; pub use self::Token::{NullCharacterToken, EOFToken, ParseError}; /// A `DOCTYPE` token. // FIXME: already exists in Servo DOM #[derive(PartialEq, Eq, Clone, Debug)] pub struct Doctype { pub name: Option<StrTendril>, pub public_id: Option<StrTendril>, pub system_id: Option<StrTendril>, pub force_quirks: bool, } impl Doctype { pub fn new() -> Doctype { Doctype { name: None, public_id: None, system_id: None,<|fim▁hole|> } } /// A tag attribute. /// /// The namespace on the attribute name is almost always ns!(""). /// The tokenizer creates all attributes this way, but the tree /// builder will adjust certain attribute names inside foreign /// content (MathML, SVG). #[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Debug)] pub struct Attribute { pub name: QualName, pub value: StrTendril, } #[derive(PartialEq, Eq, Hash, Copy, Clone, Debug)] pub enum TagKind { StartTag, EndTag, } /// A tag token. #[derive(PartialEq, Eq, Clone, Debug)] pub struct Tag { pub kind: TagKind, pub name: Atom, pub self_closing: bool, pub attrs: Vec<Attribute>, } impl Tag { /// Are the tags equivalent when we don't care about attribute order? /// Also ignores the self-closing flag. pub fn equiv_modulo_attr_order(&self, other: &Tag) -> bool { if (self.kind != other.kind) || (self.name != other.name) { return false; } let mut self_attrs = self.attrs.clone(); let mut other_attrs = other.attrs.clone(); self_attrs.sort(); other_attrs.sort(); self_attrs == other_attrs } } #[derive(PartialEq, Eq, Debug)] pub enum Token { DoctypeToken(Doctype), TagToken(Tag), CommentToken(StrTendril), CharacterTokens(StrTendril), NullCharacterToken, EOFToken, ParseError(Cow<'static, str>), } // FIXME: rust-lang/rust#22629 unsafe impl Send for Token { } /// Types which can receive tokens from the tokenizer. pub trait TokenSink { /// Process a token. fn process_token(&mut self, token: Token); /// Used in the markup declaration open state. By default, this always /// returns false and thus all CDATA sections are tokenized as bogus /// comments. /// https://html.spec.whatwg.org/multipage/#markup-declaration-open-state fn adjusted_current_node_present_but_not_in_html_namespace(&self) -> bool { false } /// The tokenizer will call this after emitting any tag. /// This allows the tree builder to change the tokenizer's state. /// By default no state changes occur. fn query_state_change(&mut self) -> Option<states::State> { None } }<|fim▁end|>
force_quirks: false, }
<|file_name|>config.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import os, argparse
from flask import Flask

parser = argparse.ArgumentParser() # set up the arguments accepted from the command line
parser.add_argument("--port", default=7000, type=int, help='Port to listen')
parser.add_argument("--hash-algo", default='sha1', type=str, help='Hashing algorithm to use')
parser.add_argument("--content-dir", default='UPLOADS', type=str, help='Enable folder to upload')
parser.add_argument("--secret", default='d41d8cd98f00b204e9800998ecf8427e', type=str, help='secret key')
args = parser.parse_args()

port = args.port # read the parameters received from the command line
hash_algo = args.hash_algo
content_dir = args.content_dir
secret = args.secret

BASE_DIR = os.path.abspath('.')

if not os.path.exists(os.path.join(BASE_DIR, content_dir)): # if the 'UPLOADS' folder that will store all uploads does not exist, create it
    os.mkdir(content_dir)

app = Flask(__name__)<|fim▁hole|><|fim▁end|>
app.config['UPLOAD_FOLDER'] = os.path.join(BASE_DIR, content_dir)
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024 # maximum size of uploaded files (16 MB)
<|file_name|>GeneralSettings.tsx<|end_file_name|><|fim▁begin|>import React, { useState } from 'react'; import { connect, ConnectedProps } from 'react-redux'; import { TimeZone } from '@grafana/data'; import { CollapsableSection, Field, Input, RadioButtonGroup, TagsInput } from '@grafana/ui'; import { selectors } from '@grafana/e2e-selectors'; import { FolderPicker } from 'app/core/components/Select/FolderPicker'; import { DashboardModel } from '../../state/DashboardModel'; import { DeleteDashboardButton } from '../DeleteDashboard/DeleteDashboardButton'; import { TimePickerSettings } from './TimePickerSettings'; import { updateTimeZoneDashboard, updateWeekStartDashboard } from 'app/features/dashboard/state/actions'; import { PreviewSettings } from './PreviewSettings'; import { config } from '@grafana/runtime'; interface OwnProps { dashboard: DashboardModel; } export type Props = OwnProps & ConnectedProps<typeof connector>; const GRAPH_TOOLTIP_OPTIONS = [ { value: 0, label: 'Default' }, { value: 1, label: 'Shared crosshair' }, { value: 2, label: 'Shared Tooltip' }, ]; export function GeneralSettingsUnconnected({ dashboard, updateTimeZone, updateWeekStart }: Props): JSX.Element { const [renderCounter, setRenderCounter] = useState(0); const onFolderChange = (folder: { id: number; title: string }) => { dashboard.meta.folderId = folder.id; dashboard.meta.folderTitle = folder.title; dashboard.meta.hasUnsavedFolderChange = true; }; const onBlur = (event: React.FocusEvent<HTMLInputElement>) => { dashboard[event.currentTarget.name as 'title' | 'description'] = event.currentTarget.value; }; const onTooltipChange = (graphTooltip: number) => { dashboard.graphTooltip = graphTooltip; setRenderCounter(renderCounter + 1); }; const onRefreshIntervalChange = (intervals: string[]) => { dashboard.timepicker.refresh_intervals = intervals.filter((i) => i.trim() !== ''); }; const onNowDelayChange = (nowDelay: string) => { dashboard.timepicker.nowDelay = nowDelay; }; const onHideTimePickerChange = (hide: boolean) => { dashboard.timepicker.hidden = hide; setRenderCounter(renderCounter + 1); }; const onLiveNowChange = (v: boolean) => { dashboard.liveNow = v; setRenderCounter(renderCounter + 1); }; const onTimeZoneChange = (timeZone: TimeZone) => { dashboard.timezone = timeZone; setRenderCounter(renderCounter + 1); updateTimeZone(timeZone); }; const onWeekStartChange = (weekStart: string) => { dashboard.weekStart = weekStart; setRenderCounter(renderCounter + 1); updateWeekStart(weekStart); }; const onTagsChange = (tags: string[]) => { dashboard.tags = tags; setRenderCounter(renderCounter + 1); }; const onEditableChange = (value: boolean) => { dashboard.editable = value; setRenderCounter(renderCounter + 1); }; const editableOptions = [ { label: 'Editable', value: true }, { label: 'Read-only', value: false }, ]; return ( <div style={{ maxWidth: '600px' }}> <h3 className="dashboard-settings__header" aria-label={selectors.pages.Dashboard.Settings.General.title}> General </h3> <div className="gf-form-group"> <Field label="Name"> <Input id="title-input" name="title" onBlur={onBlur} defaultValue={dashboard.title} /> </Field> <Field label="Description"> <Input id="description-input" name="description" onBlur={onBlur} defaultValue={dashboard.description} /> </Field> <Field label="Tags"> <TagsInput id="tags-input" tags={dashboard.tags} onChange={onTagsChange} /> </Field> <Field label="Folder"> <FolderPicker inputId="dashboard-folder-input" initialTitle={dashboard.meta.folderTitle} initialFolderId={dashboard.meta.folderId} 
onChange={onFolderChange} enableCreateNew={true} dashboardId={dashboard.id} skipInitialLoad={true} /> </Field> <Field label="Editable" description="Set to read-only to disable all editing. Reload the dashboard for changes to take effect" > <RadioButtonGroup value={dashboard.editable} options={editableOptions} onChange={onEditableChange} /> </Field> </div> {config.featureToggles.dashboardPreviews && config.featureToggles.dashboardPreviewsAdmin && ( <PreviewSettings uid={dashboard.uid} /> )} <TimePickerSettings onTimeZoneChange={onTimeZoneChange} onWeekStartChange={onWeekStartChange} onRefreshIntervalChange={onRefreshIntervalChange} onNowDelayChange={onNowDelayChange} onHideTimePickerChange={onHideTimePickerChange} onLiveNowChange={onLiveNowChange} refreshIntervals={dashboard.timepicker.refresh_intervals} timePickerHidden={dashboard.timepicker.hidden} nowDelay={dashboard.timepicker.nowDelay} timezone={dashboard.timezone} weekStart={dashboard.weekStart} liveNow={dashboard.liveNow} /> <CollapsableSection label="Panel options" isOpen={true}> <Field label="Graph tooltip" description="Controls tooltip and hover highlight behavior across different panels" > <RadioButtonGroup onChange={onTooltipChange} options={GRAPH_TOOLTIP_OPTIONS} value={dashboard.graphTooltip} /> </Field> </CollapsableSection> <div className="gf-form-button-row"> {dashboard.meta.canDelete && <DeleteDashboardButton dashboard={dashboard} />} </div> </div> ); } const mapDispatchToProps = {<|fim▁hole|> updateWeekStart: updateWeekStartDashboard, }; const connector = connect(null, mapDispatchToProps); export const GeneralSettings = connector(GeneralSettingsUnconnected);<|fim▁end|>
updateTimeZone: updateTimeZoneDashboard,
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate rustc_serialize; use rustc_serialize::json; #[derive(RustcDecodable, RustcEncodable)] struct Command { cid: u64, uid: u32, target: String, } fn get_string(cmd: &Command) -> String {<|fim▁hole|> let bs = Command{ cid: 0, uid: 1, target: "something".to_string() }; println!("So guess what?: {}", get_string(&bs)); } #[test] fn some_test() { assert_eq!( get_string( &Command{cid: 0, uid: 1, target: "cat".to_string()} ), "{\"cid\":0,\"uid\":1,\"target\":\"cat\"}"); } #[test] #[should_panic] fn some_fail() { assert!(false); }<|fim▁end|>
return json::encode(&cmd).unwrap(); } fn main(){
<|file_name|>color.py<|end_file_name|><|fim▁begin|>#Written by: Karim shoair - D4Vinci ( Dr0p1t-Framework )
import sys
from os.path import *
global G, Y, B, R, W , M , C , end

def set_colors():
    global G, Y, B, R, W , M , C , end
    if sys.platform.startswith('win'):
        # Windows deserves coloring too :D
        try:
            import win_unicode_console , colorama
            win_unicode_console.enable()
            colorama.init() #Now the unicode will work ^_^
            G = '\033[92m' # green
            Y = '\033[93m' # yellow
            B = '\033[94m' # blue
            R = '\033[91m' # red
            W = '\033[0m' # white
            M = '\x1b[35m' # magenta
            C = '\x1b[36m' # cyan
            end = '\33[97m'
        except:
            #print("[!] Error: Coloring libraries not installed, no coloring will be used [Check the readme]")
            G = Y = B = R = W = M = C = end = ''
    else:
        G = '\033[92m' # green
        Y = '\033[93m' # yellow
        B = '\033[94m' # blue
        R = '\033[91m' # red
        W = '\033[0m' # white
        M = '\x1b[35m' # magenta
        C = '\x1b[36m' # cyan
        end = '\33[97m'

set_colors()

# Console Colors
def colored_print(text,color):
    global G, Y, B, R, W , M , C , end
    def cprint(text,color,end=end):
        print(color+text+end)
    if color.lower()=="g":color=G
    elif color.lower()=="y":color=Y
    elif color.lower()=="b":color=B
    elif color.lower()=="r":color=R
    elif color.lower()=="w":color=W
    elif color.lower()=="m":color=M
    elif color.lower()=="c":color=C
    cprint( text, color, end )

def print_banner(banner,info,c1,c2):
    global G, Y, B, R, W , M , C , end
    end = '\33[97m'
    def cprint(text,info,c1,c2):
        print(c1+text+end)
        print(c2+info+end)
    cprint( banner,info,c1,c2 )

def warn():
    global G, Y, B, R, W , M , C , end
    return '''{}  # Disclaimer Alert #{}
 Dr0p1t Framework is not responsible for misuse or illegal purposes.
{} Use it only for {}work{} or {} educational purposes {} !!!'''.format(R,B,Y,R,Y,R,W)

def print_status(args):
    global G, Y, B, R, W , M , C , end
    set_colors() # re-run because of some illogical error on some users' devices :3
    if args.s:
        c1,a = G," Loaded "
    else:
        c1,a = R,"Unloaded"
    if args.t:
        c2,b = G," Loaded "
    else:
        c2,b = R,"Unloaded"
    if args.k:
        c3,c = G," Loaded "
    else:
        c3,c = R,"Unloaded"
    if args.b:
        c4,d = G," Loaded "
        cx1,bat = M,args.b
    else:
        c4,d = R,"Unloaded"
        cx1,bat = Y,"None"
    if args.p:
        c5,e = G," Loaded "
        cx2,ps1 = M,args.p
    else:
        c5,e = R,"Unloaded"
        cx2,ps1 = Y,"None"
    if args.v:
        c6,f = G," Loaded "<|fim▁hole|>
        cx3,vbs = M,args.v
    else:
        c6,f = R,"Unloaded"
        cx3,vbs = Y,"None"
    if args.upx:
        c7,g = G," Loaded "
    else:
        c7,g = R,"Unloaded"
    if args.nouac:
        c8,h = G," Loaded "
    else:
        c8,h = R,"Unloaded"
    if args.a:
        c9,i = G," Loaded "
    else:
        c9,i = R,"Unloaded"
    if args.runas:
        c10,j = G," Loaded "
    else:
        c10,j = R,"Unloaded"
    if args.spoof:
        c11,k = G," Loaded "
        cx4,ext=M,args.spoof
    else:
        c11,k = R,"Unloaded"
        cx4,ext=Y,"None"
    if args.i:
        c12,l = G," Loaded "
        cx5,ico=M,args.i
    else:
        c12,l = R,"Unloaded"
        cx5,ico=Y,args.i
    print("\n"+Y+"[+] "+W+"Malware url : "+B+"%s"%args.url+W+
    "\n"+Y+"\n[+] "+W+"Modules :"+
    "\n\tStartup persistence\t: "+c1+"[%s]"%a+W+
    "\n\tTask persistence\t: "+c2+"[%s]"%b+W+
    "\n\tPowershell persistence\t: "+c9+"[%s]"%i+W+
    "\n\tKill antivirus\t\t: "+c3+"[%s]"%c+W+
    "\n\tDisable UAC\t\t: "+c8+"[%s]"%h+W+
    "\n\tRun as admin\t\t: "+c10+"[%s]"%j+W+
    "\n\tCompress with UPX\t: "+c7+"[%s]"%g+W+
    "\n"+Y+"\n[+] "+W+"Scripts :"+
    "\n\tBAT file : "+cx1+"%s"%bat+W+
    "\n\tPS1 file : "+cx2+"%s"%ps1+W+
    "\n\tVBS file : "+cx3+"%s"%vbs+W+"\n"+
    "\n"+Y+"\n[+] "+W+"Spoofing :"+
    "\n\tIcon spoof \t: "+cx5+"%s"%ico+W+
    "\n\tExtension spoof : "+cx4+"%s"%ext+W+"\n"
    )<|fim▁end|>