prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>protobuf-bin-gen-rust.rs<|end_file_name|><|fim▁begin|>#![crate_type = "bin"]
#![feature(globs)]
extern crate protobuf;
extern crate getopts;
use std::io::fs::*;
use std::io::Reader;
use std::io::Writer;
use std::path::Path;
use std::os;
use protobuf::parse_from_reader;
use protobuf::descriptor::*;
use protobuf::codegen::*;
fn write_file(bin: &str, gen_options: &GenOptions) {
let mut is = File::open(&Path::new(bin)).unwrap();
let fds = parse_from_reader::<FileDescriptorSet>(&mut is as &mut Reader).unwrap();
let file_names: Vec<String> = fds.get_file().iter()
.map(|f| f.get_name().to_string())
.collect();
let results = gen(fds.get_file(), file_names.as_slice(), gen_options);
for r in results.iter() {
let mut file_writer = File::create(&Path::new(r.name.as_slice())).unwrap();
file_writer.write(r.content.as_slice()).unwrap();
}
}
fn main() {
let args = os::args();
let opts = vec!();
let matches = getopts::getopts(args.tail(), opts.as_slice()).unwrap();
let pb_bin = match matches.free.as_slice() {
[ref pb_bin] => pb_bin.to_string(),<|fim▁hole|> };
let gen_options = GenOptions {
dummy: false,
};
write_file(pb_bin.as_slice(), &gen_options);
}<|fim▁end|> | _ => panic!("must have exactly one argument") |
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for Dropzone 5.0.0
// Project: http://www.dropzonejs.com/
// Definitions by: Natan Vivo <https://github.com/nvivo>, Andy Hawkins <https://github.com/a904guy/,http://a904guy.com/,http://www.bmbsqd.com>, Vasya Aksyonov <https://github.com/outring>, Simon Huber <https://github.com/renuo>, Sebastiaan de Rooij <https://github.com/Hikariii>, Ted Bicknell <https://github.com/tedbcsgpro>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 2.3
/// <reference types="jquery"/>
declare namespace Dropzone {
export interface DropzoneResizeInfo {
srcX?: number;
srcY?: number;
trgX?: number;
trgY?: number;
srcWidth?: number;
srcHeight?: number;
trgWidth?: number;
trgHeight?: number;
}
export interface DropzoneFile extends File {
previewElement: HTMLElement;
previewTemplate: HTMLElement;
previewsContainer: HTMLElement;
status: string;
accepted: boolean;
xhr?: XMLHttpRequest;
}
export interface DropzoneDictFileSizeUnits {
tb?: string;
gb?: string;
mb?: string;
kb?: string;
b?: string;
}
export interface DropzoneOptions {
url?: string;
method?: string;
withCredentials?: boolean;
timeout?: number;
parallelUploads?: number;
uploadMultiple?: boolean;
chunking?: boolean;
forceChunking?: boolean;
chunkSize?: number;
parallelChunkUploads?: boolean;
retryChunks?: boolean;
retryChunksLimit?: number;
maxFilesize?: number;
paramName?: string;
createImageThumbnails?: boolean;
maxThumbnailFilesize?: number;
thumbnailWidth?: number;
thumbnailHeight?: number;
thumbnailMethod?: string;
resizeWidth?: number;
resizeHeight?: number;
resizeMimeType?: string;
resizeQuality?: number;<|fim▁hole|> maxFiles?: number;
params?: {};
headers?: {};
clickable?: boolean | string | HTMLElement | (string | HTMLElement)[];
ignoreHiddenFiles?: boolean;
acceptedFiles?: string;
renameFilename?(name: string): string;
autoProcessQueue?: boolean;
autoQueue?: boolean;
addRemoveLinks?: boolean;
previewsContainer?: boolean | string | HTMLElement;
hiddenInputContainer?: HTMLElement;
capture?: string;
dictDefaultMessage?: string;
dictFallbackMessage?: string;
dictFallbackText?: string;
dictFileTooBig?: string;
dictInvalidFileType?: string;
dictResponseError?: string;
dictCancelUpload?: string;
dictCancelUploadConfirmation?: string;
dictRemoveFile?: string;
dictRemoveFileConfirmation?: string;
dictMaxFilesExceeded?: string;
dictFileSizeUnits?: DropzoneDictFileSizeUnits;
accept?(file: DropzoneFile, done: (error?: string | Error) => void): void;
chunksUploaded?(file: DropzoneFile, done: (error?: string | Error) => void): void;
init?(): void;
forceFallback?: boolean;
fallback?(): void;
resize?(file: DropzoneFile, width?: number, height?: number, resizeMethod?: string): DropzoneResizeInfo;
drop?(e: DragEvent): void;
dragstart?(e: DragEvent): void;
dragend?(e: DragEvent): void;
dragenter?(e: DragEvent): void;
dragover?(e: DragEvent): void;
dragleave?(e: DragEvent): void;
paste?(e: DragEvent): void;
reset?(): void;
addedfile?(file: DropzoneFile): void;
addedfiles?(files: DropzoneFile[]): void;
removedfile?(file: DropzoneFile): void;
thumbnail?(file: DropzoneFile, dataUrl: string): void;
error?(file: DropzoneFile, message: string | Error, xhr: XMLHttpRequest): void;
errormultiple?(files: DropzoneFile[], message: string | Error, xhr: XMLHttpRequest): void;
processing?(file: DropzoneFile): void;
processingmultiple?(files: DropzoneFile[]): void;
uploadprogress?(file: DropzoneFile, progress: number, bytesSent: number): void;
totaluploadprogress?(totalProgress: number, totalBytes: number, totalBytesSent: number): void;
sending?(file: DropzoneFile, xhr: XMLHttpRequest, formData: FormData): void;
sendingmultiple?(files: DropzoneFile[], xhr: XMLHttpRequest, formData: FormData): void;
success?(file: DropzoneFile, response: Object | string): void;
successmultiple?(files: DropzoneFile[], responseText: string): void;
canceled?(file: DropzoneFile): void;
canceledmultiple?(file: DropzoneFile[]): void;
complete?(file: DropzoneFile): void;
completemultiple?(file: DropzoneFile[]): void;
maxfilesexceeded?(file: DropzoneFile): void;
maxfilesreached?(files: DropzoneFile[]): void;
queuecomplete?(): void;
previewTemplate?: string;
}
}
declare class Dropzone {
constructor(container: string | HTMLElement, options?: Dropzone.DropzoneOptions);
static autoDiscover: boolean;
static options: any;
static confirm: (question: string, accepted: () => void, rejected?: () => void) => void;
static createElement(string: string): HTMLElement;
static isBrowserSupported(): boolean;
static instances: Dropzone[];
static ADDED: string;
static QUEUED: string;
static ACCEPTED: string;
static UPLOADING: string;
static PROCESSING: string;
static CANCELED: string;
static ERROR: string;
static SUCCESS: string;
files: Dropzone.DropzoneFile[];
defaultOptions: Dropzone.DropzoneOptions;
enable(): void;
disable(): void;
destroy(): Dropzone;
addFile(file: Dropzone.DropzoneFile): void;
removeFile(file: Dropzone.DropzoneFile): void;
removeAllFiles(cancelIfNecessary?: boolean): void;
resizeImage(file: Dropzone.DropzoneFile, width?: number, height?: number, resizeMethod?: string, callback?: (...args: any[]) => void): void;
processQueue(): void;
cancelUpload(file: Dropzone.DropzoneFile): void;
createThumbnail(file: Dropzone.DropzoneFile, width?: number, height?: number, resizeMethod?: string, fixOrientation?: boolean, callback?: (...args: any[]) => void): any;
createThumbnailFromUrl(file: Dropzone.DropzoneFile, width?: number, height?: number, resizeMethod?: string, fixOrientation?: boolean, callback?: (...args: any[]) => void, crossOrigin?: string): any;
processFiles(files: Dropzone.DropzoneFile[]): void;
processFile(file: Dropzone.DropzoneFile): void;
uploadFile(file: Dropzone.DropzoneFile): void;
uploadFiles(files: Dropzone.DropzoneFile[]): void;
getAcceptedFiles(): Dropzone.DropzoneFile[];
getActiveFiles(): Dropzone.DropzoneFile[];
getAddedFiles(): Dropzone.DropzoneFile[];
getRejectedFiles(): Dropzone.DropzoneFile[];
getQueuedFiles(): Dropzone.DropzoneFile[];
getUploadingFiles(): Dropzone.DropzoneFile[];
accept(file: Dropzone.DropzoneFile, done: (error?: string | Error) => void): void;
getActiveFiles(): Dropzone.DropzoneFile[];
getFilesWithStatus(status: string): Dropzone.DropzoneFile[];
enqueueFile(file: Dropzone.DropzoneFile): void;
enqueueFiles(file: Dropzone.DropzoneFile[]): void;
createThumbnail(file: Dropzone.DropzoneFile, callback?: (...args: any[]) => void): any;
createThumbnailFromUrl(file: Dropzone.DropzoneFile, url: string, callback?: (...args: any[]) => void): any;
on(eventName: string, callback: (...args: any[]) => void): Dropzone;
off(): Dropzone;
off(eventName: string, callback?: (...args: any[]) => void): Dropzone;
emit(eventName: string, ...args: any[]): Dropzone;
on(eventName: "drop", callback: (e: DragEvent) => any): Dropzone;
on(eventName: "dragstart", callback: (e: DragEvent) => any): Dropzone;
on(eventName: "dragend", callback: (e: DragEvent) => any): Dropzone;
on(eventName: "dragenter", callback: (e: DragEvent) => any): Dropzone;
on(eventName: "dragover", callback: (e: DragEvent) => any): Dropzone;
on(eventName: "dragleave", callback: (e: DragEvent) => any): Dropzone;
on(eventName: "paste", callback: (e: DragEvent) => any): Dropzone;
on(eventName: "reset"): Dropzone;
on(eventName: "addedfile", callback: (file: Dropzone.DropzoneFile) => any): Dropzone;
on(eventName: "addedfiles", callback: (files: Dropzone.DropzoneFile[]) => any): Dropzone;
on(eventName: "removedfile", callback: (file: Dropzone.DropzoneFile) => any): Dropzone;
on(eventName: "thumbnail", callback: (file: Dropzone.DropzoneFile, dataUrl: string) => any): Dropzone;
on(eventName: "error", callback: (file: Dropzone.DropzoneFile, message: string | Error) => any): Dropzone;
on(eventName: "errormultiple", callback: (files: Dropzone.DropzoneFile[], message: string | Error) => any): Dropzone;
on(eventName: "processing", callback: (file: Dropzone.DropzoneFile) => any): Dropzone;
on(eventName: "processingmultiple", callback: (files: Dropzone.DropzoneFile[]) => any): Dropzone;
on(eventName: "uploadprogress", callback: (file: Dropzone.DropzoneFile, progress: number, bytesSent: number) => any): Dropzone;
on(eventName: "totaluploadprogress", callback: (totalProgress: number, totalBytes: number, totalBytesSent: number) => any): Dropzone;
on(eventName: "sending", callback: (file: Dropzone.DropzoneFile, xhr: XMLHttpRequest, formData: FormData) => any): Dropzone;
on(eventName: "sendingmultiple", callback: (files: Dropzone.DropzoneFile[], xhr: XMLHttpRequest, formData: FormData) => any): Dropzone;
on(eventName: "success", callback: (file: Dropzone.DropzoneFile) => any): Dropzone;
on(eventName: "successmultiple", callback: (files: Dropzone.DropzoneFile[]) => any): Dropzone;
on(eventName: "canceled", callback: (file: Dropzone.DropzoneFile) => any): Dropzone;
on(eventName: "canceledmultiple", callback: (file: Dropzone.DropzoneFile[]) => any): Dropzone;
on(eventName: "complete", callback: (file: Dropzone.DropzoneFile) => any): Dropzone;
on(eventName: "completemultiple", callback: (file: Dropzone.DropzoneFile[]) => any): Dropzone;
on(eventName: "maxfilesexceeded", callback: (file: Dropzone.DropzoneFile) => any): Dropzone;
on(eventName: "maxfilesreached", callback: (files: Dropzone.DropzoneFile[]) => any): Dropzone;
on(eventName: "queuecomplete"): Dropzone;
emit(eventName: "drop", e: DragEvent): Dropzone;
emit(eventName: "dragstart", e: DragEvent): Dropzone;
emit(eventName: "dragend", e: DragEvent): Dropzone;
emit(eventName: "dragenter", e: DragEvent): Dropzone;
emit(eventName: "dragover", e: DragEvent): Dropzone;
emit(eventName: "dragleave", e: DragEvent): Dropzone;
emit(eventName: "paste", e: DragEvent): Dropzone;
emit(eventName: "reset"): Dropzone;
emit(eventName: "addedfile", file: Dropzone.DropzoneFile): Dropzone;
emit(eventName: "addedfiles", files: Dropzone.DropzoneFile[]): Dropzone;
emit(eventName: "removedfile", file: Dropzone.DropzoneFile): Dropzone;
emit(eventName: "thumbnail", file: Dropzone.DropzoneFile, dataUrl: string): Dropzone;
emit(eventName: "error", file: Dropzone.DropzoneFile, message: string | Error): Dropzone;
emit(eventName: "errormultiple", files: Dropzone.DropzoneFile[], message: string | Error): Dropzone;
emit(eventName: "processing", file: Dropzone.DropzoneFile): Dropzone;
emit(eventName: "processingmultiple", files: Dropzone.DropzoneFile[]): Dropzone;
emit(eventName: "uploadprogress", file: Dropzone.DropzoneFile, progress: number, bytesSent: number): Dropzone;
emit(eventName: "totaluploadprogress", totalProgress: number, totalBytes: number, totalBytesSent: number): Dropzone;
emit(eventName: "sending", file: Dropzone.DropzoneFile, xhr: XMLHttpRequest, formData: FormData): Dropzone;
emit(eventName: "sendingmultiple", files: Dropzone.DropzoneFile[], xhr: XMLHttpRequest, formData: FormData): Dropzone;
emit(eventName: "success", file: Dropzone.DropzoneFile): Dropzone;
emit(eventName: "successmultiple", files: Dropzone.DropzoneFile[]): Dropzone;
emit(eventName: "canceled", file: Dropzone.DropzoneFile): Dropzone;
emit(eventName: "canceledmultiple", file: Dropzone.DropzoneFile[]): Dropzone;
emit(eventName: "complete", file: Dropzone.DropzoneFile): Dropzone;
emit(eventName: "completemultiple", file: Dropzone.DropzoneFile[]): Dropzone;
emit(eventName: "maxfilesexceeded", file: Dropzone.DropzoneFile): Dropzone;
emit(eventName: "maxfilesreached", files: Dropzone.DropzoneFile[]): Dropzone;
emit(eventName: "queuecomplete"): Dropzone;
}
interface JQuery {
dropzone(options: Dropzone.DropzoneOptions): Dropzone;
}
export = Dropzone;
export as namespace Dropzone;<|fim▁end|> | resizeMethod?: string;
filesizeBase?: number; |
<|file_name|>california_housing.py<|end_file_name|><|fim▁begin|>"""California housing dataset.
The original database is available from StatLib
http://lib.stat.cmu.edu/
The data contains 20,640 observations on 9 variables.
This dataset contains the average house value as target variable
and the following input variables (features): average income,
housing average age, average rooms, average bedrooms, population,
average occupation, latitude, and longitude in that order.
References
----------
Pace, R. Kelley and Ronald Barry, Sparse Spatial Autoregressions,
Statistics and Probability Letters, 33 (1997) 291-297.
"""
# Authors: Peter Prettenhofer
# License: BSD 3 clause
from io import BytesIO
from os.path import join, exists
from os import makedirs
from zipfile import ZipFile
try:
# Python 2
from urllib2 import urlopen
except ImportError:
# Python 3+
from urllib.request import urlopen
import numpy as np
from .base import get_data_home, Bunch
from ..externals import joblib
DATA_URL = "http://lib.stat.cmu.edu/modules.php?op=modload&name=Downloads&"\
"file=index&req=getit&lid=83"
TARGET_FILENAME = "cal_housing.pkz"
# Grab the module-level docstring to use as a description of the
# dataset
MODULE_DOCS = __doc__
def fetch_california_housing(data_home=None, download_if_missing=True):
"""Loader for the California housing dataset from StatLib.
Parameters
----------
data_home : optional, default: None
Specify another download and cache folder for the datasets. By default
all scikit learn data is stored in '~/scikit_learn_data' subfolders.
download_if_missing: optional, True by default
If False, raise a IOError if the data is not locally available
instead of trying to download the data from the source site.
Returns
-------
dataset : dict-like object with the following attributes:
dataset.data : ndarray, shape [20640, 8]
Each row corresponding to the 8 feature values in order.
dataset.target : numpy array of shape (20640,)
Each value corresponds to the average house value in units of 100,000.
dataset.feature_names : array of length 8
Array of ordered feature names used in the dataset.<|fim▁hole|>
dataset.DESCR : string
Description of the California housing dataset.
Notes
------
This dataset consists of 20,640 samples and 9 features.
"""
data_home = get_data_home(data_home=data_home)
if not exists(data_home):
makedirs(data_home)
if not exists(join(data_home, TARGET_FILENAME)):
print('downloading Cal. housing from %s to %s' % (DATA_URL, data_home))
fhandle = urlopen(DATA_URL)
buf = BytesIO(fhandle.read())
zip_file = ZipFile(buf)
try:
cadata_fd = zip_file.open('cadata.txt', 'r')
cadata = BytesIO(cadata_fd.read())
# skip the first 27 lines (documentation)
cal_housing = np.loadtxt(cadata, skiprows=27)
joblib.dump(cal_housing, join(data_home, TARGET_FILENAME),
compress=6)
finally:
zip_file.close()
else:
cal_housing = joblib.load(join(data_home, TARGET_FILENAME))
feature_names = ["MedInc", "HouseAge", "AveRooms", "AveBedrms",
"Population", "AveOccup", "Latitude", "Longitude"]
target, data = cal_housing[:, 0], cal_housing[:, 1:]
# avg rooms = total rooms / households
data[:, 2] /= data[:, 5]
# avg bed rooms = total bed rooms / households
data[:, 3] /= data[:, 5]
# avg occupancy = population / housholds
data[:, 5] = data[:, 4] / data[:, 5]
# target in units of 100,000
target = target / 100000.0
return Bunch(data=data,
target=target,
feature_names=feature_names,
DESCR=MODULE_DOCS)<|fim▁end|> | |
<|file_name|>BookEdit.js<|end_file_name|><|fim▁begin|>import React from 'react';
import HomeLayout from '../layouts/HomeLayout';
import BookEditor from '../components/BookEditor';
import { get } from '../utils/request';
class BookEdit extends React.Component {
constructor(props) {
super(props);<|fim▁hole|> this.state = {
book: null
};
}
componentWillMount() {
const bookId = this.context.router.params.id;
get('http://localhost:3000/book/' + bookId)
.then(res => {
this.setState({
book: res
});
});
}
render() {
const { book } = this.state;
return book ? <BookEditor editTarget={book} /> : <span>加载中...</span>;
}
}
BookEdit.contextTypes = {
router: React.PropTypes.object.isRequired
};
export default BookEdit;<|fim▁end|> | |
<|file_name|>imported-xml-component.spec.ts<|end_file_name|><|fim▁begin|>import { expect } from "chai";
import { Element, xml2js } from "xml-js";
import { EMPTY_OBJECT, ImportedXmlComponent } from "./";
import { IContext } from "./base";
import { convertToXmlComponent } from "./imported-xml-component";
const xmlString = `
<w:p w:one="value 1" w:two="value 2">
<w:rPr>
<w:noProof>some value</w:noProof>
</w:rPr>
<w:r active="true">
<w:t>Text 1</w:t>
</w:r>
<w:r active="true">
<w:t>Text 2</w:t>
</w:r>
</w:p>
`;
const convertedXmlElement = {
root: [
{
rootKey: "w:p",
root: [
{ rootKey: "_attr", root: { "w:one": "value 1", "w:two": "value 2" } },
{ rootKey: "w:rPr", root: [{ rootKey: "w:noProof", root: ["some value"] }] },
{
rootKey: "w:r",
root: [
{ rootKey: "_attr", root: { active: "true" } },
{ rootKey: "w:t", root: ["Text 1"] },
],
},
{
rootKey: "w:r",
root: [
{ rootKey: "_attr", root: { active: "true" } },
{ rootKey: "w:t", root: ["Text 2"] },
],
},
],
},
],
rootKey: undefined,
};
describe("ImportedXmlComponent", () => {
let importedXmlComponent: ImportedXmlComponent;
beforeEach(() => {
const attributes = {
someAttr: "1",
otherAttr: "2",
};<|fim▁hole|> importedXmlComponent.push(new ImportedXmlComponent("w:child"));
});
describe("#prepForXml()", () => {
it("should transform for xml", () => {
// tslint:disable-next-line: no-object-literal-type-assertion
const converted = importedXmlComponent.prepForXml({} as IContext);
expect(converted).to.deep.equal({
"w:test": [
{
_attr: {
someAttr: "1",
otherAttr: "2",
},
},
{
"w:child": EMPTY_OBJECT,
},
],
});
});
});
it("should create XmlComponent from xml string", () => {
const converted = ImportedXmlComponent.fromXmlString(xmlString);
expect(converted).to.deep.equal(convertedXmlElement);
});
describe("convertToXmlComponent", () => {
it("should convert to xml component", () => {
const xmlObj = xml2js(xmlString, { compact: false }) as Element;
const converted = convertToXmlComponent(xmlObj);
expect(converted).to.deep.equal(convertedXmlElement);
});
});
});<|fim▁end|> | importedXmlComponent = new ImportedXmlComponent("w:test", attributes); |
<|file_name|>data.py<|end_file_name|><|fim▁begin|>from lazyrunner import pmodule, PModule, preset, presetTree, defaults
from treedict import TreeDict
p = defaults()
p.data_defaults.a = 1
p.data_defaults.b = 2
@preset
def change_default_a(p):
p.data_defaults.a = 10
@pmodule
class Data(PModule):
# Use this to set up the local branch of the preset tree. Calling
# defaults() requests the local branch of the
p = defaults()
p.x = 1
@preset
def set_X_2(p):
p.x = 2
@preset
def set_X(p, x = 2):
p.x = x
# The current version of the pmodule. The caching facilities
# assume results are different between different versions.
version = 0.01
# Include dependencies here; alternatively, these may be given as
# class methods, optionally accepting the parameter tree, to
# provide parameter-dependent dependency checking. See
# documentation for more info.
parameter_dependencies = ['data_defaults']
result_dependencies = []
module_dependencies = []
# If true, the results are never saved or loaded from the cache.
# Switch to True once the module is tested.
disable_result_caching = True
def setup(self):
# Setup the Pmodule. Called whenever the module is created.
pass
def run(self):
# Run the module and return a TreeDict instance holding the
# results. Note that this function is not necessarily called
# if the results can be loaded from cache
x = self.p.x
self.log.info("The value of X is %d." % self.p.x)
return TreeDict(x = self.p.x,
a = self.parameters.data_defaults.a,
b = self.parameters.data_defaults.b<|fim▁hole|> def reportResult(cls, parameters, p, r):
# Report on results, even if they are loaded from
# cache. `parameters` is the full parameter tree as specified
# by all parameter dependencies, `p` is the local parameter
# tree branch for this module, and `r` is the result of run(),
# possibly loaded from cache.
self.log.info("The reported value of X is %d. " % r.x)<|fim▁end|> | )
@classmethod |
<|file_name|>local-modularized-tricky-pass-2.rs<|end_file_name|><|fim▁begin|>// check-pass
//
// `#[macro_export] macro_rules` that doesn't originate from macro expansions can be placed
// into the root module soon enough to act as usual items and shadow globs and preludes.
#![feature(decl_macro)]
// `macro_export` shadows globs
use inner1::*;
mod inner1 {
pub macro exported() {}
}
exported!();
mod deep {
fn deep() {
type Deeper = [u8; {
#[macro_export]
macro_rules! exported {
() => ( struct Б; )
}
0
}];<|fim▁hole|>fn main() {
panic!();
}
mod inner3 {
#[macro_export]
macro_rules! panic {
() => ( struct Г; )
}
}
// `macro_export` shadows builtin macros
include!();
mod inner4 {
#[macro_export]
macro_rules! include {
() => ( struct Д; )
}
}<|fim▁end|> | }
}
// `macro_export` shadows std prelude |
<|file_name|>ScoreTransfer.js<|end_file_name|><|fim▁begin|>#pragma strict
var playerFinalScore: int;
var Users: List.< GameObject >;
var userData: GameObject[];
var maxPlayers: int;
var gameManager: GameObject;
var currentPlayer: int;
var currentName: String;
function Awake () {
//var users :GameObject = GameObject.Find("_UserControllerFB");
userData = GameObject.FindGameObjectsWithTag("Player");
}
function Start()
{
gameManager = GameObject.Find("__GameManager");
NotificationCenter.DefaultCenter().AddObserver(this, "PlayerChanger");
}
function Update () {
if(Users.Count>0)
playerFinalScore = Users[0].GetComponent(_GameManager).playerFinalScore;
}
function PlayerChanger()
{
currentPlayer++;
for(var i: int; i<Users.Count; i++)
{
if(i!=currentPlayer)Users[i].SetActive(false);
else Users[i].SetActive(true);
}
print("Change Player");
if(currentPlayer>maxPlayers) currentPlayer=0;
}
function OnLevelWasLoaded(level: int)
{
maxPlayers = userData.Length;
gameManager = GameObject.Find("__GameManager");
for(var i: int; i< userData.Length; i++)
{<|fim▁hole|> Users[i].transform.parent = gameManager.transform;
}
}<|fim▁end|> | if(i!=0)Users[i].SetActive(false);
Users[i].name = userData[i].name;
currentName = Users[0].name; |
<|file_name|>github.py<|end_file_name|><|fim▁begin|># Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A utility for GitHub REST API.
This script handles GitHub Issue, Pull Request, Comment, Label and Artifact
"""
import requests
import json
import shutil
import re
from absl import logging
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry
RETRIES = 3
BACKOFF = 5
RETRY_STATUS = (403, 500, 502, 504)
TIMEOUT = 5
OWNER = 'firebase'
REPO = 'firebase-cpp-sdk'
BASE_URL = 'https://api.github.com'
GITHUB_API_URL = '%s/repos/%s/%s' % (BASE_URL, OWNER, REPO)
logging.set_verbosity(logging.INFO)
def set_repo_url(repo):
match = re.match(r'https://github\.com/([^/]+)/([^/.]+)', repo)
if not match:
logging.info('Error, only pattern https://github.com/\{repo_owner\}/\{repo_name\} are allowed.')
return False
(repo_owner, repo_name) = match.groups()
global OWNER, REPO, GITHUB_API_URL
OWNER = repo_owner
REPO = repo_name
GITHUB_API_URL = '%s/repos/%s/%s' % (BASE_URL, OWNER, REPO)
return True
def requests_retry_session(retries=RETRIES,
backoff_factor=BACKOFF,
status_forcelist=RETRY_STATUS):
session = requests.Session()
retry = Retry(total=retries,
read=retries,
connect=retries,
backoff_factor=backoff_factor,
status_forcelist=status_forcelist)
adapter = HTTPAdapter(max_retries=retry)
session.mount('http://', adapter)
session.mount('https://', adapter)
return session
def create_issue(token, title, label, body):
"""Create an issue: https://docs.github.com/en/rest/reference/issues#create-an-issue"""
url = f'{GITHUB_API_URL}/issues'
headers = {'Accept': 'application/vnd.github.v3+json', 'Authorization': f'token {token}'}
data = {'title': title, 'labels': [label], 'body': body}
with requests.post(url, headers=headers, data=json.dumps(data), timeout=TIMEOUT) as response:
logging.info("create_issue: %s response: %s", url, response)
return response.json()
def get_issue_body(token, issue_number):
"""https://docs.github.com/en/rest/reference/issues#get-an-issue-comment"""
url = f'{GITHUB_API_URL}/issues/{issue_number}'
headers = {'Accept': 'application/vnd.github.v3+json', 'Authorization': f'token {token}'}
with requests_retry_session().get(url, headers=headers, timeout=TIMEOUT) as response:
logging.info("get_issue_body: %s response: %s", url, response)
return response.json()["body"]
def update_issue(token, issue_number, data):
"""Update an issue: https://docs.github.com/en/rest/reference/issues#update-an-issue"""
url = f'{GITHUB_API_URL}/issues/{issue_number}'
headers = {'Accept': 'application/vnd.github.v3+json', 'Authorization': f'token {token}'}
with requests_retry_session().patch(url, headers=headers, data=json.dumps(data), timeout=TIMEOUT) as response:
logging.info("update_issue: %s response: %s", url, response)
def open_issue(token, issue_number):
update_issue(token, issue_number, data={'state': 'open'})
def close_issue(token, issue_number):<|fim▁hole|>
def update_issue_comment(token, issue_number, comment):
update_issue(token, issue_number, data={'body': comment})
def search_issues_by_label(label):
"""https://docs.github.com/en/rest/reference/search#search-issues-and-pull-requests"""
url = f'{BASE_URL}/search/issues?q=repo:{OWNER}/{REPO}+label:"{label}"+is:issue'
headers = {'Accept': 'application/vnd.github.v3+json'}
with requests_retry_session().get(url, headers=headers, timeout=TIMEOUT) as response:
logging.info("search_issues_by_label: %s response: %s", url, response)
return response.json()["items"]
def list_comments(token, issue_number):
"""https://docs.github.com/en/rest/reference/issues#list-issue-comments"""
url = f'{GITHUB_API_URL}/issues/{issue_number}/comments'
headers = {'Accept': 'application/vnd.github.v3+json', 'Authorization': f'token {token}'}
with requests_retry_session().get(url, headers=headers, timeout=TIMEOUT) as response:
logging.info("list_comments: %s response: %s", url, response)
return response.json()
def add_comment(token, issue_number, comment):
"""https://docs.github.com/en/rest/reference/issues#create-an-issue-comment"""
url = f'{GITHUB_API_URL}/issues/{issue_number}/comments'
headers = {'Accept': 'application/vnd.github.v3+json', 'Authorization': f'token {token}'}
data = {'body': comment}
with requests.post(url, headers=headers, data=json.dumps(data), timeout=TIMEOUT) as response:
logging.info("add_comment: %s response: %s", url, response)
def update_comment(token, comment_id, comment):
"""https://docs.github.com/en/rest/reference/issues#update-an-issue-comment"""
url = f'{GITHUB_API_URL}/issues/comments/{comment_id}'
headers = {'Accept': 'application/vnd.github.v3+json', 'Authorization': f'token {token}'}
data = {'body': comment}
with requests_retry_session().patch(url, headers=headers, data=json.dumps(data), timeout=TIMEOUT) as response:
logging.info("update_comment: %s response: %s", url, response)
def delete_comment(token, comment_id):
"""https://docs.github.com/en/rest/reference/issues#delete-an-issue-comment"""
url = f'{GITHUB_API_URL}/issues/comments/{comment_id}'
headers = {'Accept': 'application/vnd.github.v3+json', 'Authorization': f'token {token}'}
with requests.delete(url, headers=headers, timeout=TIMEOUT) as response:
logging.info("delete_comment: %s response: %s", url, response)
def add_label(token, issue_number, label):
"""https://docs.github.com/en/rest/reference/issues#add-labels-to-an-issue"""
url = f'{GITHUB_API_URL}/issues/{issue_number}/labels'
headers={}
headers = {'Accept': 'application/vnd.github.v3+json', 'Authorization': f'token {token}'}
data = [label]
with requests.post(url, headers=headers, data=json.dumps(data), timeout=TIMEOUT) as response:
logging.info("add_label: %s response: %s", url, response)
def delete_label(token, issue_number, label):
"""https://docs.github.com/en/rest/reference/issues#delete-a-label"""
url = f'{GITHUB_API_URL}/issues/{issue_number}/labels/{label}'
headers = {'Accept': 'application/vnd.github.v3+json', 'Authorization': f'token {token}'}
with requests.delete(url, headers=headers, timeout=TIMEOUT) as response:
logging.info("delete_label: %s response: %s", url, response)
def list_artifacts(token, run_id):
"""https://docs.github.com/en/rest/reference/actions#list-workflow-run-artifacts"""
url = f'{GITHUB_API_URL}/actions/runs/{run_id}/artifacts'
headers = {'Accept': 'application/vnd.github.v3+json', 'Authorization': f'token {token}'}
with requests_retry_session().get(url, headers=headers, timeout=TIMEOUT) as response:
logging.info("list_artifacts: %s response: %s", url, response)
return response.json()["artifacts"]
def download_artifact(token, artifact_id, output_path):
"""https://docs.github.com/en/rest/reference/actions#download-an-artifact"""
url = f'{GITHUB_API_URL}/actions/artifacts/{artifact_id}/zip'
headers = {'Accept': 'application/vnd.github.v3+json', 'Authorization': f'token {token}'}
with requests.get(url, headers=headers, stream=True, timeout=TIMEOUT) as response:
logging.info("download_artifact: %s response: %s", url, response)
with open(output_path, 'wb') as file:
shutil.copyfileobj(response.raw, file)
def dismiss_review(token, pull_number, review_id, message):
"""https://docs.github.com/en/rest/reference/pulls#dismiss-a-review-for-a-pull-request"""
url = f'{GITHUB_API_URL}/pulls/{pull_number}/reviews/{review_id}/dismissals'
headers = {'Accept': 'application/vnd.github.v3+json', 'Authorization': f'token {token}'}
data = {'message': message}
with requests_retry_session().put(url, headers=headers, data=json.dumps(data),
stream=True, timeout=TIMEOUT) as response:
logging.info("dismiss_review: %s response: %s", url, response)
return response.json()
def get_reviews(token, pull_number):
"""https://docs.github.com/en/rest/reference/pulls#list-reviews-for-a-pull-request"""
url = f'{GITHUB_API_URL}/pulls/{pull_number}/reviews'
headers = {'Accept': 'application/vnd.github.v3+json', 'Authorization': f'token {token}'}
page = 1
per_page = 100
results = []
keep_going = True
while keep_going:
params = {'per_page': per_page, 'page': page}
page = page + 1
keep_going = False
with requests_retry_session().get(url, headers=headers, params=params,
stream=True, timeout=TIMEOUT) as response:
logging.info("get_reviews: %s response: %s", url, response)
results = results + response.json()
# If exactly per_page results were retrieved, read the next page.
keep_going = (len(response.json()) == per_page)
return results
def create_workflow_dispatch(token, workflow_id, ref, inputs):
"""https://docs.github.com/en/rest/reference/actions#create-a-workflow-dispatch-event"""
url = f'{GITHUB_API_URL}/actions/workflows/{workflow_id}/dispatches'
headers = {'Accept': 'application/vnd.github.v3+json', 'Authorization': f'token {token}'}
data = {'ref': ref, 'inputs': inputs}
with requests.post(url, headers=headers, data=json.dumps(data),
stream=True, timeout=TIMEOUT) as response:
logging.info("create_workflow_dispatch: %s response: %s", url, response)
# Response Status: 204 No Content
return True if response.status_code == 204 else False
def list_workflows(token, workflow_id, branch):
"""https://docs.github.com/en/rest/reference/actions#list-workflow-runs-for-a-repository"""
url = f'{GITHUB_API_URL}/actions/workflows/{workflow_id}/runs'
headers = {'Accept': 'application/vnd.github.v3+json', 'Authorization': f'token {token}'}
data = {'event': 'workflow_dispatch', 'branch': branch}
with requests.get(url, headers=headers, data=json.dumps(data),
stream=True, timeout=TIMEOUT) as response:
logging.info("list_workflows: %s response: %s", url, response)
return response.json()
def create_pull_request(token, head, base, title, body, maintainer_can_modify):
"""https://docs.github.com/en/rest/reference/pulls#create-a-pull-request"""
url = f'{GITHUB_API_URL}/pulls'
headers = {'Accept': 'application/vnd.github.v3+json', 'Authorization': f'token {token}'}
data = {'head': head, 'base': base, 'title': title, 'body': body,
'maintainer_can_modify': maintainer_can_modify}
with requests.post(url, headers=headers, data=json.dumps(data),
stream=True, timeout=TIMEOUT) as response:
logging.info("create_pull_request: %s response: %s", head, response)
return True if response.status_code == 201 else False
def list_pull_requests(token, state, head, base):
"""https://docs.github.com/en/rest/reference/pulls#list-pull-requests"""
url = f'{GITHUB_API_URL}/pulls'
headers = {'Accept': 'application/vnd.github.v3+json', 'Authorization': f'token {token}'}
page = 1
per_page = 100
results = []
keep_going = True
while keep_going:
params = {'per_page': per_page, 'page': page}
if state: params.update({'state': state})
if head: params.update({'head': head})
if base: params.update({'base': base})
page = page + 1
keep_going = False
with requests_retry_session().get(url, headers=headers, params=params,
stream=True, timeout=TIMEOUT) as response:
logging.info("get_reviews: %s response: %s", url, response)
results = results + response.json()
# If exactly per_page results were retrieved, read the next page.
keep_going = (len(response.json()) == per_page)
return results<|fim▁end|> | update_issue(token, issue_number, data={'state': 'closed'}) |
<|file_name|>fetcher.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import errno
import hashlib
import os
import re
import shutil
import stat
import subprocess
import sys
import tarfile
import time
import zipfile
from datetime import datetime
from typing import Dict, NamedTuple
from urllib.parse import urlparse
from urllib.request import urlretrieve
from .copytree import prefetch_dir_if_eden
from .envfuncs import Env
from .errors import TransientFailure
from .platform import is_windows
from .runcmd import run_cmd
def file_name_is_cmake_file(file_name):
file_name = file_name.lower()
base = os.path.basename(file_name)
return (
base.endswith(".cmake")
or base.endswith(".cmake.in")
or base == "cmakelists.txt"
)
class ChangeStatus(object):
"""Indicates the nature of changes that happened while updating
the source directory. There are two broad uses:
* When extracting archives for third party software we want to
know that we did something (eg: we either extracted code or
we didn't do anything)
* For 1st party code where we use shipit to transform the code,
we want to know if we changed anything so that we can perform
a build, but we generally want to be a little more nuanced
and be able to distinguish between just changing a source file
and whether we might need to reconfigure the build system.
"""
def __init__(self, all_changed: bool = False) -> None:
"""Construct a ChangeStatus object. The default is to create
a status that indicates no changes, but passing all_changed=True
will create one that indicates that everything changed"""
if all_changed:
self.source_files = 1
self.make_files = 1
else:
self.source_files = 0
self.make_files = 0
def record_change(self, file_name) -> None:
"""Used by the shipit fetcher to record changes as it updates
files in the destination. If the file name might be one used
in the cmake build system that we use for 1st party code, then
record that as a "make file" change. We could broaden this
to match any file used by various build systems, but it is
only really useful for our internal cmake stuff at this time.
If the file isn't a build file and is under the `fbcode_builder`
dir then we don't class that as an interesting change that we
might need to rebuild, so we ignore it.
Otherwise we record the file as a source file change."""
file_name = file_name.lower()
if file_name_is_cmake_file(file_name):
self.make_files += 1
elif "/fbcode_builder/cmake" in file_name:
self.source_files += 1
elif "/fbcode_builder/" not in file_name:
self.source_files += 1
def sources_changed(self) -> bool:
"""Returns true if any source files were changed during
an update operation. This will typically be used to decide
that the build system to be run on the source dir in an
incremental mode"""
return self.source_files > 0
def build_changed(self) -> bool:
"""Returns true if any build files were changed during
an update operation. This will typically be used to decidfe
that the build system should be reconfigured and re-run
as a full build"""
return self.make_files > 0
class Fetcher(object):
"""The Fetcher is responsible for fetching and extracting the
sources for project. The Fetcher instance defines where the
extracted data resides and reports this to the consumer via
its `get_src_dir` method."""
def update(self) -> ChangeStatus:
"""Brings the src dir up to date, ideally minimizing
changes so that a subsequent build doesn't over-build.
Returns a ChangeStatus object that helps the caller to
understand the nature of the changes required during
the update."""
return ChangeStatus()
def clean(self) -> None:
"""Reverts any changes that might have been made to
the src dir"""
pass
def hash(self) -> None:
"""Returns a hash that identifies the version of the code in the
working copy. For a git repo this is commit hash for the working
copy. For other Fetchers this should relate to the version of
the code in the src dir. The intent is that if a manifest
changes the version/rev of a project that the hash be different.
Importantly, this should be computable without actually fetching
the code, as we want this to factor into a hash used to download
a pre-built version of the code, without having to first download
and extract its sources (eg: boost on windows is pretty painful).
"""
pass
def get_src_dir(self) -> None:
"""Returns the source directory that the project was
extracted into"""
pass
class LocalDirFetcher(object):
"""This class exists to override the normal fetching behavior, and
use an explicit user-specified directory for the project sources.
This fetcher cannot update or track changes. It always reports that the
project has changed, forcing it to always be built."""
def __init__(self, path) -> None:
self.path = os.path.realpath(path)
def update(self) -> ChangeStatus:
return ChangeStatus(all_changed=True)
def hash(self) -> str:
return "0" * 40
def get_src_dir(self):
return self.path
class SystemPackageFetcher(object):
def __init__(self, build_options, packages) -> None:
self.manager = build_options.host_type.get_package_manager()
self.packages = packages.get(self.manager)
self.host_type = build_options.host_type
if self.packages:
self.installed = None
else:
self.installed = False
def packages_are_installed(self):
if self.installed is not None:
return self.installed
cmd = None
if self.manager == "rpm":
cmd = ["rpm", "-q"] + sorted(self.packages)
elif self.manager == "deb":
cmd = ["dpkg", "-s"] + sorted(self.packages)
elif self.manager == "homebrew":
cmd = ["brew", "ls", "--versions"] + sorted(self.packages)
if cmd:
proc = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if proc.returncode == 0:
# captured as binary as we will hash this later
self.installed = proc.stdout
else:
# Need all packages to be present to consider us installed
self.installed = False
else:
self.installed = False
return bool(self.installed)
def update(self) -> ChangeStatus:
assert self.installed
return ChangeStatus(all_changed=False)
def hash(self) -> str:
if self.packages_are_installed():
return hashlib.sha256(self.installed).hexdigest()
else:
return "0" * 40
def get_src_dir(self) -> None:
return None
class PreinstalledNopFetcher(SystemPackageFetcher):
def __init__(self) -> None:
self.installed = True
class GitFetcher(Fetcher):
DEFAULT_DEPTH = 1
def __init__(self, build_options, manifest, repo_url, rev, depth) -> None:
# Extract the host/path portions of the URL and generate a flattened
# directory name. eg:
# github.com/facebook/folly.git -> github.com-facebook-folly.git
url = urlparse(repo_url)
directory = "%s%s" % (url.netloc, url.path)
for s in ["/", "\\", ":"]:
directory = directory.replace(s, "-")
# Place it in a repos dir in the scratch space
repos_dir = os.path.join(build_options.scratch_dir, "repos")
if not os.path.exists(repos_dir):
os.makedirs(repos_dir)
self.repo_dir = os.path.join(repos_dir, directory)
if not rev and build_options.project_hashes:
hash_file = os.path.join(
build_options.project_hashes,
re.sub("\\.git$", "-rev.txt", url.path[1:]),
)
if os.path.exists(hash_file):
with open(hash_file, "r") as f:
data = f.read()
m = re.match("Subproject commit ([a-fA-F0-9]{40})", data)
if not m:
raise Exception("Failed to parse rev from %s" % hash_file)
rev = m.group(1)
print("Using pinned rev %s for %s" % (rev, repo_url))
self.rev = rev or "main"
self.origin_repo = repo_url
self.manifest = manifest
self.depth = depth if depth else GitFetcher.DEFAULT_DEPTH
def _update(self) -> ChangeStatus:
current_hash = (
subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=self.repo_dir)
.strip()
.decode("utf-8")
)
target_hash = (
subprocess.check_output(["git", "rev-parse", self.rev], cwd=self.repo_dir)
.strip()
.decode("utf-8")
)
if target_hash == current_hash:
# It's up to date, so there are no changes. This doesn't detect eg:
# if origin/main moved and rev='main', but that's ok for our purposes;
# we should be using explicit hashes or eg: a stable branch for the cases
# that we care about, and it isn't unreasonable to require that the user
# explicitly perform a clean build if those have moved. For the most
# part we prefer that folks build using a release tarball from github
# rather than use the git protocol, as it is generally a bit quicker
# to fetch and easier to hash and verify tarball downloads.
return ChangeStatus()
print("Updating %s -> %s" % (self.repo_dir, self.rev))
run_cmd(["git", "fetch", "origin", self.rev], cwd=self.repo_dir)
run_cmd(["git", "checkout", self.rev], cwd=self.repo_dir)
run_cmd(["git", "submodule", "update", "--init"], cwd=self.repo_dir)
return ChangeStatus(True)
def update(self) -> ChangeStatus:
if os.path.exists(self.repo_dir):
return self._update()
self._clone()
return ChangeStatus(True)
def _clone(self) -> None:
print("Cloning %s..." % self.origin_repo)
# The basename/dirname stuff allows us to dance around issues where
# eg: this python process is native win32, but the git.exe is cygwin
# or msys and doesn't like the absolute windows path that we'd otherwise
# pass to it. Careful use of cwd helps avoid headaches with cygpath.
run_cmd(
[
"git",
"clone",
"--depth=" + str(self.depth),
"--",
self.origin_repo,
os.path.basename(self.repo_dir),
],
cwd=os.path.dirname(self.repo_dir),
)
self._update()
def clean(self) -> None:
if os.path.exists(self.repo_dir):
run_cmd(["git", "clean", "-fxd"], cwd=self.repo_dir)
def hash(self):
return self.rev
def get_src_dir(self):
return self.repo_dir
def does_file_need_update(src_name, src_st, dest_name):
try:
target_st = os.lstat(dest_name)
except OSError as exc:
if exc.errno != errno.ENOENT:
raise
return True
if src_st.st_size != target_st.st_size:
return True
if stat.S_IFMT(src_st.st_mode) != stat.S_IFMT(target_st.st_mode):
return True
if stat.S_ISLNK(src_st.st_mode):
return os.readlink(src_name) != os.readlink(dest_name)
if not stat.S_ISREG(src_st.st_mode):
return True
# They might have the same content; compare.
with open(src_name, "rb") as sf, open(dest_name, "rb") as df:
chunk_size = 8192
while True:
src_data = sf.read(chunk_size)
dest_data = df.read(chunk_size)
if src_data != dest_data:
return True
if len(src_data) < chunk_size:
# EOF
break
return False
def copy_if_different(src_name, dest_name) -> bool:
"""Copy src_name -> dest_name, but only touch dest_name
if src_name is different from dest_name, making this a
more build system friendly way to copy."""
src_st = os.lstat(src_name)
if not does_file_need_update(src_name, src_st, dest_name):
return False
dest_parent = os.path.dirname(dest_name)
if not os.path.exists(dest_parent):
os.makedirs(dest_parent)
if stat.S_ISLNK(src_st.st_mode):
try:
os.unlink(dest_name)
except OSError as exc:
if exc.errno != errno.ENOENT:
raise
target = os.readlink(src_name)
print("Symlinking %s -> %s" % (dest_name, target))
os.symlink(target, dest_name)
else:
print("Copying %s -> %s" % (src_name, dest_name))
shutil.copy2(src_name, dest_name)
return True
def list_files_under_dir_newer_than_timestamp(dir_to_scan, ts):
for root, _dirs, files in os.walk(dir_to_scan):
for src_file in files:
full_name = os.path.join(root, src_file)
st = os.lstat(full_name)
if st.st_mtime > ts:
yield full_name
class ShipitPathMap(object):
def __init__(self) -> None:
self.roots = []
self.mapping = []
self.exclusion = []
def add_mapping(self, fbsource_dir, target_dir) -> None:
"""Add a posix path or pattern. We cannot normpath the input
here because that would change the paths from posix to windows
form and break the logic throughout this class."""
self.roots.append(fbsource_dir)
self.mapping.append((fbsource_dir, target_dir))
def add_exclusion(self, pattern) -> None:
self.exclusion.append(re.compile(pattern))
def _minimize_roots(self) -> None:
"""compute the de-duplicated set of roots within fbsource.
We take the shortest common directory prefix to make this
determination"""
self.roots.sort(key=len)
minimized = []
for r in self.roots:
add_this_entry = True
for existing in minimized:
if r.startswith(existing + "/"):
add_this_entry = False
break
if add_this_entry:
minimized.append(r)
self.roots = minimized
def _sort_mapping(self) -> None:
self.mapping.sort(reverse=True, key=lambda x: len(x[0]))
def _map_name(self, norm_name, dest_root):
if norm_name.endswith(".pyc") or norm_name.endswith(".swp"):
# Ignore some incidental garbage while iterating
return None
for excl in self.exclusion:
if excl.match(norm_name):
return None
for src_name, dest_name in self.mapping:
if norm_name == src_name or norm_name.startswith(src_name + "/"):
rel_name = os.path.relpath(norm_name, src_name)
# We can have "." as a component of some paths, depending
# on the contents of the shipit transformation section.
# normpath doesn't always remove `.` as the final component
# of the path, which be problematic when we later mkdir
# the dirname of the path that we return. Take care to avoid
# returning a path with a `.` in it.
rel_name = os.path.normpath(rel_name)
if dest_name == ".":
return os.path.normpath(os.path.join(dest_root, rel_name))
dest_name = os.path.normpath(dest_name)
return os.path.normpath(os.path.join(dest_root, dest_name, rel_name))
raise Exception("%s did not match any rules" % norm_name)
def mirror(self, fbsource_root, dest_root) -> ChangeStatus:
self._minimize_roots()
self._sort_mapping()
change_status = ChangeStatus()
# Record the full set of files that should be in the tree
full_file_list = set()
if sys.platform == "win32":
# Let's not assume st_dev has a consistent value on Windows.
def st_dev(path):
return 1
else:
def st_dev(path):
return os.lstat(path).st_dev
for fbsource_subdir in self.roots:
dir_to_mirror = os.path.join(fbsource_root, fbsource_subdir)
root_dev = st_dev(dir_to_mirror)
prefetch_dir_if_eden(dir_to_mirror)
if not os.path.exists(dir_to_mirror):
raise Exception(
"%s doesn't exist; check your sparse profile!" % dir_to_mirror
)
for root, dirs, files in os.walk(dir_to_mirror):
dirs[:] = [d for d in dirs if root_dev == st_dev(os.path.join(root, d))]
for src_file in files:
full_name = os.path.join(root, src_file)
rel_name = os.path.relpath(full_name, fbsource_root)
norm_name = rel_name.replace("\\", "/")
target_name = self._map_name(norm_name, dest_root)
if target_name:<|fim▁hole|> # Compare the list of previously shipped files; if a file is
# in the old list but not the new list then it has been
# removed from the source and should be removed from the
# destination.
# Why don't we simply create this list by walking dest_root?
# Some builds currently have to be in-source builds and
# may legitimately need to keep some state in the source tree :-/
installed_name = os.path.join(dest_root, ".shipit_shipped")
if os.path.exists(installed_name):
with open(installed_name, "rb") as f:
for name in f.read().decode("utf-8").splitlines():
name = name.strip()
if name not in full_file_list:
print("Remove %s" % name)
os.unlink(name)
change_status.record_change(name)
with open(installed_name, "wb") as f:
for name in sorted(list(full_file_list)):
f.write(("%s\n" % name).encode("utf-8"))
return change_status
class FbsourceRepoData(NamedTuple):
hash: str
date: str
FBSOURCE_REPO_DATA: Dict[str, FbsourceRepoData] = {}
def get_fbsource_repo_data(build_options) -> FbsourceRepoData:
"""Returns the commit metadata for the fbsource repo.
Since we may have multiple first party projects to
hash, and because we don't mutate the repo, we cache
this hash in a global."""
cached_data = FBSOURCE_REPO_DATA.get(build_options.fbsource_dir)
if cached_data:
return cached_data
cmd = ["hg", "log", "-r.", "-T{node}\n{date|hgdate}"]
env = Env()
env.set("HGPLAIN", "1")
log_data = subprocess.check_output(
cmd, cwd=build_options.fbsource_dir, env=dict(env.items())
).decode("ascii")
(hash, datestr) = log_data.split("\n")
# datestr is like "seconds fractionalseconds"
# We want "20200324.113140"
(unixtime, _fractional) = datestr.split(" ")
date = datetime.fromtimestamp(int(unixtime)).strftime("%Y%m%d.%H%M%S")
cached_data = FbsourceRepoData(hash=hash, date=date)
FBSOURCE_REPO_DATA[build_options.fbsource_dir] = cached_data
return cached_data
class SimpleShipitTransformerFetcher(Fetcher):
def __init__(self, build_options, manifest, ctx) -> None:
self.build_options = build_options
self.manifest = manifest
self.repo_dir = os.path.join(build_options.scratch_dir, "shipit", manifest.name)
self.ctx = ctx
def clean(self) -> None:
if os.path.exists(self.repo_dir):
shutil.rmtree(self.repo_dir)
def update(self) -> ChangeStatus:
mapping = ShipitPathMap()
for src, dest in self.manifest.get_section_as_ordered_pairs(
"shipit.pathmap", self.ctx
):
mapping.add_mapping(src, dest)
if self.manifest.shipit_fbcode_builder:
mapping.add_mapping(
"fbcode/opensource/fbcode_builder", "build/fbcode_builder"
)
for pattern in self.manifest.get_section_as_args("shipit.strip", self.ctx):
mapping.add_exclusion(pattern)
return mapping.mirror(self.build_options.fbsource_dir, self.repo_dir)
# pyre-fixme[15]: `hash` overrides method defined in `Fetcher` inconsistently.
def hash(self) -> str:
# We return a fixed non-hash string for in-fbsource builds.
# We're relying on the `update` logic to correctly invalidate
# the build in the case that files have changed.
return "fbsource"
def get_src_dir(self):
return self.repo_dir
class ShipitTransformerFetcher(Fetcher):
SHIPIT = "/var/www/scripts/opensource/shipit/run_shipit.php"
def __init__(self, build_options, project_name) -> None:
self.build_options = build_options
self.project_name = project_name
self.repo_dir = os.path.join(build_options.scratch_dir, "shipit", project_name)
def update(self) -> ChangeStatus:
if os.path.exists(self.repo_dir):
return ChangeStatus()
self.run_shipit()
return ChangeStatus(True)
def clean(self) -> None:
if os.path.exists(self.repo_dir):
shutil.rmtree(self.repo_dir)
@classmethod
def available(cls):
return os.path.exists(cls.SHIPIT)
def run_shipit(self) -> None:
tmp_path = self.repo_dir + ".new"
try:
if os.path.exists(tmp_path):
shutil.rmtree(tmp_path)
# Run shipit
run_cmd(
[
"php",
ShipitTransformerFetcher.SHIPIT,
"--project=" + self.project_name,
"--create-new-repo",
"--source-repo-dir=" + self.build_options.fbsource_dir,
"--source-branch=.",
"--skip-source-init",
"--skip-source-pull",
"--skip-source-clean",
"--skip-push",
"--skip-reset",
"--destination-use-anonymous-https",
"--create-new-repo-output-path=" + tmp_path,
]
)
# Remove the .git directory from the repository it generated.
# There is no need to commit this.
repo_git_dir = os.path.join(tmp_path, ".git")
shutil.rmtree(repo_git_dir)
os.rename(tmp_path, self.repo_dir)
except Exception:
# Clean up after a failed extraction
if os.path.exists(tmp_path):
shutil.rmtree(tmp_path)
self.clean()
raise
# pyre-fixme[15]: `hash` overrides method defined in `Fetcher` inconsistently.
def hash(self) -> str:
# We return a fixed non-hash string for in-fbsource builds.
return "fbsource"
def get_src_dir(self):
return self.repo_dir
def download_url_to_file_with_progress(url: str, file_name) -> None:
print("Download %s -> %s ..." % (url, file_name))
class Progress(object):
last_report = 0
def progress(self, count, block, total):
if total == -1:
total = "(Unknown)"
amount = count * block
if sys.stdout.isatty():
sys.stdout.write("\r downloading %s of %s " % (amount, total))
else:
# When logging to CI logs, avoid spamming the logs and print
# status every few seconds
now = time.time()
if now - self.last_report > 5:
sys.stdout.write(".. %s of %s " % (amount, total))
self.last_report = now
sys.stdout.flush()
progress = Progress()
start = time.time()
try:
(_filename, headers) = urlretrieve(url, file_name, reporthook=progress.progress)
except (OSError, IOError) as exc: # noqa: B014
raise TransientFailure(
"Failed to download %s to %s: %s" % (url, file_name, str(exc))
)
end = time.time()
sys.stdout.write(" [Complete in %f seconds]\n" % (end - start))
sys.stdout.flush()
print(f"{headers}")
class ArchiveFetcher(Fetcher):
def __init__(self, build_options, manifest, url, sha256) -> None:
self.manifest = manifest
self.url = url
self.sha256 = sha256
self.build_options = build_options
url = urlparse(self.url)
basename = "%s-%s" % (manifest.name, os.path.basename(url.path))
self.file_name = os.path.join(build_options.scratch_dir, "downloads", basename)
self.src_dir = os.path.join(build_options.scratch_dir, "extracted", basename)
self.hash_file = self.src_dir + ".hash"
def _verify_hash(self) -> None:
h = hashlib.sha256()
with open(self.file_name, "rb") as f:
while True:
block = f.read(8192)
if not block:
break
h.update(block)
digest = h.hexdigest()
if digest != self.sha256:
os.unlink(self.file_name)
raise Exception(
"%s: expected sha256 %s but got %s" % (self.url, self.sha256, digest)
)
def _download_dir(self):
"""returns the download dir, creating it if it doesn't already exist"""
download_dir = os.path.dirname(self.file_name)
if not os.path.exists(download_dir):
os.makedirs(download_dir)
return download_dir
def _download(self) -> None:
self._download_dir()
download_url_to_file_with_progress(self.url, self.file_name)
self._verify_hash()
def clean(self) -> None:
if os.path.exists(self.src_dir):
shutil.rmtree(self.src_dir)
def update(self) -> ChangeStatus:
try:
with open(self.hash_file, "r") as f:
saved_hash = f.read().strip()
if saved_hash == self.sha256 and os.path.exists(self.src_dir):
# Everything is up to date
return ChangeStatus()
print(
"saved hash %s doesn't match expected hash %s, re-validating"
% (saved_hash, self.sha256)
)
os.unlink(self.hash_file)
except EnvironmentError:
pass
# If we got here we know the contents of src_dir are either missing
# or wrong, so blow away whatever happened to be there first.
if os.path.exists(self.src_dir):
shutil.rmtree(self.src_dir)
# If we already have a file here, make sure it looks legit before
# proceeding: any errors and we just remove it and re-download
if os.path.exists(self.file_name):
try:
self._verify_hash()
except Exception:
if os.path.exists(self.file_name):
os.unlink(self.file_name)
if not os.path.exists(self.file_name):
self._download()
if tarfile.is_tarfile(self.file_name):
opener = tarfile.open
elif zipfile.is_zipfile(self.file_name):
opener = zipfile.ZipFile
else:
raise Exception("don't know how to extract %s" % self.file_name)
os.makedirs(self.src_dir)
print("Extract %s -> %s" % (self.file_name, self.src_dir))
t = opener(self.file_name)
if is_windows():
# Ensure that we don't fall over when dealing with long paths
# on windows
src = r"\\?\%s" % os.path.normpath(self.src_dir)
else:
src = self.src_dir
# The `str` here is necessary to ensure that we don't pass a unicode
# object down to tarfile.extractall on python2. When extracting
# the boost tarball it makes some assumptions and tries to convert
# a non-ascii path to ascii and throws.
src = str(src)
t.extractall(src)
with open(self.hash_file, "w") as f:
f.write(self.sha256)
return ChangeStatus(True)
def hash(self):
return self.sha256
def get_src_dir(self):
return self.src_dir
def homebrew_package_prefix(package):
cmd = ["brew", "--prefix", package]
try:
proc = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except FileNotFoundError:
return
if proc.returncode == 0:
return proc.stdout.decode("utf-8").rstrip()<|fim▁end|> | full_file_list.add(target_name)
if copy_if_different(full_name, target_name):
change_status.record_change(target_name)
|
<|file_name|>get-lang-arg.js<|end_file_name|><|fim▁begin|>/*
This utility processes the argument passed with the `lang` option
in ember-cli, i.e. `ember (new||init||addon) app-name --lang=langArg`
Execution Context (usage, input, output, error handling, etc.):
- called directly by `init` IFF `--lang` flag is used in (new||init||addon)
- receives single input: the argument passed with `lang` (herein `langArg`)
- processes `langArg`: lang code validation + error detection / handling
- DOES emit Warning messages if necessary
- DOES NOT halt execution process / throw errors / disrupt the build
- returns single result as output (to `init`):
- `langArg` (if it is a valid language code)
- `undefined` (otherwise)
- `init` assigns the value of `commandOptions.lang` to the returned result
- downstream, the `lang` attribute is assigned via inline template control:
- file: `blueprints/app/files/app/index.html`
- logic: `<html<% if(lang) { %> lang="<%= lang %>"<% } %>>
Internal Mechanics -- the utility processes `langArg` to determine:
- the value to return to `init` (i.e. validated lang code or undefined)
- a descriptive category for the usage: `correct`, `incorrect`, `edge`, etc.
- what message text (if any: category-dependent) to emit before return
Warning Messages (if necessary):
- An internal instance of `console-ui` is used to emit messages
- IFF there is a message, it will be emitted before returning the result
- Components of all emitted messages -- [Name] (writeLevel): 'example':
- [`HEAD`] (WARNING): 'A warning was generated while processing `--lang`:'
- [`BODY`] (WARNING): 'Invalid language code, `en-UK`'
- [`STATUS`] (WARNING): '`lang` will NOT be set to `en-UK` in `index.html`'
- [`HELP`] (INFO): 'Correct usage of `--lang`: ... '
<|fim▁hole|>*/
'use strict';
const { isLangCode } = require('is-language-code');
// Primary language code validation function (boolean)
function isValidLangCode(langArg) {
return isLangCode(langArg).res;
}
// Generates the result to pass back to `init`
function getResult(langArg) {
return isValidLangCode(langArg) ? langArg : undefined;
}
/*
Misuse case: attempt to set application programming language via `lang`
AND
Edge case: valid language code AND a common programming language abbreviation
-------------------------------------------------------------------------------
It is possible that a user might mis-interpret the type of `language` that is
specified by the `--lang` flag. One notable potential `misuse case` is one in
which the user thinks `--lang` specifies the application's programming
language. For example, the user might call `ember new my-app --lang=typescript`
expecting to achieve an effect similar to the one provided by the
`ember-cli-typescript` addon.
This misuse case is handled by checking the input `langArg` against an Array
containing notable programming language-related values: language names
(e.g. `JavaScript`), abbreviations (e.g. `js`), file extensions (e.g. `.js`),
or versions (e.g. `ES6`), etc. Specifically, if `langArg` is found within this
reference list, a WARNING message that describes correct `--lang` usage will
be emitted. The `lang` attribute will not be assigned in `index.html`, and the
user will be notified with a corresponding STATUS message.
There are several edge cases (marked) where `langArg` is both a commonly-used
abbreviation for a programming language AND a valid language code. The behavior
for these cases is to assume the user has used `--lang` correctly and set the
`lang` attribute to the valid code in `index.html`. To cover for potential
misuage, several helpful messages will also be emitted:
- `ts` is a valid language code AND a common programming language abbreviation
- the `lang` attribute will be set to `ts` in the application
- if this is not correct, it can be changed in `app/index.html` directly
- (general `help` information about correct `--lang` usage)
*/
const PROG_LANGS = [
'javascript',
'.js',
'js',
'emcascript2015',
'emcascript6',
'es6',
'emcascript2016',
'emcascript7',
'es7',
'emcascript2017',
'emcascript8',
'es8',
'emcascript2018',
'emcascript9',
'es9',
'emcascript2019',
'emcascript10',
'es10',
'typescript',
'.ts',
'node.js',
'node',
'handlebars',
'.hbs',
'hbs',
'glimmer',
'glimmer.js',
'glimmer-vm',
'markdown',
'markup',
'html5',
'html4',
'.md',
'.html',
'.htm',
'.xhtml',
'.xml',
'.xht',
'md',
'html',
'htm',
'xhtml',
'.sass',
'.scss',
'.css',
'sass',
'scss',
// Edge Cases
'ts', // Tsonga
'TS', // Tsonga (case insensitivity check)
'xml', // Malaysian Sign Language
'xht', // Hattic
'css', // Costanoan
];
function isProgLang(langArg) {
return langArg && PROG_LANGS.includes(langArg.toLowerCase().trim());
}
function isValidCodeAndProg(langArg) {
return isValidLangCode(langArg) && isProgLang(langArg);
}
/*
Misuse case: `--lang` called without `langArg` (NB: parser bug workaround)
-------------------------------------------------------------------------------
This is a workaround for handling an existing bug in the ember-cli parser
where the `--lang` option is specified in the command without a corresponding
value for `langArg`.
As examples, the parser behavior would likely affect the following usages:
1. `ember new app-name --lang --skip-npm
2. `ember new app-name --lang`
In this context, the unintended parser behavior is that `langArg` will be
assingned to the String that immediately follows `--lang` in the command. If
`--lang` is the last explicitly defined part of the command (as in the second
example above), the first of any any `hidden` options pushed onto the command
after the initial parse (e.g. `--disable-analytics`, `--no-watcher`) will be
used when assigning `langArg`.
In the above examples, `langArg` would likely be assigned as follows:
1. `ember new app-name --lang --skip-npm => `langArg='--skip-npm'`
2. `ember new app-name --lang` => `langArg='--disable-analytics'`
The workaround impelemented herein is to check whether or not the value of
`langArg` starts with a hyphen. The rationale for this approach is based on
the following underlying assumptions:
- ALL CLI options start with (at least one) hyphen
- NO valid language codes start with a hyphen
If the leading hyphen is detected, the current behavior is to assume `--lang`
was declared without a corresponding specification. A WARNING message that
describes correct `--lang` usage will be emitted. The `lang` attribute will not
be assigned in `index.html`, and the user will be notified with a corresponding
STATUS message. Execution will not be halted.
Other complications related to this parser behavior are considered out-of-scope
and not handled here. In the first example above, this workaround would ensure
that `lang` is not assigned to `--skip-npm`, but it would not guarantee that
`--skip-npm` is correctly processed as a command option. That is, `npm` may or
may not get skipped during execution.
*/
function startsWithHyphen(langArg) {
return langArg && langArg[0] === '-';
}
// MESSAGE GENERATION:
// 1. `HEAD` Message: template for all `--lang`-related warnings emitted
const MSG_HEAD = `An issue with the \`--lang\` flag returned the following message:`;
// 2. `BODY` Messages: category-dependent context information
// Message context from language code validation (valid: null, invalid: reason)
function getLangCodeMsg(langArg) {
return isLangCode(langArg).message;
}
// Edge case: valid language code AND a common programming language abbreviation
function getValidAndProgMsg(langArg) {
return `The \`--lang\` flag has been used with argument \`${langArg}\`,
which is BOTH a valid language code AND an abbreviation for a programming language.
${getProgLangMsg(langArg)}`;
}
// Misuse case: attempt to set application programming language via `lang`
function getProgLangMsg(langArg) {
return `Trying to set the app programming language to \`${langArg}\`?
This is not the intended usage of the \`--lang\` flag.`;
}
// Misuse case: `--lang` called without `langArg` (NB: parser bug workaround)
function getCliMsg() {
return `Detected a \`--lang\` specification starting with command flag \`-\`.
This issue is likely caused by using the \`--lang\` flag without a specification.`;
}
// 3. `STATUS` message: report if `lang` will be set in `index.html`
function getStatusMsg(langArg, willSet) {
return `The human language of the application will ${willSet ? `be set to ${langArg}` : `NOT be set`} in
the \`<html>\` element's \`lang\` attribute in \`index.html\`.`;
}
// 4. `HELP` message: template for all `--lang`-related warnings emitted
const MSG_HELP = `If this was not your intention, you may edit the \`<html>\` element's
\`lang\` attribute in \`index.html\` manually after the process is complete.
Information about using the \`--lang\` flag:
The \`--lang\` flag sets the base human language of an app or test app:
- \`app/index.html\` (app)
- \`tests/dummy/app/index.html\` (addon test app)
If used, the lang option must specfify a valid language code.
For default behavior, remove the flag.
See \`ember <command> help\` for more information.`;
function getBodyMsg(langArg) {
return isValidCodeAndProg(langArg)
? getValidAndProgMsg(langArg)
: isProgLang(langArg)
? getProgLangMsg(langArg)
: startsWithHyphen(langArg)
? getCliMsg(langArg)
: getLangCodeMsg(langArg);
}
function getFullMsg(langArg) {
return {
head: MSG_HEAD,
body: getBodyMsg(langArg),
status: getStatusMsg(langArg, isValidCodeAndProg(langArg)),
help: MSG_HELP,
};
}
function writeFullMsg(fullMsg, ui) {
ui.setWriteLevel('WARNING');
ui.writeWarnLine(`${fullMsg.head}\n ${fullMsg.body}\``);
ui.writeWarnLine(fullMsg.status);
ui.setWriteLevel('INFO');
ui.writeInfoLine(fullMsg.help);
}
module.exports = function getLangArg(langArg, ui) {
let fullMsg = getFullMsg(langArg);
if (fullMsg.body) {
writeFullMsg(fullMsg, ui);
}
return getResult(langArg);
};<|fim▁end|> | |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import easy_thumbnails.fields
from django.conf import settings
import zds.utils.models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Alert',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('scope', models.CharField(db_index=True, max_length=1, choices=[(b'A', b"Commentaire d'article"), (b'F', b'Forum'), (b'T', b'Commentaire de tuto')])),
('text', models.TextField(verbose_name=b"Texte d'alerte")),
('pubdate', models.DateTimeField(verbose_name=b'Date de publication', db_index=True)),
('author', models.ForeignKey(related_name='alerts', verbose_name=b'Auteur', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'Alerte',
'verbose_name_plural': 'Alertes',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Category',<|fim▁hole|> fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=80, verbose_name=b'Titre')),
('description', models.TextField(verbose_name=b'Description')),
('position', models.IntegerField(default=0, verbose_name=b'Position')),
('slug', models.SlugField(max_length=80)),
],
options={
'verbose_name': 'Categorie',
'verbose_name_plural': 'Categories',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='CategorySubCategory',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('is_main', models.BooleanField(default=True, db_index=True, verbose_name=b'Est la cat\xc3\xa9gorie principale')),
('category', models.ForeignKey(verbose_name=b'Cat\xc3\xa9gorie', to='utils.Category')),
],
options={
'verbose_name': 'Hierarchie cat\xe9gorie',
'verbose_name_plural': 'Hierarchies cat\xe9gories',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('ip_address', models.CharField(max_length=39, verbose_name=b"Adresse IP de l'auteur ")),
('position', models.IntegerField(verbose_name=b'Position', db_index=True)),
('text', models.TextField(verbose_name=b'Texte')),
('text_html', models.TextField(verbose_name=b'Texte en Html')),
('like', models.IntegerField(default=0, verbose_name=b'Likes')),
('dislike', models.IntegerField(default=0, verbose_name=b'Dislikes')),
('pubdate', models.DateTimeField(auto_now_add=True, verbose_name=b'Date de publication', db_index=True)),
('update', models.DateTimeField(null=True, verbose_name=b"Date d'\xc3\xa9dition", blank=True)),
('is_visible', models.BooleanField(default=True, verbose_name=b'Est visible')),
('text_hidden', models.CharField(default=b'', max_length=80, verbose_name=b'Texte de masquage ')),
('author', models.ForeignKey(related_name='comments', verbose_name=b'Auteur', to=settings.AUTH_USER_MODEL)),
('editor', models.ForeignKey(related_name='comments-editor', verbose_name=b'Editeur', blank=True, to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'verbose_name': 'Commentaire',
'verbose_name_plural': 'Commentaires',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='CommentDislike',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('comments', models.ForeignKey(to='utils.Comment')),
('user', models.ForeignKey(related_name='post_disliked', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'Ce message est inutile',
'verbose_name_plural': 'Ces messages sont inutiles',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='CommentLike',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('comments', models.ForeignKey(to='utils.Comment')),
('user', models.ForeignKey(related_name='post_liked', to=settings.AUTH_USER_MODEL)),
],
options={
'verbose_name': 'Ce message est utile',
'verbose_name_plural': 'Ces messages sont utiles',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='HelpWriting',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=20, verbose_name=b'Name')),
('slug', models.SlugField(max_length=20)),
('tablelabel', models.CharField(max_length=150, verbose_name=b'TableLabel')),
('image', easy_thumbnails.fields.ThumbnailerImageField(upload_to=zds.utils.models.image_path_help)),
],
options={
'verbose_name': 'Aide \xe0 la r\xe9daction',
'verbose_name_plural': 'Aides \xe0 la r\xe9daction',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Licence',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('code', models.CharField(max_length=20, verbose_name=b'Code')),
('title', models.CharField(max_length=80, verbose_name=b'Titre')),
('description', models.TextField(verbose_name=b'Description')),
],
options={
'verbose_name': 'Licence',
'verbose_name_plural': 'Licences',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='SubCategory',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=80, verbose_name=b'Titre')),
('subtitle', models.CharField(max_length=200, verbose_name=b'Sous-titre')),
('image', models.ImageField(null=True, upload_to=zds.utils.models.image_path_category, blank=True)),
('slug', models.SlugField(max_length=80)),
],
options={
'verbose_name': 'Sous-categorie',
'verbose_name_plural': 'Sous-categories',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Tag',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=20, verbose_name=b'Titre')),
('slug', models.SlugField(max_length=20)),
],
options={
'verbose_name': 'Tag',
'verbose_name_plural': 'Tags',
},
bases=(models.Model,),
),
migrations.AddField(
model_name='categorysubcategory',
name='subcategory',
field=models.ForeignKey(verbose_name=b'Sous-Cat\xc3\xa9gorie', to='utils.SubCategory'),
preserve_default=True,
),
migrations.AddField(
model_name='alert',
name='comment',
field=models.ForeignKey(related_name='alerts', verbose_name=b'Commentaire', to='utils.Comment'),
preserve_default=True,
),
]<|fim▁end|> | |
<|file_name|>mojo_app_connection_impl.cc<|end_file_name|><|fim▁begin|>// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/browser/mojo/mojo_app_connection_impl.h"
#include <stdint.h>
#include <utility>
#include "base/bind.h"
#include "content/browser/mojo/mojo_shell_context.h"
<|fim▁hole|>namespace content {
const char kBrowserMojoAppUrl[] = "system:content_browser";
namespace {
void OnGotInstanceID(shell::mojom::ConnectResult result,
const std::string& user_id,
uint32_t remote_id) {}
} // namespace
// static
std::unique_ptr<MojoAppConnection> MojoAppConnection::Create(
const std::string& user_id,
const std::string& name,
const std::string& requestor_name) {
return std::unique_ptr<MojoAppConnection>(
new MojoAppConnectionImpl(user_id, name, requestor_name));
}
MojoAppConnectionImpl::MojoAppConnectionImpl(
const std::string& user_id,
const std::string& name,
const std::string& requestor_name) {
MojoShellContext::ConnectToApplication(
user_id, name, requestor_name, mojo::GetProxy(&interfaces_),
shell::mojom::InterfaceProviderPtr(), base::Bind(&OnGotInstanceID));
}
MojoAppConnectionImpl::~MojoAppConnectionImpl() {
}
void MojoAppConnectionImpl::GetInterface(
const std::string& interface_name,
mojo::ScopedMessagePipeHandle handle) {
interfaces_->GetInterface(interface_name, std::move(handle));
}
} // namespace content<|fim▁end|> | |
<|file_name|>forum.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | export * from './src/forum'; |
<|file_name|>FileBrowser.cpp<|end_file_name|><|fim▁begin|>/*
* FileBrowser.cpp - implementation of the project-, preset- and
* sample-file-browser
*
* Copyright (c) 2004-2014 Tobias Doerffel <tobydox/at/users.sourceforge.net>
*
* This file is part of LMMS - http://lmms.io
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program (see COPYING); if not, write to the
* Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301 USA.
*
*/
#include <QHBoxLayout>
#include <QKeyEvent>
#include <QLineEdit>
#include <QMenu>
#include <QPushButton>
#include <QMdiArea>
#include <QMdiSubWindow>
#include "FileBrowser.h"
#include "BBTrackContainer.h"
#include "ConfigManager.h"
#include "debug.h"
#include "embed.h"
#include "Engine.h"
#include "gui_templates.h"
#include "ImportFilter.h"
#include "Instrument.h"
#include "InstrumentTrack.h"
#include "MainWindow.h"
#include "DataFile.h"
#include "PresetPreviewPlayHandle.h"
#include "SamplePlayHandle.h"
#include "Song.h"
#include "StringPairDrag.h"
#include "TextFloat.h"
enum TreeWidgetItemTypes
{
TypeFileItem = QTreeWidgetItem::UserType,
TypeDirectoryItem
} ;
FileBrowser::FileBrowser(const QString & directories, const QString & filter,
const QString & title, const QPixmap & pm,
QWidget * parent, bool dirs_as_items ) :
SideBarWidget( title, pm, parent ),
m_directories( directories ),
m_filter( filter ),
m_dirsAsItems( dirs_as_items )
{
setWindowTitle( tr( "Browser" ) );
m_l = new FileBrowserTreeWidget( contentParent() );
addContentWidget( m_l );
QWidget * ops = new QWidget( contentParent() );
ops->setFixedHeight( 24 );
QHBoxLayout * opl = new QHBoxLayout( ops );
opl->setMargin( 0 );
opl->setSpacing( 0 );
m_filterEdit = new QLineEdit( ops );
connect( m_filterEdit, SIGNAL( textEdited( const QString & ) ),
this, SLOT( filterItems( const QString & ) ) );
QPushButton * reload_btn = new QPushButton(
embed::getIconPixmap( "reload" ),
QString::null, ops );
connect( reload_btn, SIGNAL( clicked() ), this, SLOT( reloadTree() ) );
opl->addWidget( m_filterEdit );
opl->addSpacing( 5 );
opl->addWidget( reload_btn );
addContentWidget( ops );
reloadTree();
show();
}
FileBrowser::~FileBrowser()
{
}
void FileBrowser::filterItems( const QString & filter )
{
const bool show_all = filter.isEmpty();
for( int i = 0; i < m_l->topLevelItemCount(); ++i )
{
QTreeWidgetItem * it = m_l->topLevelItem( i );
// show all items if filter is empty
if( show_all )
{
it->setHidden( false );
if( it->childCount() )
{
filterItems( it, filter );
}
}
// is directory?
else if( it->childCount() )
{
// matches filter?
if( it->text( 0 ).
contains( filter, Qt::CaseInsensitive ) )
{
// yes, then show everything below
it->setHidden( false );
filterItems( it, QString::null );
}
else
{
// only show if item below matches filter
it->setHidden( !filterItems( it, filter ) );
}
}
// a standard item (i.e. no file or directory item?)
else if( it->type() == QTreeWidgetItem::Type )
{
// hide in every case when filtering
it->setHidden( true );
}
else
{
// file matches filter?
it->setHidden( !it->text( 0 ).
contains( filter, Qt::CaseInsensitive ) );
}
}
}
bool FileBrowser::filterItems(QTreeWidgetItem * item, const QString & filter )
{
const bool show_all = filter.isEmpty();
bool matched = false;
for( int i = 0; i < item->childCount(); ++i )
{
QTreeWidgetItem * it = item->child( i );
bool cm = false; // whether current item matched
// show all items if filter is empty
if( show_all )
{
it->setHidden( false );
if( it->childCount() )
{
filterItems( it, filter );
}
}
// is directory?
else if( it->childCount() )
{
// matches filter?
if( it->text( 0 ).
contains( filter, Qt::CaseInsensitive ) )
{
// yes, then show everything below
it->setHidden( false );
filterItems( it, QString::null );
cm = true;
}
else
{
// only show if item below matches filter
cm = filterItems( it, filter );
it->setHidden( !cm );
}
}
// a standard item (i.e. no file or directory item?)
else if( it->type() == QTreeWidgetItem::Type )
{
// hide in every case when filtering
it->setHidden( true );
}
else
{
// file matches filter?
cm = it->text( 0 ).
contains( filter, Qt::CaseInsensitive );
it->setHidden( !cm );
}
if( cm )
{
matched = true;
}
}
return matched;
}
void FileBrowser::reloadTree( void )
{
const QString text = m_filterEdit->text();
m_filterEdit->clear();
m_l->clear();
QStringList paths = m_directories.split( '*' );
for( QStringList::iterator it = paths.begin(); it != paths.end(); ++it )
{
addItems( *it );
}
m_filterEdit->setText( text );
filterItems( text );
}
void FileBrowser::addItems(const QString & path )
{
if( m_dirsAsItems )
{
m_l->addTopLevelItem( new Directory( path,
QString::null, m_filter ) );
return;
}
QDir cdir( path );
QStringList files = cdir.entryList( QDir::Dirs, QDir::Name );
for( QStringList::const_iterator it = files.constBegin();
it != files.constEnd(); ++it )
{
QString cur_file = *it;
if( cur_file[0] != '.' )
{
bool orphan = true;
for( int i = 0; i < m_l->topLevelItemCount(); ++i )
{
Directory * d = dynamic_cast<Directory *>(
m_l->topLevelItem( i ) );
if( d == NULL || cur_file < d->text( 0 ) )
{
m_l->insertTopLevelItem( i,
new Directory( cur_file, path,
m_filter ) );
orphan = false;
break;
}
else if( cur_file == d->text( 0 ) )
{
d->addDirectory( path );
orphan = false;
break;
}
}
if( orphan )
{
m_l->addTopLevelItem( new Directory( cur_file,
path, m_filter ) );
}
}
}
files = cdir.entryList( QDir::Files, QDir::Name );
for( QStringList::const_iterator it = files.constBegin();
it != files.constEnd(); ++it )
{
QString cur_file = *it;
if( cur_file[0] != '.' )
{
// TODO: don't insert instead of removing, order changed
// remove existing file-items
QList<QTreeWidgetItem *> existing = m_l->findItems(
cur_file, Qt::MatchFixedString );
if( !existing.empty() )
{
delete existing.front();
}
(void) new FileItem( m_l, cur_file, path );
}
}
}
void FileBrowser::keyPressEvent(QKeyEvent * ke )
{
if( ke->key() == Qt::Key_F5 )
{
reloadTree();
}
else
{
ke->ignore();
}
}
FileBrowserTreeWidget::FileBrowserTreeWidget(QWidget * parent ) :
QTreeWidget( parent ),
m_mousePressed( false ),
m_pressPos(),
m_previewPlayHandle( NULL ),
m_pphMutex( QMutex::Recursive ),
m_contextMenuItem( NULL )
{
setColumnCount( 1 );
headerItem()->setHidden( true );
setSortingEnabled( false );
setFont( pointSizeF( font(), 7.5f ) );
connect( this, SIGNAL( itemDoubleClicked( QTreeWidgetItem *, int ) ),
SLOT( activateListItem( QTreeWidgetItem *, int ) ) );
connect( this, SIGNAL( itemCollapsed( QTreeWidgetItem * ) ),
SLOT( updateDirectory( QTreeWidgetItem * ) ) );
connect( this, SIGNAL( itemExpanded( QTreeWidgetItem * ) ),
SLOT( updateDirectory( QTreeWidgetItem * ) ) );
}
FileBrowserTreeWidget::~FileBrowserTreeWidget()
{
}
void FileBrowserTreeWidget::contextMenuEvent(QContextMenuEvent * e )
{<|fim▁hole|> if( f != NULL && ( f->handling() == FileItem::LoadAsPreset ||
f->handling() == FileItem::LoadByPlugin ) )
{
m_contextMenuItem = f;
QMenu contextMenu( this );
contextMenu.addAction( tr( "Send to active instrument-track" ),
this,
SLOT( sendToActiveInstrumentTrack() ) );
contextMenu.addAction( tr( "Open in new instrument-track/"
"Song-Editor" ),
this,
SLOT( openInNewInstrumentTrackSE() ) );
contextMenu.addAction( tr( "Open in new instrument-track/"
"B+B Editor" ),
this,
SLOT( openInNewInstrumentTrackBBE() ) );
contextMenu.exec( e->globalPos() );
m_contextMenuItem = NULL;
}
}
void FileBrowserTreeWidget::mousePressEvent(QMouseEvent * me )
{
QTreeWidget::mousePressEvent( me );
if( me->button() != Qt::LeftButton )
{
return;
}
QTreeWidgetItem * i = itemAt( me->pos() );
if ( i )
{
// TODO: Restrict to visible selection
// if ( _me->x() > header()->cellPos( header()->mapToActual( 0 ) )
// + treeStepSize() * ( i->depth() + ( rootIsDecorated() ?
// 1 : 0 ) ) + itemMargin() ||
// _me->x() < header()->cellPos(
// header()->mapToActual( 0 ) ) )
// {
m_pressPos = me->pos();
m_mousePressed = true;
// }
}
FileItem * f = dynamic_cast<FileItem *>( i );
if( f != NULL )
{
m_pphMutex.lock();
if( m_previewPlayHandle != NULL )
{
Engine::mixer()->removePlayHandle(
m_previewPlayHandle );
m_previewPlayHandle = NULL;
}
// in special case of sample-files we do not care about
// handling() rather than directly creating a SamplePlayHandle
if( f->type() == FileItem::SampleFile )
{
TextFloat * tf = TextFloat::displayMessage(
tr( "Loading sample" ),
tr( "Please wait, loading sample for "
"preview..." ),
embed::getIconPixmap( "sample_file",
24, 24 ), 0 );
qApp->processEvents(
QEventLoop::ExcludeUserInputEvents );
SamplePlayHandle * s = new SamplePlayHandle(
f->fullName() );
s->setDoneMayReturnTrue( false );
m_previewPlayHandle = s;
delete tf;
}
else if( f->type() != FileItem::VstPluginFile &&
( f->handling() == FileItem::LoadAsPreset ||
f->handling() == FileItem::LoadByPlugin ) )
{
m_previewPlayHandle = new PresetPreviewPlayHandle( f->fullName(), f->handling() == FileItem::LoadByPlugin );
}
if( m_previewPlayHandle != NULL )
{
if( !Engine::mixer()->addPlayHandle(
m_previewPlayHandle ) )
{
m_previewPlayHandle = NULL;
}
}
m_pphMutex.unlock();
}
}
void FileBrowserTreeWidget::mouseMoveEvent( QMouseEvent * me )
{
if( m_mousePressed == true &&
( m_pressPos - me->pos() ).manhattanLength() >
QApplication::startDragDistance() )
{
// make sure any playback is stopped
mouseReleaseEvent( NULL );
FileItem * f = dynamic_cast<FileItem *>( itemAt( m_pressPos ) );
if( f != NULL )
{
switch( f->type() )
{
case FileItem::PresetFile:
new StringPairDrag( f->handling() == FileItem::LoadAsPreset ?
"presetfile" : "pluginpresetfile",
f->fullName(),
embed::getIconPixmap( "preset_file" ), this );
break;
case FileItem::SampleFile:
new StringPairDrag( "samplefile", f->fullName(),
embed::getIconPixmap( "sample_file" ), this );
break;
case FileItem::SoundFontFile:
new StringPairDrag( "soundfontfile", f->fullName(),
embed::getIconPixmap( "soundfont_file" ), this );
break;
case FileItem::VstPluginFile:
new StringPairDrag( "vstpluginfile", f->fullName(),
embed::getIconPixmap( "vst_plugin_file" ), this );
break;
case FileItem::MidiFile:
// don't allow dragging FLP-files as FLP import filter clears project
// without asking
// case fileItem::FlpFile:
new StringPairDrag( "importedproject", f->fullName(),
embed::getIconPixmap( "midi_file" ), this );
break;
default:
break;
}
}
}
}
void FileBrowserTreeWidget::mouseReleaseEvent(QMouseEvent * me )
{
m_mousePressed = false;
m_pphMutex.lock();
if( m_previewPlayHandle != NULL )
{
// if there're samples shorter than 3 seconds, we don't
// stop them if the user releases mouse-button...
if( m_previewPlayHandle->type() == PlayHandle::TypeSamplePlayHandle )
{
SamplePlayHandle * s = dynamic_cast<SamplePlayHandle *>(
m_previewPlayHandle );
if( s && s->totalFrames() - s->framesDone() <=
static_cast<f_cnt_t>( Engine::mixer()->
processingSampleRate() * 3 ) )
{
s->setDoneMayReturnTrue( true );
m_previewPlayHandle = NULL;
m_pphMutex.unlock();
return;
}
}
Engine::mixer()->removePlayHandle( m_previewPlayHandle );
m_previewPlayHandle = NULL;
}
m_pphMutex.unlock();
}
void FileBrowserTreeWidget::handleFile(FileItem * f, InstrumentTrack * it )
{
Engine::mixer()->lock();
switch( f->handling() )
{
case FileItem::LoadAsProject:
if( Engine::mainWindow()->mayChangeProject() )
{
Engine::getSong()->loadProject( f->fullName() );
}
break;
case FileItem::LoadByPlugin:
{
const QString e = f->extension();
Instrument * i = it->instrument();
if( i == NULL ||
!i->descriptor()->supportsFileType( e ) )
{
i = it->loadInstrument(
Engine::pluginFileHandling()[e] );
}
i->loadFile( f->fullName() );
break;
}
case FileItem::LoadAsPreset:
{
DataFile dataFile( f->fullName() );
InstrumentTrack::removeMidiPortNode( dataFile );
it->setSimpleSerializing();
it->loadSettings( dataFile.content().toElement() );
break;
}
case FileItem::ImportAsProject:
if( f->type() == FileItem::FlpFile &&
!Engine::mainWindow()->mayChangeProject() )
{
break;
}
ImportFilter::import( f->fullName(),
Engine::getSong() );
break;
case FileItem::NotSupported:
default:
break;
}
Engine::mixer()->unlock();
}
void FileBrowserTreeWidget::activateListItem(QTreeWidgetItem * item,
int column )
{
FileItem * f = dynamic_cast<FileItem *>( item );
if( f == NULL )
{
return;
}
if( f->handling() == FileItem::LoadAsProject ||
f->handling() == FileItem::ImportAsProject )
{
handleFile( f, NULL );
}
else if( f->handling() != FileItem::NotSupported )
{
// engine::mixer()->lock();
InstrumentTrack * it = dynamic_cast<InstrumentTrack *>(
Track::create( Track::InstrumentTrack,
Engine::getBBTrackContainer() ) );
handleFile( f, it );
// engine::mixer()->unlock();
}
}
void FileBrowserTreeWidget::openInNewInstrumentTrack( TrackContainer* tc )
{
if( m_contextMenuItem->handling() == FileItem::LoadAsPreset ||
m_contextMenuItem->handling() == FileItem::LoadByPlugin )
{
// engine::mixer()->lock();
InstrumentTrack * it = dynamic_cast<InstrumentTrack *>(
Track::create( Track::InstrumentTrack, tc ) );
handleFile( m_contextMenuItem, it );
// engine::mixer()->unlock();
}
}
void FileBrowserTreeWidget::openInNewInstrumentTrackBBE( void )
{
openInNewInstrumentTrack( Engine::getBBTrackContainer() );
}
void FileBrowserTreeWidget::openInNewInstrumentTrackSE( void )
{
openInNewInstrumentTrack( Engine::getSong() );
}
void FileBrowserTreeWidget::sendToActiveInstrumentTrack( void )
{
// get all windows opened in the workspace
QList<QMdiSubWindow*> pl =
Engine::mainWindow()->workspace()->
subWindowList( QMdiArea::StackingOrder );
QListIterator<QMdiSubWindow *> w( pl );
w.toBack();
// now we travel through the window-list until we find an
// instrument-track
while( w.hasPrevious() )
{
InstrumentTrackWindow * itw =
dynamic_cast<InstrumentTrackWindow *>(
w.previous()->widget() );
if( itw != NULL && itw->isHidden() == false )
{
handleFile( m_contextMenuItem, itw->model() );
break;
}
}
}
void FileBrowserTreeWidget::updateDirectory(QTreeWidgetItem * item )
{
Directory * dir = dynamic_cast<Directory *>( item );
if( dir != NULL )
{
dir->update();
}
}
QPixmap * Directory::s_folderPixmap = NULL;
QPixmap * Directory::s_folderOpenedPixmap = NULL;
QPixmap * Directory::s_folderLockedPixmap = NULL;
Directory::Directory(const QString & filename, const QString & path,
const QString & filter ) :
QTreeWidgetItem( QStringList( filename ), TypeDirectoryItem ),
m_directories( path ),
m_filter( filter )
{
initPixmaps();
setChildIndicatorPolicy( QTreeWidgetItem::ShowIndicator );
if( !QDir( fullName() ).isReadable() )
{
setIcon( 0, *s_folderLockedPixmap );
}
else
{
setIcon( 0, *s_folderPixmap );
}
}
void Directory::initPixmaps( void )
{
if( s_folderPixmap == NULL )
{
s_folderPixmap = new QPixmap(
embed::getIconPixmap( "folder" ) );
}
if( s_folderOpenedPixmap == NULL )
{
s_folderOpenedPixmap = new QPixmap(
embed::getIconPixmap( "folder_opened" ) );
}
if( s_folderLockedPixmap == NULL )
{
s_folderLockedPixmap = new QPixmap(
embed::getIconPixmap( "folder_locked" ) );
}
}
void Directory::update( void )
{
if( !isExpanded() )
{
setIcon( 0, *s_folderPixmap );
return;
}
setIcon( 0, *s_folderOpenedPixmap );
if( !childCount() )
{
for( QStringList::iterator it = m_directories.begin();
it != m_directories.end(); ++it )
{
int top_index = childCount();
if( addItems( fullName( *it ) ) &&
( *it ).contains(
ConfigManager::inst()->dataDir() ) )
{
QTreeWidgetItem * sep = new QTreeWidgetItem;
sep->setText( 0,
FileBrowserTreeWidget::tr(
"--- Factory files ---" ) );
sep->setIcon( 0, embed::getIconPixmap(
"factory_files" ) );
insertChild( top_index, sep );
}
}
}
}
bool Directory::addItems(const QString & path )
{
QDir thisDir( path );
if( !thisDir.isReadable() )
{
return false;
}
treeWidget()->setUpdatesEnabled( false );
bool added_something = false;
QStringList files = thisDir.entryList( QDir::Dirs, QDir::Name );
for( QStringList::const_iterator it = files.constBegin();
it != files.constEnd(); ++it )
{
QString cur_file = *it;
if( cur_file[0] != '.' )
{
bool orphan = true;
for( int i = 0; i < childCount(); ++i )
{
Directory * d = dynamic_cast<Directory *>(
child( i ) );
if( d == NULL || cur_file < d->text( 0 ) )
{
insertChild( i, new Directory( cur_file,
path, m_filter ) );
orphan = false;
break;
}
else if( cur_file == d->text( 0 ) )
{
d->addDirectory( path );
orphan = false;
break;
}
}
if( orphan )
{
addChild( new Directory( cur_file, path,
m_filter ) );
}
added_something = true;
}
}
QList<QTreeWidgetItem*> items;
files = thisDir.entryList( QDir::Files, QDir::Name );
for( QStringList::const_iterator it = files.constBegin();
it != files.constEnd(); ++it )
{
QString cur_file = *it;
if( cur_file[0] != '.' &&
thisDir.match( m_filter, cur_file.toLower() ) )
{
items << new FileItem( cur_file, path );
added_something = true;
}
}
addChildren( items );
treeWidget()->setUpdatesEnabled( true );
return added_something;
}
QPixmap * FileItem::s_projectFilePixmap = NULL;
QPixmap * FileItem::s_presetFilePixmap = NULL;
QPixmap * FileItem::s_sampleFilePixmap = NULL;
QPixmap * FileItem::s_soundfontFilePixmap = NULL;
QPixmap * FileItem::s_vstPluginFilePixmap = NULL;
QPixmap * FileItem::s_midiFilePixmap = NULL;
QPixmap * FileItem::s_flpFilePixmap = NULL;
QPixmap * FileItem::s_unknownFilePixmap = NULL;
FileItem::FileItem(QTreeWidget * parent, const QString & name,
const QString & path ) :
QTreeWidgetItem( parent, QStringList( name) , TypeFileItem ),
m_path( path )
{
determineFileType();
initPixmaps();
}
FileItem::FileItem(const QString & name, const QString & path ) :
QTreeWidgetItem( QStringList( name ), TypeFileItem ),
m_path( path )
{
determineFileType();
initPixmaps();
}
void FileItem::initPixmaps( void )
{
if( s_projectFilePixmap == NULL )
{
s_projectFilePixmap = new QPixmap( embed::getIconPixmap(
"project_file", 16, 16 ) );
}
if( s_presetFilePixmap == NULL )
{
s_presetFilePixmap = new QPixmap( embed::getIconPixmap(
"preset_file", 16, 16 ) );
}
if( s_sampleFilePixmap == NULL )
{
s_sampleFilePixmap = new QPixmap( embed::getIconPixmap(
"sample_file", 16, 16 ) );
}
if ( s_soundfontFilePixmap == NULL )
{
s_soundfontFilePixmap = new QPixmap( embed::getIconPixmap(
"soundfont_file", 16, 16 ) );
}
if ( s_vstPluginFilePixmap == NULL )
{
s_vstPluginFilePixmap = new QPixmap( embed::getIconPixmap(
"vst_plugin_file", 16, 16 ) );
}
if( s_midiFilePixmap == NULL )
{
s_midiFilePixmap = new QPixmap( embed::getIconPixmap(
"midi_file", 16, 16 ) );
}
if( s_flpFilePixmap == NULL )
{
s_flpFilePixmap = new QPixmap( embed::getIconPixmap(
"midi_file", 16, 16 ) );
}
if( s_unknownFilePixmap == NULL )
{
s_unknownFilePixmap = new QPixmap( embed::getIconPixmap(
"unknown_file" ) );
}
switch( m_type )
{
case ProjectFile:
setIcon( 0, *s_projectFilePixmap );
break;
case PresetFile:
setIcon( 0, *s_presetFilePixmap );
break;
case SoundFontFile:
setIcon( 0, *s_soundfontFilePixmap );
break;
case VstPluginFile:
setIcon( 0, *s_vstPluginFilePixmap );
break;
case SampleFile:
case PatchFile: // TODO
setIcon( 0, *s_sampleFilePixmap );
break;
case MidiFile:
setIcon( 0, *s_midiFilePixmap );
break;
case FlpFile:
setIcon( 0, *s_flpFilePixmap );
break;
case UnknownFile:
default:
setIcon( 0, *s_unknownFilePixmap );
break;
}
}
void FileItem::determineFileType( void )
{
m_handling = NotSupported;
const QString ext = extension();
if( ext == "mmp" || ext == "mpt" || ext == "mmpz" )
{
m_type = ProjectFile;
m_handling = LoadAsProject;
}
else if( ext == "xpf" || ext == "xml" )
{
m_type = PresetFile;
m_handling = LoadAsPreset;
}
else if( ext == "xiz" && Engine::pluginFileHandling().contains( ext ) )
{
m_type = PresetFile;
m_handling = LoadByPlugin;
}
else if( ext == "sf2" )
{
m_type = SoundFontFile;
}
else if( ext == "pat" )
{
m_type = PatchFile;
}
else if( ext == "mid" )
{
m_type = MidiFile;
m_handling = ImportAsProject;
}
else if( ext == "flp" )
{
m_type = FlpFile;
m_handling = ImportAsProject;
}
else if( ext == "dll" )
{
m_type = VstPluginFile;
m_handling = LoadByPlugin;
}
else
{
m_type = UnknownFile;
}
if( m_handling == NotSupported &&
!ext.isEmpty() && Engine::pluginFileHandling().contains( ext ) )
{
m_handling = LoadByPlugin;
// classify as sample if not classified by anything yet but can
// be handled by a certain plugin
if( m_type == UnknownFile )
{
m_type = SampleFile;
}
}
}
QString FileItem::extension( void )
{
return extension( fullName() );
}
QString FileItem::extension(const QString & file )
{
return QFileInfo( file ).suffix().toLower();
}<|fim▁end|> | FileItem * f = dynamic_cast<FileItem *>( itemAt( e->pos() ) ); |
<|file_name|>RegexValidation.java<|end_file_name|><|fim▁begin|>package servicios.validacion;
public final class RegexValidation {
public static final class ValidaNombres {
private ValidaNombres() {
}
// public static final String INICIALES = "[a-z,A-Z,ñ,Ñ]{2}$+";
public static final String NOMBRE = "[A-Z]+[a-z,ñ]*+[ ]+[A-Z]+[a-z,ñ]*";<|fim▁hole|>
}
public static final String NUMEROTEL = "[+]?[0-9]{3}+[-]+[0-9]{7}";
}
public static final class ValidaMail {
private ValidaMail() {
}
public static final String EMAIL = "^[_A-Za-z0-9-\\+]+(\\.[_A-Za-z0-9-]+)*@"
+ "[A-Za-z0-9-]+(\\.[A-Za-z0-9]+)*(\\.[A-Za-z]{2,})$";
}
public static final class ValidaDireccion{
private ValidaDireccion() {
}
public static final String DIRECCION = "^[_A-Za-z-\\+]+(\\ [0-9-]+)$";
}
public static final class ValidaPalabra{
private ValidaPalabra() {
}
public static final String PALABRA = "[a-zA-Z]*$";
public static final String PALABRAINICIALMAYUSCULA = "[A-Z]+[a-z]*$";
}
}<|fim▁end|> | }
public static final class ValidaTel {
private ValidaTel() { |
<|file_name|>File.py<|end_file_name|><|fim▁begin|># Copyright 1999 by Jeffrey Chang. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Code for more fancy file handles.
Classes:
UndoHandle File object decorator with support for undo-like operations.
StringHandle Wraps a file object around a string.
SGMLStripper Object that strips SGML. This is now considered OBSOLETE, and
is likely to be deprecated in a future release of Biopython,
and later removed.
"""
import StringIO
class UndoHandle:
"""A Python handle that adds functionality for saving lines.
Saves lines in a LIFO fashion.
Added methods:
saveline Save a line to be returned next time.
peekline Peek at the next line without consuming it.
"""
def __init__(self, handle):
self._handle = handle
self._saved = []
def __iter__(self):
return self
def next(self):
next = self.readline()
if not next:<|fim▁hole|> lines = self._saved + self._handle.readlines(*args,**keywds)
self._saved = []
return lines
def readline(self, *args, **keywds):
if self._saved:
line = self._saved.pop(0)
else:
line = self._handle.readline(*args,**keywds)
return line
def read(self, size=-1):
if size == -1:
saved = "".join(self._saved)
self._saved[:] = []
else:
saved = ''
while size > 0 and self._saved:
if len(self._saved[0]) <= size:
size = size - len(self._saved[0])
saved = saved + self._saved.pop(0)
else:
saved = saved + self._saved[0][:size]
self._saved[0] = self._saved[0][size:]
size = 0
return saved + self._handle.read(size)
def saveline(self, line):
if line:
self._saved = [line] + self._saved
def peekline(self):
if self._saved:
line = self._saved[0]
else:
line = self._handle.readline()
self.saveline(line)
return line
def tell(self):
lengths = map(len, self._saved)
sum = reduce(lambda x, y: x+y, lengths, 0)
return self._handle.tell() - sum
def seek(self, *args):
self._saved = []
self._handle.seek(*args)
def __getattr__(self, attr):
return getattr(self._handle, attr)
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self._handle.close()
# I could make this faster by using cStringIO.
# However, cStringIO (in v1.52) does not implement the
# readlines method.
StringHandle = StringIO.StringIO
try:
import sgmllib
except ImportError:
#This isn't available on Python 3, but we don't care much as SGMLStripper
#is obsolete
pass
else:
class SGMLStripper:
"""Object to strip SGML tags (OBSOLETE)."""
class MyParser(sgmllib.SGMLParser):
def __init__(self):
sgmllib.SGMLParser.__init__(self)
self.data = ''
def handle_data(self, data):
self.data = self.data + data
def __init__(self):
import warnings
warnings.warn("This class is obsolete, and likely to be deprecated and later removed in a future version of Biopython", PendingDeprecationWarning)
self._parser = SGMLStripper.MyParser()
def strip(self, str):
"""S.strip(str) -> string
Strip the SGML tags from str.
"""
if not str: # empty string, don't do anything.
return ''
# I need to make sure that I don't return an empty string if
# the buffer is not empty. This can happen if there's a newline
# character embedded within a tag. Thus, I'll first check to
# see if the last character is a newline. If it is, and it's stripped
# away, I'll add it back.
is_newline = str[-1] in ['\n', '\r']
self._parser.data = '' # clear the parser's data (don't reset)
self._parser.feed(str)
if self._parser.data:
str = self._parser.data
elif is_newline:
str = '\n'
else:
str = ''
return str<|fim▁end|> | raise StopIteration
return next
def readlines(self, *args, **keywds): |
<|file_name|>WeatherActivity.java<|end_file_name|><|fim▁begin|>package com.ppweather.app.activity;
import com.ppweather.app.R;
import com.ppweather.app.service.AutoUpdateService;
import com.ppweather.app.util.HttpCallbackListener;
import com.ppweather.app.util.HttpUtil;
import com.ppweather.app.util.Utility;
import android.app.Activity;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.text.TextUtils;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.Window;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;
public class WeatherActivity extends Activity implements OnClickListener{
private LinearLayout weatherInfoLayout;
/**
* 显示城市名
*/
private TextView cityNameText;
/**
* 显示发布时间
*/
private TextView publishText;
/**
* 显示天气描述信息
*/
private TextView weatherDespText;
/**
* 显示气温1
*/
private TextView temp1Text;
/**
* 显示气温2
*/
private TextView temp2Text;
/**
* 显示当前日期
*/
private TextView currentDateText;
/**
* 切换城市
*/
private Button switchCity;
/**
*更新天气
*/
private Button refreshWeather;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.weather_layout);
weatherInfoLayout = (LinearLayout)findViewById(R.id.weather_info_layout);
cityNameText = (TextView)findViewById(R.id.city_name);
publishText = (TextView)findViewById(R.id.publish_text);
weatherDespText = (TextView)findViewById(R.id.weather_desp);
temp1Text = (TextView)findViewById(R.id.temp1);
temp2Text = (TextView)findViewById(R.id.temp2);
currentDateText = (TextView)findViewById(R.id.current_date);
switchCity = (Button)findViewById(R.id.switch_city);
refreshWeather = (Button)findViewById(R.id.refresh_weather);
switchCity.setOnClickListener(this);
refreshWeather.setOnClickListener(this);
String countyCode = getIntent().getStringExtra("county_code");
if (!TextUtils.isEmpty(countyCode)) {
//有县级代号时就去查询天气
publishText.setText("同步中。。。");
Log.d("ssss", "countyCode = " + countyCode);
weatherInfoLayout.setVisibility(View.INVISIBLE);
cityNameText.setVisibility(View.INVISIBLE);
queryWeatherCode(countyCode);
} else {
//没有县级代号就直接显示本地天气
showWeather();
}
}
/**
* 查询县级代号对应的天气代号
* @param countyCode
*/
private void queryWeatherCode(String countyCode) {
String address = "http://www.weather.com.cn/data/list3/city" + countyCode + ".xml";
queryFromServer(address, "countyCode");
}
/**
* 查询天气代号对应的天气
* @param weatherCode
*/
private void queryWeatherInfo(String weatherCode) {
String address = "http://www.weather.com.cn/data/cityinfo/" + weatherCode + ".html";
queryFromServer(address, "weatherCode");
}
private void queryFromServer(final String address, final String type) {
// TODO Auto-generated method stub
HttpUtil.sendHttpRequest(address, new HttpCallbackListener() {
@Override<|fim▁hole|> public void onFinish(final String response) {
// TODO Auto-generated method stub
if ("countyCode".equals(type)) {
if (!TextUtils.isEmpty(response)) {
String[] array = response.split("\\|");
if (array != null && array.length == 2) {
String weatherCode = array[1];
queryWeatherInfo(weatherCode);
}
}
} else if("weatherCode".equals(type)) {
Log.d("ssss", "weatherCode run ");
Utility.handleWeatherResponse(WeatherActivity.this, response);
runOnUiThread(new Runnable() {
@Override
public void run() {
// TODO Auto-generated method stub
showWeather();
}
});
}
}
@Override
public void onError(Exception e) {
// TODO Auto-generated method stub
runOnUiThread(new Runnable() {
@Override
public void run() {
// TODO Auto-generated method stub
publishText.setText("同步失败");
}
});
}
});
}
/**
* 从SharedPreference文件中读取存储的天气信息,并显示到桌面上
*/
private void showWeather() {
// TODO Auto-generated method stub
SharedPreferences prefs =PreferenceManager.getDefaultSharedPreferences(this);
cityNameText.setText(prefs.getString("city_name", ""));
Log.d("ssss", "cityNameText = " + cityNameText.getText());
temp1Text.setText(prefs.getString("temp1", ""));
temp2Text.setText(prefs.getString("temp2", ""));
weatherDespText.setText(prefs.getString("weather_desp", ""));
publishText.setText("今天" + prefs.getString("publish_time", "") + "发布");
currentDateText.setText(prefs.getString("current_date", ""));
weatherInfoLayout.setVisibility(View.VISIBLE);
cityNameText.setVisibility(View.VISIBLE);
//在showWeather中启动AutoUpdateService。这样一旦选中某城市并成功更新天气,AutoUpdateService就会在后台运行,并8小时更新一次天气
Intent intent = new Intent(this,AutoUpdateService.class);
startService(intent);
}
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
switch (v.getId()) {
case R.id.switch_city:
Intent intent = new Intent(this, ChooseAreaActivity.class);
intent.putExtra("from_weather_activity", true);
startActivity(intent);
finish();
break;
case R.id.refresh_weather:
publishText.setText("同步中...");
//从SharedPreferences中读取天气代号
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
String weatherCode = prefs.getString("weather_code", "");
if (!TextUtils.isEmpty(weatherCode)) {
queryWeatherInfo(weatherCode);
}
default:
break;
}
}
}<|fim▁end|> | |
<|file_name|>FileDocumentManagerImpl.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.fileEditor.impl;
import com.intellij.AppTopics;
import com.intellij.CommonBundle;
import com.intellij.codeStyle.CodeStyleFacade;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.TransactionGuard;
import com.intellij.openapi.application.TransactionGuardImpl;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.command.UndoConfirmationPolicy;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.editor.ex.DocumentEx;
import com.intellij.openapi.editor.ex.PrioritizedDocumentListener;
import com.intellij.openapi.editor.impl.EditorFactoryImpl;
import com.intellij.openapi.editor.impl.TrailingSpacesStripper;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileEditor.*;
import com.intellij.openapi.fileEditor.impl.text.TextEditorImpl;
import com.intellij.openapi.fileTypes.BinaryFileTypeDecompilers;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.fileTypes.UnknownFileType;
import com.intellij.openapi.project.*;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.io.FileUtilRt;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.*;
import com.intellij.openapi.vfs.encoding.EncodingManager;
import com.intellij.openapi.vfs.newvfs.NewVirtualFileSystem;
import com.intellij.pom.core.impl.PomModelImpl;
import com.intellij.psi.ExternalChangeAction;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.SingleRootFileViewProvider;
import com.intellij.psi.impl.source.PsiFileImpl;
import com.intellij.testFramework.LightVirtualFile;
import com.intellij.ui.UIBundle;
import com.intellij.ui.components.JBScrollPane;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.messages.MessageBus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import java.awt.*;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.nio.charset.Charset;
import java.util.*;
import java.util.List;
public class FileDocumentManagerImpl extends FileDocumentManager implements VirtualFileListener, VetoableProjectManagerListener, SafeWriteRequestor {
private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.fileEditor.impl.FileDocumentManagerImpl");
public static final Key<Document> HARD_REF_TO_DOCUMENT_KEY = Key.create("HARD_REF_TO_DOCUMENT_KEY");
private static final Key<String> LINE_SEPARATOR_KEY = Key.create("LINE_SEPARATOR_KEY");
private static final Key<VirtualFile> FILE_KEY = Key.create("FILE_KEY");
private static final Key<Boolean> MUST_RECOMPUTE_FILE_TYPE = Key.create("Must recompute file type");
private final Set<Document> myUnsavedDocuments = ContainerUtil.newConcurrentSet();
private final MessageBus myBus;
private static final Object lock = new Object();
private final FileDocumentManagerListener myMultiCaster;
private final TrailingSpacesStripper myTrailingSpacesStripper = new TrailingSpacesStripper();
private boolean myOnClose;
private volatile MemoryDiskConflictResolver myConflictResolver = new MemoryDiskConflictResolver();
private final PrioritizedDocumentListener myPhysicalDocumentChangeTracker = new PrioritizedDocumentListener() {
@Override
public int getPriority() {
return Integer.MIN_VALUE;
}
@Override
public void documentChanged(DocumentEvent e) {
final Document document = e.getDocument();
if (!ApplicationManager.getApplication().hasWriteAction(ExternalChangeAction.ExternalDocumentChange.class)) {
myUnsavedDocuments.add(document);
}
final Runnable currentCommand = CommandProcessor.getInstance().getCurrentCommand();
Project project = currentCommand == null ? null : CommandProcessor.getInstance().getCurrentCommandProject();
if (project == null)
project = ProjectUtil.guessProjectForFile(getFile(document));
String lineSeparator = CodeStyleFacade.getInstance(project).getLineSeparator();
document.putUserData(LINE_SEPARATOR_KEY, lineSeparator);
// avoid documents piling up during batch processing
if (areTooManyDocumentsInTheQueue(myUnsavedDocuments)) {
saveAllDocumentsLater();
}
}
};
public FileDocumentManagerImpl(@NotNull VirtualFileManager virtualFileManager, @NotNull ProjectManager projectManager) {
virtualFileManager.addVirtualFileListener(this);
projectManager.addProjectManagerListener(this);
myBus = ApplicationManager.getApplication().getMessageBus();
myBus.connect().subscribe(ProjectManager.TOPIC, this);
InvocationHandler handler = (proxy, method, args) -> {
multiCast(method, args);
return null;
};
final ClassLoader loader = FileDocumentManagerListener.class.getClassLoader();
myMultiCaster = (FileDocumentManagerListener)Proxy.newProxyInstance(loader, new Class[]{FileDocumentManagerListener.class}, handler);
}
private static void unwrapAndRethrow(Exception e) {
Throwable unwrapped = e;
if (e instanceof InvocationTargetException) {
unwrapped = e.getCause() == null ? e : e.getCause();
}
if (unwrapped instanceof Error) throw (Error)unwrapped;
if (unwrapped instanceof RuntimeException) throw (RuntimeException)unwrapped;
LOG.error(unwrapped);
}
@SuppressWarnings("OverlyBroadCatchBlock")
private void multiCast(@NotNull Method method, Object[] args) {
try {
method.invoke(myBus.syncPublisher(AppTopics.FILE_DOCUMENT_SYNC), args);
}
catch (ClassCastException e) {
LOG.error("Arguments: "+ Arrays.toString(args), e);
}
catch (Exception e) {
unwrapAndRethrow(e);
}
// Allows pre-save document modification
for (FileDocumentManagerListener listener : getListeners()) {
try {
method.invoke(listener, args);
}
catch (Exception e) {
unwrapAndRethrow(e);
}
}
// stripping trailing spaces
try {
method.invoke(myTrailingSpacesStripper, args);
}
catch (Exception e) {
unwrapAndRethrow(e);
}
}
@Override
@Nullable
public Document getDocument(@NotNull final VirtualFile file) {
ApplicationManager.getApplication().assertReadAccessAllowed();
DocumentEx document = (DocumentEx)getCachedDocument(file);
if (document == null) {
if (!file.isValid() || file.isDirectory() || isBinaryWithoutDecompiler(file)) return null;
boolean tooLarge = FileUtilRt.isTooLarge(file.getLength());
if (file.getFileType().isBinary() && tooLarge) return null;
final CharSequence text = tooLarge ? LoadTextUtil.loadText(file, getPreviewCharCount(file)) : LoadTextUtil.loadText(file);
synchronized (lock) {
document = (DocumentEx)getCachedDocument(file);
if (document != null) return document; // Double checking
document = (DocumentEx)createDocument(text, file);
document.setModificationStamp(file.getModificationStamp());
final FileType fileType = file.getFileType();
document.setReadOnly(tooLarge || !file.isWritable() || fileType.isBinary());
if (!(file instanceof LightVirtualFile || file.getFileSystem() instanceof NonPhysicalFileSystem)) {
document.addDocumentListener(myPhysicalDocumentChangeTracker);
}
if (file instanceof LightVirtualFile) {
registerDocument(document, file);
}
else {
document.putUserData(FILE_KEY, file);
cacheDocument(file, document);
}
}
myMultiCaster.fileContentLoaded(file, document);
}
return document;
}
public static boolean areTooManyDocumentsInTheQueue(Collection<Document> documents) {
if (documents.size() > 100) return true;
int totalSize = 0;
for (Document document : documents) {
totalSize += document.getTextLength();
if (totalSize > FileUtilRt.LARGE_FOR_CONTENT_LOADING) return true;
}
return false;
}
private static Document createDocument(final CharSequence text, VirtualFile file) {
boolean acceptSlashR = file instanceof LightVirtualFile && StringUtil.indexOf(text, '\r') >= 0;
boolean freeThreaded = Boolean.TRUE.equals(file.getUserData(SingleRootFileViewProvider.FREE_THREADED));
return ((EditorFactoryImpl)EditorFactory.getInstance()).createDocument(text, acceptSlashR, freeThreaded);
}
@Override
@Nullable
public Document getCachedDocument(@NotNull VirtualFile file) {
Document hard = file.getUserData(HARD_REF_TO_DOCUMENT_KEY);
return hard != null ? hard : getDocumentFromCache(file);
}
public static void registerDocument(@NotNull final Document document, @NotNull VirtualFile virtualFile) {
synchronized (lock) {
document.putUserData(FILE_KEY, virtualFile);
virtualFile.putUserData(HARD_REF_TO_DOCUMENT_KEY, document);
}
}
@Override
@Nullable
public VirtualFile getFile(@NotNull Document document) {
return document.getUserData(FILE_KEY);
}
@TestOnly
public void dropAllUnsavedDocuments() {
if (!ApplicationManager.getApplication().isUnitTestMode()) {
throw new RuntimeException("This method is only for test mode!");
}
ApplicationManager.getApplication().assertWriteAccessAllowed();
if (!myUnsavedDocuments.isEmpty()) {
myUnsavedDocuments.clear();
fireUnsavedDocumentsDropped();
}
}
private void saveAllDocumentsLater() {
// later because some document might have been blocked by PSI right now
ApplicationManager.getApplication().invokeLater(() -> {
if (ApplicationManager.getApplication().isDisposed()) {
return;
}
final Document[] unsavedDocuments = getUnsavedDocuments();
for (Document document : unsavedDocuments) {
VirtualFile file = getFile(document);
if (file == null) continue;
Project project = ProjectUtil.guessProjectForFile(file);
if (project == null) continue;
if (PsiDocumentManager.getInstance(project).isDocumentBlockedByPsi(document)) continue;
saveDocument(document);
}
});
}
@Override
public void saveAllDocuments() {
saveAllDocuments(true);
}
/**
* @param isExplicit caused by user directly (Save action) or indirectly (e.g. Compile)
*/
public void saveAllDocuments(boolean isExplicit) {
ApplicationManager.getApplication().assertIsDispatchThread();
((TransactionGuardImpl)TransactionGuard.getInstance()).assertWriteActionAllowed();
myMultiCaster.beforeAllDocumentsSaving();
if (myUnsavedDocuments.isEmpty()) return;
final Map<Document, IOException> failedToSave = new HashMap<>();
final Set<Document> vetoed = new HashSet<>();
while (true) {
int count = 0;
for (Document document : myUnsavedDocuments) {
if (failedToSave.containsKey(document)) continue;
if (vetoed.contains(document)) continue;
try {
doSaveDocument(document, isExplicit);
}
catch (IOException e) {
//noinspection ThrowableResultOfMethodCallIgnored
failedToSave.put(document, e);
}
catch (SaveVetoException e) {
vetoed.add(document);
}
count++;
}
if (count == 0) break;
}
if (!failedToSave.isEmpty()) {
handleErrorsOnSave(failedToSave);
}
}
@Override
public void saveDocument(@NotNull final Document document) {
saveDocument(document, true);
}
public void saveDocument(@NotNull final Document document, final boolean explicit) {
ApplicationManager.getApplication().assertIsDispatchThread();
((TransactionGuardImpl)TransactionGuard.getInstance()).assertWriteActionAllowed();
if (!myUnsavedDocuments.contains(document)) return;
try {
doSaveDocument(document, explicit);
}
catch (IOException e) {
handleErrorsOnSave(Collections.singletonMap(document, e));
}
catch (SaveVetoException ignored) {
}
}
@Override
public void saveDocumentAsIs(@NotNull Document document) {
VirtualFile file = getFile(document);
boolean spaceStrippingEnabled = true;
if (file != null) {
spaceStrippingEnabled = TrailingSpacesStripper.isEnabled(file);
TrailingSpacesStripper.setEnabled(file, false);
}
try {
saveDocument(document);
}
finally {
if (file != null) {
TrailingSpacesStripper.setEnabled(file, spaceStrippingEnabled);
}
}
}
private static class SaveVetoException extends Exception {}
private void doSaveDocument(@NotNull final Document document, boolean isExplicit) throws IOException, SaveVetoException {
VirtualFile file = getFile(document);
if (file == null || file instanceof LightVirtualFile || file.isValid() && !isFileModified(file)) {
removeFromUnsaved(document);
return;
}
if (file.isValid() && needsRefresh(file)) {
file.refresh(false, false);
if (!myUnsavedDocuments.contains(document)) return;
}
if (!maySaveDocument(file, document, isExplicit)) {
throw new SaveVetoException();
}
WriteAction.run(() -> doSaveDocumentInWriteAction(document, file));
}
private boolean maySaveDocument(VirtualFile file, Document document, boolean isExplicit) {
return !myConflictResolver.hasConflict(file) &&
Arrays.stream(Extensions.getExtensions(FileDocumentSynchronizationVetoer.EP_NAME)).allMatch(vetoer -> vetoer.maySaveDocument(document, isExplicit));
}
private void doSaveDocumentInWriteAction(@NotNull final Document document, @NotNull final VirtualFile file) throws IOException {
if (!file.isValid()) {
removeFromUnsaved(document);
return;
}
if (!file.equals(getFile(document))) {
registerDocument(document, file);
}
if (!isSaveNeeded(document, file)) {
if (document instanceof DocumentEx) {
((DocumentEx)document).setModificationStamp(file.getModificationStamp());
}
removeFromUnsaved(document);
updateModifiedProperty(file);
return;
}
PomModelImpl.guardPsiModificationsIn(() -> {
myMultiCaster.beforeDocumentSaving(document);
LOG.assertTrue(file.isValid());
String text = document.getText();
String lineSeparator = getLineSeparator(document, file);
if (!lineSeparator.equals("\n")) {
text = StringUtil.convertLineSeparators(text, lineSeparator);
}
Project project = ProjectLocator.getInstance().guessProjectForFile(file);
LoadTextUtil.write(project, file, this, text, document.getModificationStamp());
myUnsavedDocuments.remove(document);
LOG.assertTrue(!myUnsavedDocuments.contains(document));
myTrailingSpacesStripper.clearLineModificationFlags(document);
});
}
private static void updateModifiedProperty(@NotNull VirtualFile file) {
for (Project project : ProjectManager.getInstance().getOpenProjects()) {
FileEditorManager fileEditorManager = FileEditorManager.getInstance(project);
for (FileEditor editor : fileEditorManager.getAllEditors(file)) {
if (editor instanceof TextEditorImpl) {
((TextEditorImpl)editor).updateModifiedProperty();
}
}
}
}
private void removeFromUnsaved(@NotNull Document document) {
myUnsavedDocuments.remove(document);
fireUnsavedDocumentsDropped();
LOG.assertTrue(!myUnsavedDocuments.contains(document));
}
private static boolean isSaveNeeded(@NotNull Document document, @NotNull VirtualFile file) throws IOException {
if (file.getFileType().isBinary() || document.getTextLength() > 1000 * 1000) { // don't compare if the file is too big
return true;
}
byte[] bytes = file.contentsToByteArray();
CharSequence loaded = LoadTextUtil.getTextByBinaryPresentation(bytes, file, false, false);
return !Comparing.equal(document.getCharsSequence(), loaded);
}
private static boolean needsRefresh(final VirtualFile file) {
final VirtualFileSystem fs = file.getFileSystem();
return fs instanceof NewVirtualFileSystem && file.getTimeStamp() != ((NewVirtualFileSystem)fs).getTimeStamp(file);
}
@NotNull
public static String getLineSeparator(@NotNull Document document, @NotNull VirtualFile file) {
String lineSeparator = LoadTextUtil.getDetectedLineSeparator(file);
if (lineSeparator == null) {
lineSeparator = document.getUserData(LINE_SEPARATOR_KEY);
assert lineSeparator != null : document;
}
return lineSeparator;
}
@Override
@NotNull
public String getLineSeparator(@Nullable VirtualFile file, @Nullable Project project) {
String lineSeparator = file == null ? null : LoadTextUtil.getDetectedLineSeparator(file);
if (lineSeparator == null) {
CodeStyleFacade settingsManager = project == null
? CodeStyleFacade.getInstance()
: CodeStyleFacade.getInstance(project);
lineSeparator = settingsManager.getLineSeparator();
}
return lineSeparator;
}
@Override
public boolean requestWriting(@NotNull Document document, Project project) {
final VirtualFile file = getInstance().getFile(document);
if (project != null && file != null && file.isValid()) {
return !file.getFileType().isBinary() && ReadonlyStatusHandler.ensureFilesWritable(project, file);
}
if (document.isWritable()) {
return true;
}
document.fireReadOnlyModificationAttempt();
return false;
}
@Override
public void reloadFiles(@NotNull final VirtualFile... files) {
for (VirtualFile file : files) {
if (file.exists()) {
final Document doc = getCachedDocument(file);
if (doc != null) {
reloadFromDisk(doc);
}
}
}
}
@Override
@NotNull
public Document[] getUnsavedDocuments() {
if (myUnsavedDocuments.isEmpty()) {
return Document.EMPTY_ARRAY;
}
List<Document> list = new ArrayList<>(myUnsavedDocuments);
return list.toArray(new Document[list.size()]);
}
@Override
public boolean isDocumentUnsaved(@NotNull Document document) {
return myUnsavedDocuments.contains(document);
}
@Override
public boolean isFileModified(@NotNull VirtualFile file) {
final Document doc = getCachedDocument(file);
return doc != null && isDocumentUnsaved(doc) && doc.getModificationStamp() != file.getModificationStamp();
}
@Override
public void propertyChanged(@NotNull VirtualFilePropertyEvent event) {
final VirtualFile file = event.getFile();
if (VirtualFile.PROP_WRITABLE.equals(event.getPropertyName())) {
final Document document = getCachedDocument(file);
if (document != null) {
ApplicationManager.getApplication().runWriteAction((ExternalChangeAction)() -> document.setReadOnly(!file.isWritable()));
}
}
else if (VirtualFile.PROP_NAME.equals(event.getPropertyName())) {
Document document = getCachedDocument(file);
if (document != null) {
// a file is linked to a document - chances are it is an "unknown text file" now
if (isBinaryWithoutDecompiler(file)) {
unbindFileFromDocument(file, document);
}
}
}
}
private void unbindFileFromDocument(@NotNull VirtualFile file, @NotNull Document document) {
removeDocumentFromCache(file);
file.putUserData(HARD_REF_TO_DOCUMENT_KEY, null);
document.putUserData(FILE_KEY, null);
}
private static boolean isBinaryWithDecompiler(@NotNull VirtualFile file) {
final FileType ft = file.getFileType();
return ft.isBinary() && BinaryFileTypeDecompilers.INSTANCE.forFileType(ft) != null;
}
private static boolean isBinaryWithoutDecompiler(@NotNull VirtualFile file) {
final FileType fileType = file.getFileType();
return fileType.isBinary() && BinaryFileTypeDecompilers.INSTANCE.forFileType(fileType) == null;
}
@Override
public void contentsChanged(@NotNull VirtualFileEvent event) {
if (event.isFromSave()) return;
final VirtualFile file = event.getFile();
final Document document = getCachedDocument(file);
if (document == null) {
myMultiCaster.fileWithNoDocumentChanged(file);
return;
}
if (isBinaryWithDecompiler(file)) {
myMultiCaster.fileWithNoDocumentChanged(file); // This will generate PSI event at FileManagerImpl
}
if (document.getModificationStamp() == event.getOldModificationStamp() || !isDocumentUnsaved(document)) {
reloadFromDisk(document);
}
}
@Override
public void reloadFromDisk(@NotNull final Document document) {
ApplicationManager.getApplication().assertIsDispatchThread();
final VirtualFile file = getFile(document);
assert file != null;
if (!fireBeforeFileContentReload(file, document)) {
return;
}
final Project project = ProjectLocator.getInstance().guessProjectForFile(file);
boolean[] isReloadable = {isReloadable(file, document, project)};
if (isReloadable[0]) {
CommandProcessor.getInstance().executeCommand(project, () -> ApplicationManager.getApplication().runWriteAction(
new ExternalChangeAction.ExternalDocumentChange(document, project) {
@Override
public void run() {
if (!isBinaryWithoutDecompiler(file)) {
LoadTextUtil.setCharsetWasDetectedFromBytes(file, null);
file.setBOM(null); // reset BOM in case we had one and the external change stripped it away
file.setCharset(null, null, false);
boolean wasWritable = document.isWritable();
document.setReadOnly(false);
boolean tooLarge = FileUtilRt.isTooLarge(file.getLength());
CharSequence reloaded = tooLarge ? LoadTextUtil.loadText(file, getPreviewCharCount(file)) : LoadTextUtil.loadText(file);
isReloadable[0] = isReloadable(file, document, project);
if (isReloadable[0]) {
DocumentEx documentEx = (DocumentEx)document;
documentEx.replaceText(reloaded, file.getModificationStamp());
}
document.setReadOnly(!wasWritable);
}
}
}
), UIBundle.message("file.cache.conflict.action"), null, UndoConfirmationPolicy.REQUEST_CONFIRMATION);
}
if (isReloadable[0]) {
myMultiCaster.fileContentReloaded(file, document);
}
else {
unbindFileFromDocument(file, document);
myMultiCaster.fileWithNoDocumentChanged(file);
}
myUnsavedDocuments.remove(document);
}
private static boolean isReloadable(@NotNull VirtualFile file, @NotNull Document document, @Nullable Project project) {
PsiFile cachedPsiFile = project == null ? null : PsiDocumentManager.getInstance(project).getCachedPsiFile(document);
return !(FileUtilRt.isTooLarge(file.getLength()) && file.getFileType().isBinary()) &&
(cachedPsiFile == null || cachedPsiFile instanceof PsiFileImpl || isBinaryWithDecompiler(file));
}
@TestOnly
void setAskReloadFromDisk(@NotNull Disposable disposable, @NotNull MemoryDiskConflictResolver newProcessor) {
final MemoryDiskConflictResolver old = myConflictResolver;
myConflictResolver = newProcessor;
Disposer.register(disposable, () -> myConflictResolver = old);
}
@Override
public void fileDeleted(@NotNull VirtualFileEvent event) {
Document doc = getCachedDocument(event.getFile());
if (doc != null) {
myTrailingSpacesStripper.documentDeleted(doc);
}
}
@Override
public void beforeContentsChange(@NotNull VirtualFileEvent event) {
VirtualFile virtualFile = event.getFile();
// check file type in second order to avoid content detection running
if (virtualFile.getLength() == 0 && virtualFile.getFileType() == UnknownFileType.INSTANCE) {
virtualFile.putUserData(MUST_RECOMPUTE_FILE_TYPE, Boolean.TRUE);
}
myConflictResolver.beforeContentChange(event);
}
public static boolean recomputeFileTypeIfNecessary(@NotNull VirtualFile virtualFile) {
if (virtualFile.getUserData(MUST_RECOMPUTE_FILE_TYPE) != null) {
virtualFile.getFileType();
virtualFile.putUserData(MUST_RECOMPUTE_FILE_TYPE, null);
return true;
}
return false;
}
@Override<|fim▁hole|> public boolean canClose(@NotNull Project project) {
if (!myUnsavedDocuments.isEmpty()) {
myOnClose = true;
try {
saveAllDocuments();
}
finally {
myOnClose = false;
}
}
return myUnsavedDocuments.isEmpty();
}
private void fireUnsavedDocumentsDropped() {
myMultiCaster.unsavedDocumentsDropped();
}
private boolean fireBeforeFileContentReload(final VirtualFile file, @NotNull Document document) {
for (FileDocumentSynchronizationVetoer vetoer : Extensions.getExtensions(FileDocumentSynchronizationVetoer.EP_NAME)) {
try {
if (!vetoer.mayReloadFileContent(file, document)) {
return false;
}
}
catch (Exception e) {
LOG.error(e);
}
}
myMultiCaster.beforeFileContentReload(file, document);
return true;
}
@NotNull
private static FileDocumentManagerListener[] getListeners() {
return FileDocumentManagerListener.EP_NAME.getExtensions();
}
private static int getPreviewCharCount(@NotNull VirtualFile file) {
Charset charset = EncodingManager.getInstance().getEncoding(file, false);
float bytesPerChar = charset == null ? 2 : charset.newEncoder().averageBytesPerChar();
return (int)(FileUtilRt.LARGE_FILE_PREVIEW_SIZE / bytesPerChar);
}
private void handleErrorsOnSave(@NotNull Map<Document, IOException> failures) {
if (ApplicationManager.getApplication().isUnitTestMode()) {
IOException ioException = ContainerUtil.getFirstItem(failures.values());
if (ioException != null) {
throw new RuntimeException(ioException);
}
return;
}
for (IOException exception : failures.values()) {
LOG.warn(exception);
}
final String text = StringUtil.join(failures.values(), Throwable::getMessage, "\n");
final DialogWrapper dialog = new DialogWrapper(null) {
{
init();
setTitle(UIBundle.message("cannot.save.files.dialog.title"));
}
@Override
protected void createDefaultActions() {
super.createDefaultActions();
myOKAction.putValue(Action.NAME, UIBundle
.message(myOnClose ? "cannot.save.files.dialog.ignore.changes" : "cannot.save.files.dialog.revert.changes"));
myOKAction.putValue(DEFAULT_ACTION, null);
if (!myOnClose) {
myCancelAction.putValue(Action.NAME, CommonBundle.getCloseButtonText());
}
}
@Override
protected JComponent createCenterPanel() {
final JPanel panel = new JPanel(new BorderLayout(0, 5));
panel.add(new JLabel(UIBundle.message("cannot.save.files.dialog.message")), BorderLayout.NORTH);
final JTextPane area = new JTextPane();
area.setText(text);
area.setEditable(false);
area.setMinimumSize(new Dimension(area.getMinimumSize().width, 50));
panel.add(new JBScrollPane(area, ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS, ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER),
BorderLayout.CENTER);
return panel;
}
};
if (dialog.showAndGet()) {
for (Document document : failures.keySet()) {
reloadFromDisk(document);
}
}
}
private final Map<VirtualFile, Document> myDocumentCache = ContainerUtil.createConcurrentWeakValueMap();
// used in Upsource
protected void cacheDocument(@NotNull VirtualFile file, @NotNull Document document) {
myDocumentCache.put(file, document);
}
// used in Upsource
protected void removeDocumentFromCache(@NotNull VirtualFile file) {
myDocumentCache.remove(file);
}
// used in Upsource
protected Document getDocumentFromCache(@NotNull VirtualFile file) {
return myDocumentCache.get(file);
}
}<|fim▁end|> | |
<|file_name|>lockhandler.py<|end_file_name|><|fim▁begin|>"""
A *lock* defines access to a particular subsystem or property of
Evennia. For example, the "owner" property can be impmemented as a
lock. Or the disability to lift an object or to ban users.
A lock consists of three parts:
- access_type - this defines what kind of access this lock regulates. This
just a string.
- function call - this is one or many calls to functions that will determine
if the lock is passed or not.
- lock function(s). These are regular python functions with a special
set of allowed arguments. They should always return a boolean depending
on if they allow access or not.
A lock function is defined by existing in one of the modules
listed by settings.LOCK_FUNC_MODULES. It should also always
take four arguments looking like this:
funcname(accessing_obj, accessed_obj, *args, **kwargs):
[...]
The accessing object is the object wanting to gain access.
The accessed object is the object this lock resides on
args and kwargs will hold optional arguments and/or keyword arguments
to the function as a list and a dictionary respectively.
Example:
perm(accessing_obj, accessed_obj, *args, **kwargs):
"Checking if the object has a particular, desired permission"
if args:
desired_perm = args[0]
return desired_perm in accessing_obj.permissions.all()
return False
Lock functions should most often be pretty general and ideally possible to
re-use and combine in various ways to build clever locks.
Lock definition ("Lock string")
A lock definition is a string with a special syntax. It is added to
each object's lockhandler, making that lock available from then on.
The lock definition looks like this:
'access_type:[NOT] func1(args)[ AND|OR][NOT] func2() ...'
That is, the access_type, a colon followed by calls to lock functions
combined with AND or OR. NOT negates the result of the following call.
Example:
We want to limit who may edit a particular object (let's call this access_type
for 'edit', it depends on what the command is looking for). We want this to
only work for those with the Permission 'Builders'. So we use our lock
function above and define it like this:
'edit:perm(Builders)'
Here, the lock-function perm() will be called with the string
'Builders' (accessing_obj and accessed_obj are added automatically,
you only need to add the args/kwargs, if any).
If we wanted to make sure the accessing object was BOTH a Builders and a
GoodGuy, we could use AND:
'edit:perm(Builders) AND perm(GoodGuy)'
To allow EITHER Builders and GoodGuys, we replace AND with OR. perm() is just
one example, the lock function can do anything and compare any properties of
the calling object to decide if the lock is passed or not.
'lift:attrib(very_strong) AND NOT attrib(bad_back)'
To make these work, add the string to the lockhandler of the object you want
to apply the lock to:
obj.lockhandler.add('edit:perm(Builders)')
From then on, a command that wants to check for 'edit' access on this
object would do something like this:
if not target_obj.lockhandler.has_perm(caller, 'edit'):
caller.msg("Sorry, you cannot edit that.")
All objects also has a shortcut called 'access' that is recommended to
use instead:
if not target_obj.access(caller, 'edit'):
caller.msg("Sorry, you cannot edit that.")
Permissions
Permissions are just text strings stored in a comma-separated list on
typeclassed objects. The default perm() lock function uses them,
taking into account settings.PERMISSION_HIERARCHY. Also, the
restricted @perm command sets them, but otherwise they are identical
to any other identifier you can use.
"""
from __future__ import print_function
from builtins import object
import re
import inspect
from django.conf import settings
from evennia.utils import logger, utils
from django.utils.translation import ugettext as _
__all__ = ("LockHandler", "LockException")
WARNING_LOG = "lockwarnings.log"
#
# Exception class. This will be raised
# by errors in lock definitions.
#
class LockException(Exception):
"""
Raised during an error in a lock.
"""
pass
#
# Cached lock functions
#
_LOCKFUNCS = {}
def _cache_lockfuncs():
"""
Updates the cache.
"""
global _LOCKFUNCS
_LOCKFUNCS = {}
for modulepath in settings.LOCK_FUNC_MODULES:
_LOCKFUNCS.update(utils.callables_from_module(modulepath))
#
# pre-compiled regular expressions
#
_RE_FUNCS = re.compile(r"\w+\([^)]*\)")
_RE_SEPS = re.compile(r"(?<=[ )])AND(?=\s)|(?<=[ )])OR(?=\s)|(?<=[ )])NOT(?=\s)")
_RE_OK = re.compile(r"%s|and|or|not")
#
#
# Lock handler
#
#
class LockHandler(object):
"""
This handler should be attached to all objects implementing
permission checks, under the property 'lockhandler'.
"""
def __init__(self, obj):
"""
Loads and pre-caches all relevant locks and their functions.
Args:
obj (object): The object on which the lockhandler is
defined.
"""
if not _LOCKFUNCS:
_cache_lockfuncs()
self.obj = obj
self.locks = {}
self.reset()
def __str__(self):
return ";".join(self.locks[key][2] for key in sorted(self.locks))
def _log_error(self, message):
"Try to log errors back to object"
raise LockException(message)
def _parse_lockstring(self, storage_lockstring):
"""
Helper function. This is normally only called when the
lockstring is cached and does preliminary checking. locks are
stored as a string
atype:[NOT] lock()[[ AND|OR [NOT] lock()[...]];atype...
Args:
storage_locksring (str): The lockstring to parse.
"""
locks = {}
if not storage_lockstring:
return locks
duplicates = 0
elist = [] # errors
wlist = [] # warnings
for raw_lockstring in storage_lockstring.split(';'):
if not raw_lockstring:
continue
lock_funcs = []
try:
access_type, rhs = (part.strip() for part in raw_lockstring.split(':', 1))
except ValueError:
logger.log_trace()
return locks
# parse the lock functions and separators
funclist = _RE_FUNCS.findall(rhs)
evalstring = rhs
for pattern in ('AND', 'OR', 'NOT'):
evalstring = re.sub(r"\b%s\b" % pattern, pattern.lower(), evalstring)
nfuncs = len(funclist)
for funcstring in funclist:
funcname, rest = (part.strip().strip(')') for part in funcstring.split('(', 1))
func = _LOCKFUNCS.get(funcname, None)
if not callable(func):
elist.append(_("Lock: lock-function '%s' is not available.") % funcstring)
continue
args = list(arg.strip() for arg in rest.split(',') if arg and not '=' in arg)
kwargs = dict([arg.split('=', 1) for arg in rest.split(',') if arg and '=' in arg])
lock_funcs.append((func, args, kwargs))
evalstring = evalstring.replace(funcstring, '%s')
if len(lock_funcs) < nfuncs:
continue
try:
# purge the eval string of any superfluous items, then test it
evalstring = " ".join(_RE_OK.findall(evalstring))
eval(evalstring % tuple(True for func in funclist), {}, {})
except Exception:
elist.append(_("Lock: definition '%s' has syntax errors.") % raw_lockstring)
continue
if access_type in locks:
duplicates += 1
wlist.append(_("LockHandler on %(obj)s: access type '%(access_type)s' changed from '%(source)s' to '%(goal)s' " % \
{"obj":self.obj, "access_type":access_type, "source":locks[access_type][2], "goal":raw_lockstring}))
locks[access_type] = (evalstring, tuple(lock_funcs), raw_lockstring)
if wlist:
# a warning text was set, it's not an error, so only report
logger.log_file("\n".join(wlist), WARNING_LOG)
if elist:
# an error text was set, raise exception.
raise LockException("\n".join(elist))
# return the gathered locks in an easily executable form
return locks
def _cache_locks(self, storage_lockstring):
"""
Store data
"""
self.locks = self._parse_lockstring(storage_lockstring)
def _save_locks(self):
"""
Store locks to obj
"""
self.obj.lock_storage = ";".join([tup[2] for tup in self.locks.values()])
def cache_lock_bypass(self, obj):
"""
We cache superuser bypass checks here for efficiency. This
needs to be re-run when a player is assigned to a character.
We need to grant access to superusers. We need to check both
directly on the object (players), through obj.player and using
the get_player() method (this sits on serversessions, in some
rare cases where a check is done before the login process has
yet been fully finalized)
Args:
obj (object): This is checked for the `is_superuser` property.
"""
self.lock_bypass = hasattr(obj, "is_superuser") and obj.is_superuser
def add(self, lockstring):
"""
Add a new lockstring to handler.
Args:
lockstring (str): A string on the form
`"<access_type>:<functions>"`. Multiple access types
should be separated by semicolon (`;`).
Returns:
success (bool): The outcome of the addition, `False` on
error.
"""
# sanity checks
for lockdef in lockstring.split(';'):
if not ':' in lockstring:
self._log_error(_("Lock: '%s' contains no colon (:).") % lockdef)
return False
access_type, rhs = [part.strip() for part in lockdef.split(':', 1)]
if not access_type:
self._log_error(_("Lock: '%s' has no access_type (left-side of colon is empty).") % lockdef)
return False
if rhs.count('(') != rhs.count(')'):
self._log_error(_("Lock: '%s' has mismatched parentheses.") % lockdef)
return False
if not _RE_FUNCS.findall(rhs):
self._log_error(_("Lock: '%s' has no valid lock functions.") % lockdef)
return False
# get the lock string
storage_lockstring = self.obj.lock_storage
if storage_lockstring:
storage_lockstring = storage_lockstring + ";" + lockstring
else:
storage_lockstring = lockstring
# cache the locks will get rid of eventual doublets
self._cache_locks(storage_lockstring)
self._save_locks()
return True
def replace(self, lockstring):
"""
Replaces the lockstring entirely.
Args:
lockstring (str): The new lock definition.
Return:
success (bool): False if an error occurred.
Raises:
LockException: If a critical error occurred.
If so, the old string is recovered.
"""
old_lockstring = str(self)
self.clear()
try:
return self.add(lockstring)
except LockException:
self.add(old_lockstring)
raise
def get(self, access_type=None):
"""
Get the full lockstring or the lockstring of a particular
access type.
Args:
access_type (str, optional):
Returns:
lockstring (str): The matched lockstring, or the full
lockstring if no access_type was given.
"""
if access_type:
return self.locks.get(access_type, ["", "", ""])[2]
return str(self)
def remove(self, access_type):
"""
Remove a particular lock from the handler
Args:
access_type (str): The type of lock to remove.
Returns:
success (bool): If the access_type was not found
in the lock, this returns `False`.
"""
if access_type in self.locks:
del self.locks[access_type]
self._save_locks()
return True
return False
delete = remove # alias for historical reasons
def clear(self):
"""
Remove all locks in the handler.
"""
self.locks = {}
self.lock_storage = ""
self._save_locks()
def reset(self):
"""
Set the reset flag, so the the lock will be re-cached at next
checking. This is usually called by @reload.
"""
self._cache_locks(self.obj.lock_storage)
self.cache_lock_bypass(self.obj)
def check(self, accessing_obj, access_type, default=False, no_superuser_bypass=False):
"""
Checks a lock of the correct type by passing execution off to
the lock function(s).
Args:
accessing_obj (object): The object seeking access.
access_type (str): The type of access wanted.
default (bool, optional): If no suitable lock type is
found, default to this result.
no_superuser_bypass (bool): Don't use this unless you
really, really need to, it makes supersusers susceptible
to the lock check.
Notes:<|fim▁hole|> order along with their args/kwargs. These are now executed in
sequence, creating a list of True/False values. This is put
into the evalstring, which is a string of AND/OR/NOT entries
separated by placeholders where each function result should
go. We just put those results in and evaluate the string to
get a final, combined True/False value for the lockstring.
The important bit with this solution is that the full
lockstring is never blindly evaluated, and thus there (should
be) no way to sneak in malign code in it. Only "safe" lock
functions (as defined by your settings) are executed.
"""
try:
# check if the lock should be bypassed (e.g. superuser status)
if accessing_obj.locks.lock_bypass and not no_superuser_bypass:
return True
except AttributeError:
# happens before session is initiated.
if not no_superuser_bypass and ((hasattr(accessing_obj, 'is_superuser') and accessing_obj.is_superuser)
or (hasattr(accessing_obj, 'player') and hasattr(accessing_obj.player, 'is_superuser') and accessing_obj.player.is_superuser)
or (hasattr(accessing_obj, 'get_player') and (not accessing_obj.get_player() or accessing_obj.get_player().is_superuser))):
return True
# no superuser or bypass -> normal lock operation
if access_type in self.locks:
# we have a lock, test it.
evalstring, func_tup, raw_string = self.locks[access_type]
# execute all lock funcs in the correct order, producing a tuple of True/False results.
true_false = tuple(bool(tup[0](accessing_obj, self.obj, *tup[1], **tup[2])) for tup in func_tup)
# the True/False tuple goes into evalstring, which combines them
# with AND/OR/NOT in order to get the final result.
return eval(evalstring % true_false)
else:
return default
def _eval_access_type(self, accessing_obj, locks, access_type):
"""
Helper method for evaluating the access type using eval().
Args:
accessing_obj (object): Object seeking access.
locks (dict): The pre-parsed representation of all access-types.
access_type (str): An access-type key to evaluate.
"""
evalstring, func_tup, raw_string = locks[access_type]
true_false = tuple(tup[0](accessing_obj, self.obj, *tup[1], **tup[2])
for tup in func_tup)
return eval(evalstring % true_false)
def check_lockstring(self, accessing_obj, lockstring, no_superuser_bypass=False,
default=False, access_type=None):
"""
Do a direct check against a lockstring ('atype:func()..'),
without any intermediary storage on the accessed object.
Args:
accessing_obj (object or None): The object seeking access.
Importantly, this can be left unset if the lock functions
don't access it, no updating or storage of locks are made
against this object in this method.
lockstring (str): Lock string to check, on the form
`"access_type:lock_definition"` where the `access_type`
part can potentially be set to a dummy value to just check
a lock condition.
no_superuser_bypass (bool, optional): Force superusers to heed lock.
default (bool, optional): Fallback result to use if `access_type` is set
but no such `access_type` is found in the given `lockstring`.
access_type (str, bool): If set, only this access_type will be looked up
among the locks defined by `lockstring`.
Return:
access (bool): If check is passed or not.
"""
try:
if accessing_obj.locks.lock_bypass and not no_superuser_bypass:
return True
except AttributeError:
if no_superuser_bypass and ((hasattr(accessing_obj, 'is_superuser') and accessing_obj.is_superuser)
or (hasattr(accessing_obj, 'player') and hasattr(accessing_obj.player, 'is_superuser') and accessing_obj.player.is_superuser)
or (hasattr(accessing_obj, 'get_player') and (not accessing_obj.get_player() or accessing_obj.get_player().is_superuser))):
return True
if not ":" in lockstring:
lockstring = "%s:%s" % ("_dummy", lockstring)
locks = self._parse_lockstring(lockstring)
if access_type:
if not access_type in locks:
return default
else:
return self._eval_access_type(
accessing_obj, locks, access_type)
for access_type in locks:
return self._eval_access_type(accessing_obj, locks, access_type)
def _test():
# testing
class TestObj(object):
pass
import pdb
obj1 = TestObj()
obj2 = TestObj()
#obj1.lock_storage = "owner:dbref(#4);edit:dbref(#5) or perm(Wizards);examine:perm(Builders);delete:perm(Wizards);get:all()"
#obj1.lock_storage = "cmd:all();admin:id(1);listen:all();send:all()"
obj1.lock_storage = "listen:perm(Immortals)"
pdb.set_trace()
obj1.locks = LockHandler(obj1)
obj2.permissions.add("Immortals")
obj2.id = 4
#obj1.locks.add("edit:attr(test)")
print("comparing obj2.permissions (%s) vs obj1.locks (%s)" % (obj2.permissions, obj1.locks))
print(obj1.locks.check(obj2, 'owner'))
print(obj1.locks.check(obj2, 'edit'))
print(obj1.locks.check(obj2, 'examine'))
print(obj1.locks.check(obj2, 'delete'))
print(obj1.locks.check(obj2, 'get'))
print(obj1.locks.check(obj2, 'listen'))<|fim▁end|> | A lock is executed in the follwoing way:
Parsing the lockstring, we (during cache) extract the valid
lock functions and store their function objects in the right |
<|file_name|>db.js<|end_file_name|><|fim▁begin|>import util from 'util'
import mongoose from 'mongoose'
import seedData from '../seedData'
const debug = require('debug')('api:server:db')
mongoose.Promise = require('bluebird')
const host = process.env.MONGO_HOST || 'localhost'
const database = process.env.MONGO_DATABASE || 'admin'
const port = process.env.MONGO_PORT || 27017
const url = `mongodb://${host}:${port}/${database}`<|fim▁hole|> debug(`connecting to ${url}...`)
mongoose.connect(url).then(
() => seedData(),
err => {
debug(`unable to connect to database: ${err}`)
setTimeout(configureMongo, 5000)
}
)
if (process.env.MONGO_DEBUG) {
mongoose.set('debug', (collectionName, method, query, doc) => {
debug(`${collectionName}.${method}`, util.inspect(query, false, 20), doc);
});
}
}
export default configureMongo<|fim▁end|> |
const configureMongo = () => { |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
from django.contrib.contenttypes import generic
from models import Attribute, BaseModel
from django.utils.translation import ugettext_lazy as _
class MetaInline(generic.GenericTabularInline):
model = Attribute
extra = 0
class BaseAdmin(admin.ModelAdmin):
"""
def get_readonly_fields(self, request, obj=None):
fs = super(BaseAdmin, self).get_readonly_fields(request, obj)
fs += ('created_by', 'last_updated_by',)
return fs
def get_fieldsets(self, request, obj=None):
fs = super(BaseAdmin, self).get_fieldsets(request, obj)
fs[0][1]['fields'].remove('created_by')
fs[0][1]['fields'].remove('last_updated_by')
fs.extend([(_('Other informations'), {'fields':['created_by','last_updated_by'], 'classes':['collapse']})])<|fim▁hole|> return fs
def changelist_view(self, request, extra_context=None):
if request.user.has_perm('%s.can_view_deleted' % self.model._meta.app_label):
if not "deleted_flag" in self.list_filter:
self.list_filter += ("deleted_flag",)
return super(BaseAdmin, self).changelist_view(request, extra_context)
def queryset(self, request):
return super(BaseAdmin, self).queryset(request).exclude(deleted_flag=True)
"""
def save_model(self, request, obj, form, change):
if not change:
obj.created_by = request.user
obj.last_updated_by = request.user
obj.save()
def save_formset(self, request, form, formset, change):
instances = formset.save(commit=False)
for instance in instances:
if isinstance(instance, BaseModel): #Check if it is the correct type of inline
if not instance.created_by_id:
instance.created_by = request.user
instance.last_updated_by = request.user
instance.save()<|fim▁end|> | |
<|file_name|>fis-conf.js<|end_file_name|><|fim▁begin|>require('fis3-smarty')(fis);
<|fim▁hole|><|fim▁end|> | fis.set('namespace', 'common'); |
<|file_name|>momentum_indicators.rs<|end_file_name|><|fim▁begin|>extern crate tars;
use tars::momentum_indicators::rsi::rsi;
use tars::helpers::round_array;
// Some randomly generated data to test against TA-Lib (see generate_data.py & correct_values.py)
const OPEN: &[f64] = &[1984.03, 1959.83, 2041.42, 2019.04, 1969.53, 2082.75, 2209.52, 2200.9,
2364.04, 2543.32, 2423.95, 2483.28, 2604.88, 2393.81, 2231.27, 2420.82,
2544.0, 2766.67, 2919.62, 2763.25];
const HIGH: &[f64] = &[2174.72, 2129.49, 2158.92, 2050.2, 2042.12, 2151.19, 2220.64, 2352.98,<|fim▁hole|> 2311.16, 2463.15, 2651.8, 2749.42];
const CLOSE: &[f64] = &[1959.83, 2041.42, 2019.04, 1969.53, 2082.75, 2209.52, 2200.9, 2364.04,
2543.32, 2423.95, 2483.28, 2604.88, 2393.81, 2231.27, 2420.82, 2544.0,
2766.67, 2919.62, 2763.25, 2922.14];
#[test]
fn rsi_works() {
let mut result = rsi(CLOSE, 6).unwrap();
let expected = &[79.9771, 86.5336, 90.5949, 73.0035, 75.8056, 80.7258, 56.706, 44.4766,
57.3488, 63.879, 72.8847, 77.5072, 64.1009, 70.3536];
round_array(result.as_mut(), 4);
assert_eq!(result, expected);
}<|fim▁end|> | 2456.25, 2691.53, 2572.81, 2494.14, 2845.93, 2682.66, 2527.13, 2455.68,
2607.54, 2872.17, 3004.26, 3036.05];
const LOW: &[f64] = &[1934.7, 1921.02, 1793.77, 1887.36, 1919.72, 1868.23, 1991.19, 2011.08,
2193.91, 2183.96, 2223.15, 2363.19, 2240.03, 2208.31, 2192.15, 2199.02, |
<|file_name|>APIInstall.js<|end_file_name|><|fim▁begin|>var APIInstall = ServiceCaller.extend( {
key_name: 'Install',
className: 'APIInstall',
getLicense: function() {
return this.argumentsHandler( this.className, 'getLicense', arguments );
},
getRequirements: function() {
return this.argumentsHandler( this.className, 'getRequirements', arguments );
},
getDatabaseTypeArray: function() {
return this.argumentsHandler( this.className, 'getDatabaseTypeArray', arguments );
},
getDatabaseConfig: function() {
return this.argumentsHandler( this.className, 'getDatabaseConfig', arguments );
},
createDatabase: function() {
return this.argumentsHandler( this.className, 'createDatabase', arguments );
},
getDatabaseSchema: function() {
return this.argumentsHandler( this.className, 'getDatabaseSchema', arguments );
},
setDatabaseSchema: function() {
return this.argumentsHandler( this.className, 'setDatabaseSchema', arguments );
},
postUpgrade: function() {
return this.argumentsHandler( this.className, 'postUpgrade', arguments );
},
installDone: function() {
return this.argumentsHandler( this.className, 'installDone', arguments );
},
getSystemSettings: function() {
return this.argumentsHandler( this.className, 'getSystemSettings', arguments );
},
setSystemSettings: function() {<|fim▁hole|>
getCompany: function() {
return this.argumentsHandler( this.className, 'getCompany', arguments );
},
setCompany: function() {
return this.argumentsHandler( this.className, 'setCompany', arguments );
},
getUser: function() {
return this.argumentsHandler( this.className, 'getUser', arguments );
},
setUser: function() {
return this.argumentsHandler( this.className, 'setUser', arguments );
},
getProvinceOptions: function() {
return this.argumentsHandler( this.className, 'getProvinceOptions', arguments );
},
getMaintenanceJobs: function() {
return this.argumentsHandler( this.className, 'getMaintenanceJobs', arguments );
},
testConnection: function() {
return this.argumentsHandler( this.className, 'testConnection', arguments );
}
} );<|fim▁end|> | return this.argumentsHandler( this.className, 'setSystemSettings', arguments );
}, |
<|file_name|>stager.go<|end_file_name|><|fim▁begin|>package stager
import (
"crypto/md5"
"fmt"
"io"
"log"
"os"
"github.com/hatofmonkeys/cloudfocker/utils"
"github.com/cloudfoundry-incubator/linux-circus/buildpackrunner"
"github.com/cloudfoundry-incubator/runtime-schema/models"
)
type BuildpackRunner interface {
Run() error
}
func RunBuildpack(writer io.Writer, runner BuildpackRunner) error {
fmt.Fprintln(writer, "Running Buildpacks...")
return runner.Run()
}
<|fim▁hole|> var err error
dirs := []string{}
if dirs, err = utils.SubDirs(buildpackDir); err != nil {
log.Fatalf(" %s", err)
}
config := models.NewCircusTailorConfig(dirs)
return buildpackrunner.New(&config)
}
func ValidateStagedApp(cloudfockerHome string) error {
if _, err := os.Stat(cloudfockerHome + "/droplet/app"); err != nil {
return fmt.Errorf("Staging failed - have you added a buildpack for this type of application?")
}
if _, err := os.Stat(cloudfockerHome + "/droplet/staging_info.yml"); err != nil {
return fmt.Errorf("Staging failed - no staging info was produced by the matching buildpack!")
}
return nil
}
func prepareMd5BuildpacksDir(src string, dst string) {
os.MkdirAll(src, 0755)
os.MkdirAll(dst, 0755)
var err error
dirs := []string{}
if dirs, err = utils.SubDirs(src); err != nil {
log.Fatalf(" %s", err)
}
for _, dir := range dirs {
if err := os.Symlink(src+"/"+dir, dst+"/"+md5sum(dir)); err != nil {
log.Fatalf(" %s", err)
}
}
}
func md5sum(src string) string {
return fmt.Sprintf("%x", md5.Sum([]byte(src)))
}<|fim▁end|> | func NewBuildpackRunner(buildpackDir string) *buildpackrunner.Runner {
prepareMd5BuildpacksDir(buildpackDir, "/tmp/buildpacks") |
<|file_name|>timeout.go<|end_file_name|><|fim▁begin|>package guard
import (
"time"
"gopkg.in/workanator/go-floc.v2"
"gopkg.in/workanator/go-floc.v2/errors"
)
// TimeoutTrigger triggers when the execution of the job timed out.
type TimeoutTrigger func(ctx floc.Context, ctrl floc.Control, id interface{})
// Timeout protects the job from taking too much time on execution.
// The job is run in it's own goroutine while the current goroutine waits
// until the job finished or time went out or the flow is finished.
func Timeout(when WhenTimeoutFunc, id interface{}, job floc.Job) floc.Job {
return OnTimeout(when, id, job, nil)
}
// OnTimeout protects the job from taking too much time on execution.
// In addition it takes TimeoutTrigger func which called if time is out.
// The job is run in it's own goroutine while the current goroutine waits<|fim▁hole|> return func(ctx floc.Context, ctrl floc.Control) error {
// Create the channel to read the result error
done := make(chan error)
defer close(done)
// Create timer
timer := time.NewTimer(when(ctx, id))
defer timer.Stop()
// Run the job
go func() {
var err error
defer func() { done <- err }()
err = job(ctx, ctrl)
}()
// Wait for one of possible events
select {
case <-ctx.Done():
// The execution finished
case err := <-done:
// The job finished. Return the result immediately.
return err
case <-timer.C:
// The execution is timed out
if timeoutTrigger != nil {
timeoutTrigger(ctx, ctrl, id)
} else {
ctrl.Fail(id, errors.NewErrTimeout(id, time.Now().UTC()))
}
}
// Wait until the job is finished and return the result.
return <-done
}
}<|fim▁end|> | // until the job finished or time went out or the flow is finished.
func OnTimeout(when WhenTimeoutFunc, id interface{}, job floc.Job, timeoutTrigger TimeoutTrigger) floc.Job { |
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>#include <xpcc/architecture/platform.hpp>
#include <xpcc/debug/logger.hpp>
// ----------------------------------------------------------------------------
// Set the log level
#undef XPCC_LOG_LEVEL
#define XPCC_LOG_LEVEL xpcc::log::INFO
typedef GpioInputC0 Adc1In;
typedef GpioInputC2 Adc2In;
typedef GpioInputB13 Adc3In;
typedef GpioInputB12 Adc4In;<|fim▁hole|>static void
printAdc()
{
const float maxVoltage = 3.3;
float voltage = 0.0;
int adcValue = 0;
adcValue = Adc1::getValue();
XPCC_LOG_INFO << "Adc1: value=" << adcValue;
voltage = adcValue * maxVoltage / 0xfff;
XPCC_LOG_INFO << "; voltage=" << voltage << xpcc::endl;
/*
adcValue = Adc2::getValue();
XPCC_LOG_INFO << "Adc2: value=" << adcValue;
voltage = adcValue * maxVoltage / 0xfff;
XPCC_LOG_INFO << "; voltage=" << voltage << xpcc::endl;
adcValue = Adc3::getValue();
XPCC_LOG_INFO << "Adc3: value=" << adcValue;
voltage = adcValue * maxVoltage / 0xfff;
XPCC_LOG_INFO << "; voltage=" << voltage << xpcc::endl;
adcValue = Adc4::getValue();
XPCC_LOG_INFO << "Adc4: value=" << adcValue;
voltage = adcValue * maxVoltage / 0xfff;
XPCC_LOG_INFO << "; voltage=" << voltage << xpcc::endl;
*/
}
// ----------------------------------------------------------------------------
int
main()
{
Board::initialize();
// initialize Uart2 for XPCC_LOG_INFO
GpioOutputA2::connect(Usart2::Tx);
GpioInputA3::connect(Usart2::Rx, Gpio::InputType::PullUp);
Usart2::initialize<Board::systemClock, 115200>(12);
// initialize Adc
Adc1::initialize(Adc1::ClockMode::Asynchronous, Adc1::Prescaler::Div128,
Adc1::CalibrationMode::SingleEndedInputsMode, true);
Adc1::setFreeRunningMode(true);
Adc1In::connect(Adc1::Channel6);
Adc1::setChannel(Adc1In::Adc1Channel, Adc1::SampleTime::Cycles2);
Adc1::startConversion();
Adc2::initialize(Adc2::ClockMode::Asynchronous, Adc2::Prescaler::Div128,
Adc2::CalibrationMode::SingleEndedInputsMode, true);
Adc2::setFreeRunningMode(true);
Adc2In::connect(Adc2::Channel8);
Adc2::setChannel(Adc2In::Adc2Channel, Adc2::SampleTime::Cycles2);
Adc2::startConversion();
Adc3::initialize(Adc3::ClockMode::Asynchronous, Adc3::Prescaler::Div128,
Adc3::CalibrationMode::SingleEndedInputsMode, true);
Adc3::setFreeRunningMode(true);
Adc3In::connect(Adc3::Channel5);
Adc3::setChannel(Adc3In::Adc3Channel, Adc3::SampleTime::Cycles2);
Adc3::startConversion();
Adc4::initialize(Adc4::ClockMode::Asynchronous, Adc4::Prescaler::Div128,
Adc4::CalibrationMode::SingleEndedInputsMode, true);
Adc4::setFreeRunningMode(true);
Adc4In::connect(Adc4::Channel3);
Adc4::setChannel(Adc4In::Adc4Channel, Adc4::SampleTime::Cycles2);
Adc4::startConversion();
while (1)
{
xpcc::delayMilliseconds(200);
printAdc();
}
return 0;
}<|fim▁end|> |
xpcc::IODeviceWrapper< Usart2, xpcc::IOBuffer::BlockIfFull > loggerDevice;
xpcc::log::Logger xpcc::log::info(loggerDevice);
|
<|file_name|>ship.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 *-*
from pyglet.window import key
from pyglet import clock
from . import util, physicalobject
from . import resources
class Ship(physicalobject.PhysicalObject):
"""A class for the player"""
def __init__(self, thrust_image=None, *args, **kwargs):
super().__init__(*args, **kwargs)
# Set some easy-to-tweak constants
# play values
self.rotate_speed = 170.0
self.bullet_speed = 500.0
self.thrust_acc = 500
self.friction = 0.95
self.bullet_duration = 0.6
self.thrust = False
self.thrust_image = thrust_image
self.normal_image = self.image
self.bullets = set() # FIXME: bullet by OOT
def on_key_press(self, symbol, modifiers):
if symbol == key.SPACE:
self.shoot()
elif symbol == key.LEFT:
self.turn(-1)
elif symbol == key.RIGHT:
self.turn(1)
elif symbol == key.UP:
self.set_thrust(True)
def on_key_release(self, symbol, modifiers):
if symbol in (key.LEFT, key.RIGHT):
self.turn(0)
elif symbol == key.UP:
self.set_thrust(False)
def update(self, dt):
super().update(dt)
if self.thrust and self.thrust_image:
self.image = self.thrust_image
else:
self.image = self.normal_image
# update velocity
if self.thrust:
acc = util.angle_to_vector(self.rotation)
for i in (0,1):
self.vel[i] += acc[i] * self.thrust_acc * dt
# add friction
for i in (0,1):
self.vel[i] *= (1 - self.friction * dt)
for bullet in set(self.bullets):
if bullet.update(dt):
self.bullets.remove(bullet)
return False
def set_thrust(self, on):
self.thrust = on
if on:
resources.thrust_sound.seek(0)
resources.thrust_sound.play()
else:
resources.thrust_sound.pause()
def turn(self, clockwise):
self.rotation_speed = clockwise * self.rotate_speed
def shoot(self):<|fim▁hole|>
bullet_pos = [self.x + self.radius * forward[0], self.y + self.radius * forward[1]]
bullet_vel = [self.vel[0] + self.bullet_speed * forward[0], self.vel[1] + self.bullet_speed * forward[1]]
bullet = physicalobject.PhysicalObject(lifespan=self.bullet_duration, vel=bullet_vel, x=bullet_pos[0], y=bullet_pos[1],
img=resources.shot_image, batch=self.batch, group=self.group, screensize=self.screensize)
self.bullets.add(bullet)
def destroy(self):
# check invulnerability
if self.opacity != 255:
return
explosion = super().destroy()
self.rotation = -90
self.x = self.screensize[0] / 2
self.y = self.screensize[1] / 2
self.vel = [0, 0]
self.set_thrust(False)
self.visible = True
return explosion
def normal_mode(self, dt):
self.opacity = 255
def invulnerable(self, time):
# be invulnerable for a brief time
self.opacity = 128
clock.schedule_once(self.normal_mode, time)<|fim▁end|> | resources.bullet_sound.play()
forward = util.angle_to_vector(self.rotation) |
<|file_name|>config.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (C) 2015 Luis López <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
# USA.
import unittest
from ldotcommons import config
class TestRecord(unittest.TestCase):
def setUp(self):
pass
<|fim▁hole|> def test_init_with_args(self):
a = config.Record({'foo': 1, 'bar': 'x'})
self.assertEqual(a.get('foo'), 1)
b = config.Record()
b.set('foo', 1)
b.set('bar', 'x')
self.assertEqual(a, b)
def test_setget(self):
s = config.Record()
s.set('foo', 1)
s.set('bar', 'x')
s.set('x.y', [])
self.assertEqual(s.get('foo'), 1)
self.assertEqual(s.get('bar'), 'x')
self.assertEqual(s.get('x.y'), [])
def test_nonexistent_key(self):
s = config.Record()
with self.assertRaises(KeyError):
s.get('foo')
def test_delete(self):
s = config.Record()
s.set('foo', 1)
s.set('foo.bar', 2)
s.delete('foo')
with self.assertRaises(KeyError):
s.get('foo.bar')
with self.assertRaises(KeyError):
s.get('foo')
def test_eq(self):
data = {
'foo': 1,
'x.y': 'z',
'dict': {'a': 'b'}
}
a = config.Record(**data.copy())
b = config.Record(**data.copy())
self.assertEqual(a, b)
def test_sub(self):
x = config.Record({
'foo': 1,
'bar.x': 'x',
'bar.y': 'y',
})
y = config.Record({
'x': 'x',
'y': 'y',
})
self.assertEqual(x.sub('bar'), y)
def test_children(self):
x = config.Record({
'foo': 1,
'bar.x': 'x',
'bar.y': 'y',
})
self.assertEqual(set(x.children('bar')), set(['x', 'y']))
class TestRecordAttr(unittest.TestCase):
def test_getset(self):
x = config.RecordAttr({'foo': 1, 'bar': 'x', 'a.b': 2})
self.assertEqual(x.foo, 1)
self.assertEqual(x.a.b, 2)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>xbox-one-webdriver-server.js<|end_file_name|><|fim▁begin|>#!/usr/bin/env node
/**
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
let baseModule;
try {
// To support easy development when making changes in the source repo, we
// first try looking for a copy of the base module using a relative path. In
// this context, we prefer the local copy over one that may already be
// installed.
baseModule = require('../../generic-webdriver-server');
} catch (error) {
if (!error.message.includes('Cannot find module')) {
throw error;
}
// When this module is running in an installed context, we fall back to
// requiring the installed base module by name.
baseModule = require('generic-webdriver-server');
}
const {GenericSingleSessionWebDriverServer, yargs} = baseModule;
const {
checkPlatformRequirements,
loadOnXboxOne,
takeScreenshot,
addXboxOneArgs,
} = require('./xbox-one-utils');
/** WebDriver server backend for Xbox One */
class XboxOneWebDriverServer extends GenericSingleSessionWebDriverServer {
constructor() {
super();<|fim▁hole|> /** @override */
async navigateToSingleSession(url) {
await loadOnXboxOne(this.flags, this.log, url);
}
/** @override */
async closeSingleSession() {
// Send the device back to the home screen.
await loadOnXboxOne(this.flags, this.log, null);
}
/** @override */
async screenshot(sessionId) {
return await takeScreenshot(
this.flags.hostname, this.flags.username, this.flags.password);
}
}
addXboxOneArgs(yargs);
const server = new XboxOneWebDriverServer();
server.listen();<|fim▁end|> | checkPlatformRequirements();
}
|
<|file_name|>test_smart_step_into_native_function_in_return.py<|end_file_name|><|fim▁begin|>def f(s):
s = s[::-1]
return s.swapcase()
<|fim▁hole|><|fim▁end|> | result = f(f(f(f(f('abcdef'))))) # breakpoint |
<|file_name|>parallel_reader.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Implements a parallel data reader with queues and optional shuffling."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint:disable=g-direct-tensorflow-import
from tensorflow.python.framework import dtypes as tf_dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import io_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import gfile
from tensorflow.python.summary import summary
from tensorflow.python.training import input as tf_input
from tensorflow.python.training import queue_runner
# pylint:enable=g-direct-tensorflow-import
class ParallelReader(io_ops.ReaderBase):
"""Reader class that uses multiple readers in parallel to improve speed.
See ReaderBase for supported methods.
"""
def __init__(self,
reader_class,
common_queue,
num_readers=4,
reader_kwargs=None):
"""ParallelReader creates num_readers instances of the reader_class.
Each instance is created by calling the `reader_class` function passing
the arguments specified in `reader_kwargs` as in:
reader_class(**read_kwargs)
When you read from a ParallelReader, with its `read()` method,
you just dequeue examples from the `common_queue`.
The readers will read different files in parallel, asynchronously enqueueing
their output into `common_queue`. The `common_queue.dtypes` must be
[tf.string, tf.string]
Because each reader can read from a different file, the examples in the
`common_queue` could be from different files. Due to the asynchronous
reading there is no guarantee that all the readers will read the same
number of examples.
If the `common_queue` is a shuffling queue, then the examples are shuffled.
Usage:
common_queue = tf.queue.RandomShuffleQueue(
capacity=256,
min_after_dequeue=128,
dtypes=[tf.string, tf.string])
p_reader = ParallelReader(tf.compat.v1.TFRecordReader, common_queue)
common_queue = tf.queue.FIFOQueue(
capacity=256,
dtypes=[tf.string, tf.string])
p_reader = ParallelReader(readers, common_queue, num_readers=2)
Args:
reader_class: one of the io_ops.ReaderBase subclasses ex: TFRecordReader
common_queue: a Queue to hold (key, value pairs) with `dtypes` equal to
[tf.string, tf.string]. Must be one of the data_flow_ops.Queues
instances, ex. `tf.queue.FIFOQueue()`, `tf.queue.RandomShuffleQueue()`,
...
num_readers: a integer, number of instances of reader_class to create.
reader_kwargs: an optional dict of kwargs to create the readers.
Raises:
TypeError: if `common_queue.dtypes` is not [tf.string, tf.string].
"""
if len(common_queue.dtypes) != 2:
raise TypeError('common_queue.dtypes must be [tf.string, tf.string]')
for dtype in common_queue.dtypes:
if not dtype.is_compatible_with(tf_dtypes.string):
raise TypeError('common_queue.dtypes must be [tf.string, tf.string]')
reader_kwargs = reader_kwargs or {}
self._readers = [reader_class(**reader_kwargs) for _ in range(num_readers)]
self._common_queue = common_queue
@property
def num_readers(self):
return len(self._readers)
@property
def common_queue(self):
return self._common_queue
def read(self, queue, name=None):
"""Returns the next record (key, value pair) produced by the reader.
The multiple reader instances are all configured to `read()` from the
filenames listed in `queue` and enqueue their output into the `common_queue`
passed to the constructor, and this method returns the next record dequeued
from that `common_queue`.
Readers dequeue a work unit from `queue` if necessary (e.g. when a
reader needs to start reading from a new file since it has finished with
the previous file).
A queue runner for enqueuing in the `common_queue` is automatically added
to the TF QueueRunners collection.
Args:
queue: A Queue or a mutable string Tensor representing a handle to a
Queue, with string work items.
name: A name for the operation (optional).
Returns:
The next record (i.e. (key, value pair)) from the common_queue.
"""
self._configure_readers_by(queue)
return self._common_queue.dequeue(name=name)
def read_up_to(self, queue, num_records, name=None):
"""Returns up to num_records (key, value pairs) produced by a reader.
Will dequeue a work unit from queue if necessary (e.g., when the
Reader needs to start reading from a new file since it has
finished with the previous file).
It may return less than num_records even before the last batch.
**Note** This operation is not supported by all types of `common_queue`s.
If a `common_queue` does not support `dequeue_up_to()`, then a
`tf.errors.UnimplementedError` is raised.
Args:
queue: A Queue or a mutable string Tensor representing a handle to a
Queue, with string work items.
num_records: Number of records to read.
name: A name for the operation (optional).
Returns:
A tuple of Tensors (keys, values) from common_queue.
keys: A 1-D string Tensor.
values: A 1-D string Tensor.
"""
self._configure_readers_by(queue)
return self._common_queue.dequeue_up_to(num_records, name)
def _configure_readers_by(self, queue):<|fim▁hole|> enqueue_ops = []
for reader in self._readers:
enqueue_ops.append(self._common_queue.enqueue(reader.read(queue)))
queue_runner.add_queue_runner(
queue_runner.QueueRunner(self._common_queue, enqueue_ops))
def num_records_produced(self, name=None):
"""Returns the number of records this reader has produced.
Args:
name: A name for the operation (optional).
Returns:
An int64 Tensor.
"""
num_records = [r.num_records_produced() for r in self._readers]
return math_ops.add_n(num_records, name=name)
def num_work_units_completed(self, name=None):
"""Returns the number of work units this reader has finished processing.
Args:
name: A name for the operation (optional).
Returns:
An int64 Tensor.
"""
num_work_units = [r.num_work_units_completed() for r in self._readers]
return math_ops.add_n(num_work_units, name=name)
def parallel_read(data_sources,
reader_class,
num_epochs=None,
num_readers=4,
reader_kwargs=None,
shuffle=True,
dtypes=None,
capacity=256,
min_after_dequeue=128,
seed=None,
scope=None):
"""Reads multiple records in parallel from data_sources using n readers.
It uses a ParallelReader to read from multiple files in parallel using
multiple readers created using `reader_class` with `reader_kwargs'.
If shuffle is True the common_queue would be a RandomShuffleQueue otherwise
it would be a FIFOQueue.
Usage:
data_sources = ['path_to/train*']
key, value = parallel_read(data_sources, tf.CSVReader, num_readers=4)
Args:
data_sources: a list/tuple of files or the location of the data, i.e.
/path/to/train@128, /path/to/train* or /tmp/.../train*
reader_class: one of the io_ops.ReaderBase subclasses ex: TFRecordReader
num_epochs: The number of times each data source is read. If left as None,
the data will be cycled through indefinitely.
num_readers: a integer, number of Readers to create.
reader_kwargs: an optional dict, of kwargs for the reader.
shuffle: boolean, whether should shuffle the files and the records by using
RandomShuffleQueue as common_queue.
dtypes: A list of types. The length of dtypes must equal the number of
elements in each record. If it is None it will default to [tf.string,
tf.string] for (key, value).
capacity: integer, capacity of the common_queue.
min_after_dequeue: integer, minimum number of records in the common_queue
after dequeue. Needed for a good shuffle.
seed: A seed for RandomShuffleQueue.
scope: Optional name scope for the ops.
Returns:
key, value: a tuple of keys and values from the data_source.
"""
data_files = get_data_files(data_sources)
with ops.name_scope(scope, 'parallel_read'):
filename_queue = tf_input.string_input_producer(
data_files,
num_epochs=num_epochs,
shuffle=shuffle,
seed=seed,
name='filenames')
dtypes = dtypes or [tf_dtypes.string, tf_dtypes.string]
if shuffle:
common_queue = data_flow_ops.RandomShuffleQueue(
capacity=capacity,
min_after_dequeue=min_after_dequeue,
dtypes=dtypes,
seed=seed,
name='common_queue')
else:
common_queue = data_flow_ops.FIFOQueue(
capacity=capacity, dtypes=dtypes, name='common_queue')
summary.scalar(
'fraction_of_%d_full' % capacity,
math_ops.cast(common_queue.size(), tf_dtypes.float32) * (1. / capacity))
return ParallelReader(
reader_class,
common_queue,
num_readers=num_readers,
reader_kwargs=reader_kwargs).read(filename_queue)
def single_pass_read(data_sources, reader_class, reader_kwargs=None,
scope=None):
"""Reads sequentially the data_sources using the reader, doing a single pass.
Args:
data_sources: a list/tuple of files or the location of the data, i.e.
/path/to/train@128, /path/to/train* or /tmp/.../train*
reader_class: one of the io_ops.ReaderBase subclasses ex: TFRecordReader.
reader_kwargs: an optional dict, of kwargs for the reader.
scope: Optional name scope for the ops.
Returns:
key, value: a tuple of keys and values from the data_source.
"""
data_files = get_data_files(data_sources)
with ops.name_scope(scope, 'single_pass_read'):
filename_queue = tf_input.string_input_producer(
data_files, num_epochs=1, shuffle=False, capacity=1, name='filenames')
reader_kwargs = reader_kwargs or {}
return reader_class(**reader_kwargs).read(filename_queue)
def get_data_files(data_sources):
"""Get data_files from data_sources.
Args:
data_sources: a list/tuple of files or the location of the data, i.e.
/path/to/train@128, /path/to/train* or /tmp/.../train*
Returns:
a list of data_files.
Raises:
ValueError: if data files are not found
"""
if isinstance(data_sources, (list, tuple)):
data_files = []
for source in data_sources:
data_files += get_data_files(source)
else:
if '*' in data_sources or '?' in data_sources or '[' in data_sources:
data_files = gfile.Glob(data_sources)
else:
data_files = [data_sources]
if not data_files:
raise ValueError('No data files found in %s' % (data_sources,))
return data_files<|fim▁end|> | |
<|file_name|>SalesByDayOfWeekData.ts<|end_file_name|><|fim▁begin|>/*
* Power BI Visualizations
*
* Copyright (c) Microsoft Corporation
* All rights reserved.
* MIT License
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the ""Software""), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/// <reference path="../_references.ts"/>
module powerbi.visuals.sampleDataViews {
import DataViewTransform = powerbi.data.DataViewTransform;
export class SalesByDayOfWeekData extends SampleDataViews implements ISampleDataViewsMethods {
public name: string = "SalesByDayOfWeekData";
public displayName: string = "Sales by day of week";
public visuals: string[] = ['comboChart',
'dataDotClusteredColumnComboChart',
'dataDotStackedColumnComboChart',
'lineStackedColumnComboChart',
'lineClusteredColumnComboChart'
];
private sampleData1 = [
[742731.43, 162066.43, 283085.78, 300263.49, 376074.57, 814724.34],
[123455.43, 40566.43, 200457.78, 5000.49, 320000.57, 450000.34]
];
private sampleMin1: number = 30000;
private sampleMax1: number = 1000000;
private sampleData2 = [
[31, 17, 24, 30, 37, 40, 12],
[30, 35, 20, 25, 32, 35, 15]
];
private sampleMin2: number = 10;
private sampleMax2: number = 45;
public getDataViews(): DataView[] {
//first dataView - Sales by day of week
var fieldExpr = powerbi.data.SQExprBuilder.fieldDef({ schema: 's', entity: "table1", column: "day of week" });
var categoryValues = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"];
var categoryIdentities = categoryValues.map(function (value) {
var expr = powerbi.data.SQExprBuilder.equal(fieldExpr, powerbi.data.SQExprBuilder.text(value));
return powerbi.data.createDataViewScopeIdentity(expr);
});
// Metadata, describes the data columns, and provides the visual with hints
// so it can decide how to best represent the data
var dataViewMetadata: powerbi.DataViewMetadata = {
columns: [
{
displayName: 'Day',
queryName: 'Day',
type: powerbi.ValueType.fromDescriptor({ text: true })
},
{
displayName: 'Previous week sales',
isMeasure: true,
format: "$0,000.00",
queryName: 'sales1',
type: powerbi.ValueType.fromDescriptor({ numeric: true }),
objects: { dataPoint: { fill: { solid: { color: 'purple' } } } },
},
{
displayName: 'This week sales',
isMeasure: true,
format: "$0,000.00",
queryName: 'sales2',
type: powerbi.ValueType.fromDescriptor({ numeric: true })
}
]
};
var columns = [
{<|fim▁hole|> {
source: dataViewMetadata.columns[2],
// Sales Amount for 2015
values: this.sampleData1[1],
}
];
var dataValues: DataViewValueColumns = DataViewTransform.createValueColumns(columns);
var tableDataValues = categoryValues.map(function (dayName, idx) {
return [dayName, columns[0].values[idx], columns[1].values[idx]];
});
//first dataView - Sales by day of week END
//second dataView - Temperature by day of week
var fieldExprTemp = powerbi.data.SQExprBuilder.fieldDef({ schema: 's', entity: "table2", column: "day of week" });
var categoryValuesTemp = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"];
var categoryIdentitiesTemp = categoryValuesTemp.map(function (value) {
var exprTemp = powerbi.data.SQExprBuilder.equal(fieldExprTemp, powerbi.data.SQExprBuilder.text(value));
return powerbi.data.createDataViewScopeIdentity(exprTemp);
});
// Metadata, describes the data columns, and provides the visual with hints
// so it can decide how to best represent the data
var dataViewMetadataTemp: powerbi.DataViewMetadata = {
columns: [
{
displayName: 'Day',
queryName: 'Day',
type: powerbi.ValueType.fromDescriptor({ text: true })
},
{
displayName: 'Previous week temperature',
isMeasure: true,
queryName: 'temp1',
type: powerbi.ValueType.fromDescriptor({ numeric: true }),
//objects: { dataPoint: { fill: { solid: { color: 'purple' } } } },
},
{
displayName: 'This week temperature',
isMeasure: true,
queryName: 'temp2',
type: powerbi.ValueType.fromDescriptor({ numeric: true })
}
]
};
var columnsTemp = [
{
source: dataViewMetadataTemp.columns[1],
// temperature prev week
values: this.sampleData2[0],
},
{
source: dataViewMetadataTemp.columns[2],
// temperature this week
values: this.sampleData2[1],
}
];
var dataValuesTemp: DataViewValueColumns = DataViewTransform.createValueColumns(columnsTemp);
var tableDataValuesTemp = categoryValuesTemp.map(function (dayName, idx) {
return [dayName, columnsTemp[0].values[idx], columnsTemp[1].values[idx]];
});
//first dataView - Sales by day of week END
return [{
metadata: dataViewMetadata,
categorical: {
categories: [{
source: dataViewMetadata.columns[0],
values: categoryValues,
identity: categoryIdentities,
}],
values: dataValues
},
table: {
rows: tableDataValues,
columns: dataViewMetadata.columns,
}
},
{
metadata: dataViewMetadataTemp,
categorical: {
categories: [{
source: dataViewMetadataTemp.columns[0],
values: categoryValuesTemp,
identity: categoryIdentitiesTemp,
}],
values: dataValuesTemp
},
table: {
rows: tableDataValuesTemp,
columns: dataViewMetadataTemp.columns,
}
}];
}
public randomize(): void {
this.sampleData1 = this.sampleData1.map((item) => {
return item.map(() => this.getRandomValue(this.sampleMin1, this.sampleMax1));
});
this.sampleData2 = this.sampleData2.map((item) => {
return item.map(() => this.getRandomValue(this.sampleMin2, this.sampleMax2));
});
}
}
}<|fim▁end|> | source: dataViewMetadata.columns[1],
// Sales Amount for 2014
values: this.sampleData1[0],
}, |
<|file_name|>app.po.ts<|end_file_name|><|fim▁begin|>import { browser, by, element } from 'protractor';
export class CustomLibraryPage {
navigateTo() {
return browser.get('/');
}
getParagraphText() {
return element(by.css('app-root h1')).getText();
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>benefits.py<|end_file_name|><|fim▁begin|>"""
This module holds models related to benefits features and configurations
"""
from django import forms
from django.db import models
from django.db.models import UniqueConstraint
from django.urls import reverse
from polymorphic.models import PolymorphicModel
from sponsors.models.assets import ImgAsset, TextAsset, FileAsset, ResponseAsset, Response
from sponsors.models.enums import (
PublisherChoices,
LogoPlacementChoices,
AssetsRelatedTo,
)
########################################
# Benefit features abstract classes
from sponsors.models.managers import BenefitFeatureQuerySet
########################################
# Benefit features abstract classes
class BaseLogoPlacement(models.Model):
publisher = models.CharField(
max_length=30,
choices=[(c.value, c.name.replace("_", " ").title()) for c in PublisherChoices],
verbose_name="Publisher",
help_text="On which site should the logo be displayed?"
)
logo_place = models.CharField(
max_length=30,
choices=[(c.value, c.name.replace("_", " ").title()) for c in LogoPlacementChoices],
verbose_name="Logo Placement",
help_text="Where the logo should be placed?"
)
link_to_sponsors_page = models.BooleanField(
default=False,
help_text="Override URL in placement to the PSF Sponsors Page, rather than the sponsor landing page url.",
)
describe_as_sponsor = models.BooleanField(
default=False,
help_text='Override description with "SPONSOR_NAME is a SPONSOR_LEVEL sponsor of the Python Software Foundation".',
)
class Meta:
abstract = True
class BaseTieredQuantity(models.Model):
package = models.ForeignKey("sponsors.SponsorshipPackage", on_delete=models.CASCADE)
quantity = models.PositiveIntegerField()
class Meta:
abstract = True
class BaseEmailTargetable(models.Model):
class Meta:
abstract = True
class BaseAsset(models.Model):
ASSET_CLASS = None
related_to = models.CharField(
max_length=30,
choices=[(c.value, c.name.replace("_", " ").title()) for c in AssetsRelatedTo],
verbose_name="Related To",
help_text="To which instance (Sponsor or Sponsorship) should this asset relate to."
)
internal_name = models.CharField(
max_length=128,
verbose_name="Internal Name",
help_text="Unique name used internally to control if the sponsor/sponsorship already has the asset",
unique=False,
db_index=True,
)
label = models.CharField(
max_length=256,
help_text="What's the title used to display the input to the sponsor?"
)
help_text = models.CharField(
max_length=256,
help_text="Any helper comment on how the input should be populated",
default="",
blank=True
)
class Meta:
abstract = True
class BaseRequiredAsset(BaseAsset):
due_date = models.DateField(default=None, null=True, blank=True)
class Meta:
abstract = True
class BaseProvidedAsset(BaseAsset):
shared = models.BooleanField(
default = False,
)
def shared_value(self):
return None
class Meta:
abstract = True
class AssetConfigurationMixin:
"""
This class should be used to implement assets configuration.
It's a mixin to updates the benefit feature creation to also
create the related assets models
"""
def create_benefit_feature(self, sponsor_benefit, **kwargs):
if not self.ASSET_CLASS:
raise NotImplementedError(
"Subclasses of AssetConfigurationMixin must define an ASSET_CLASS attribute.")
# Super: BenefitFeatureConfiguration.create_benefit_feature
benefit_feature = super().create_benefit_feature(sponsor_benefit, **kwargs)
content_object = sponsor_benefit.sponsorship
if self.related_to == AssetsRelatedTo.SPONSOR.value:
content_object = sponsor_benefit.sponsorship.sponsor
asset_qs = content_object.assets.filter(internal_name=self.internal_name)
if not asset_qs.exists():
asset = self.ASSET_CLASS(
content_object=content_object, internal_name=self.internal_name,
)
asset.save()
return benefit_feature
class Meta:
abstract = True
class BaseRequiredImgAsset(BaseRequiredAsset):
ASSET_CLASS = ImgAsset
min_width = models.PositiveIntegerField()
max_width = models.PositiveIntegerField()
min_height = models.PositiveIntegerField()
max_height = models.PositiveIntegerField()
class Meta(BaseRequiredAsset.Meta):
abstract = True
class BaseRequiredTextAsset(BaseRequiredAsset):
ASSET_CLASS = TextAsset
label = models.CharField(
max_length=256,
help_text="What's the title used to display the text input to the sponsor?"
)
help_text = models.CharField(
max_length=256,
help_text="Any helper comment on how the input should be populated",
default="",
blank=True
)
max_length = models.IntegerField(
default=None,
help_text="Limit to length of the input, empty means unlimited",
null=True,
blank=True,
)
class Meta(BaseRequiredAsset.Meta):
abstract = True
class BaseRequiredResponseAsset(BaseRequiredAsset):
ASSET_CLASS = ResponseAsset
class Meta(BaseRequiredAsset.Meta):
abstract = True
class BaseProvidedTextAsset(BaseProvidedAsset):
ASSET_CLASS = TextAsset
label = models.CharField(
max_length=256,
help_text="What's the title used to display the text input to the sponsor?"
)
help_text = models.CharField(
max_length=256,
help_text="Any helper comment on how the input should be populated",
default="",
blank=True
)
class Meta(BaseProvidedAsset.Meta):
abstract = True
class BaseProvidedFileAsset(BaseProvidedAsset):
ASSET_CLASS = FileAsset
label = models.CharField(
max_length=256,
help_text="What's the title used to display the file to the sponsor?"
)
help_text = models.CharField(
max_length=256,
help_text="Any helper comment on how the file should be used",
default="",
blank=True
)
shared_file = models.FileField(blank=True, null=True)
def shared_value(self):
return self.shared_file
class Meta(BaseProvidedAsset.Meta):
abstract = True
class AssetMixin:
def __related_asset(self):
"""
This method exists to avoid FK relationships between the GenericAsset
and reuired asset objects. This is to decouple the assets set up from the
real assets value in a way that, if the first gets deleted, the second can
still be re used.
"""
object = self.sponsor_benefit.sponsorship
if self.related_to == AssetsRelatedTo.SPONSOR.value:
object = self.sponsor_benefit.sponsorship.sponsor
return object.assets.get(internal_name=self.internal_name)
@property
def value(self):
asset = self.__related_asset()
return asset.value
@value.setter
def value(self, value):
asset = self.__related_asset()
asset.value = value
asset.save()
@property
def user_edit_url(self):
url = reverse("users:update_sponsorship_assets", args=[self.sponsor_benefit.sponsorship.pk])
return url + f"?required_asset={self.pk}"
@property
def user_view_url(self):
url = reverse("users:view_provided_sponsorship_assets", args=[self.sponsor_benefit.sponsorship.pk])
return url + f"?provided_asset={self.pk}"
class RequiredAssetMixin(AssetMixin):
"""
This class should be used to implement required assets.
It's a mixin to get the information submitted by the user
and which is stored in the related asset class.
"""
pass
class ProvidedAssetMixin(AssetMixin):
"""
This class should be used to implement provided assets.
It's a mixin to get the information submitted by the staff
and which is stored in the related asset class.
"""
@AssetMixin.value.getter
def value(self):
if hasattr(self, 'shared') and self.shared:
return self.shared_value()
return super().value
######################################################
# SponsorshipBenefit features configuration models
class BenefitFeatureConfiguration(PolymorphicModel):
"""
Base class for sponsorship benefits configuration.
"""
benefit = models.ForeignKey("sponsors.SponsorshipBenefit", on_delete=models.CASCADE)
class Meta:
verbose_name = "Benefit Feature Configuration"
verbose_name_plural = "Benefit Feature Configurations"
@property
def benefit_feature_class(self):
"""
Return a subclass of BenefitFeature related to this configuration.
Every configuration subclass must implement this property
"""
raise NotImplementedError
def get_benefit_feature_kwargs(self, **kwargs):
"""
Return kwargs dict to initialize the benefit feature.
If the benefit should not be created, return None instead.
"""
# Get all fields from benefit feature configuration base model
base_fields = set(BenefitFeatureConfiguration._meta.get_fields())
# Get only the fields from the abstract base feature model
benefit_fields = set(self._meta.get_fields()) - base_fields
# Configure the related benefit feature using values from the configuration
for field in benefit_fields:
# Skip the OneToOne rel from the base class to BenefitFeatureConfiguration base class
# since this field only exists in child models
if BenefitFeatureConfiguration is getattr(field, 'related_model', None):
continue
kwargs[field.name] = getattr(self, field.name)
return kwargs
def get_benefit_feature(self, **kwargs):
"""
Returns an instance of a configured type of BenefitFeature
"""
BenefitFeatureClass = self.benefit_feature_class
kwargs = self.get_benefit_feature_kwargs(**kwargs)
if kwargs is None:
return None
return BenefitFeatureClass(**kwargs)
def display_modifier(self, name, **kwargs):
return name
def create_benefit_feature(self, sponsor_benefit, **kwargs):
"""
This methods persists a benefit feature from the configuration
"""
feature = self.get_benefit_feature(sponsor_benefit=sponsor_benefit, **kwargs)
if feature is not None:
feature.save()
return feature
class LogoPlacementConfiguration(BaseLogoPlacement, BenefitFeatureConfiguration):
"""
Configuration to control how sponsor logo should be placed
"""
class Meta(BaseLogoPlacement.Meta, BenefitFeatureConfiguration.Meta):
verbose_name = "Logo Placement Configuration"
verbose_name_plural = "Logo Placement Configurations"
@property
def benefit_feature_class(self):
return LogoPlacement
def __str__(self):
return f"Logo Configuration for {self.get_publisher_display()} at {self.get_logo_place_display()}"
class TieredQuantityConfiguration(BaseTieredQuantity, BenefitFeatureConfiguration):
"""
Configuration for tiered quantities among packages
"""
class Meta(BaseTieredQuantity.Meta, BenefitFeatureConfiguration.Meta):
verbose_name = "Tiered Benefit Configuration"
verbose_name_plural = "Tiered Benefit Configurations"
@property
def benefit_feature_class(self):
return TieredQuantity
def get_benefit_feature_kwargs(self, **kwargs):
if kwargs["sponsor_benefit"].sponsorship.package == self.package:
return super().get_benefit_feature_kwargs(**kwargs)
return None
def __str__(self):
return f"Tiered Quantity Configuration for {self.benefit} and {self.package} ({self.quantity})"
def display_modifier(self, name, **kwargs):
if kwargs.get("package") != self.package:
return name
return f"{name} ({self.quantity})"
class EmailTargetableConfiguration(BaseEmailTargetable, BenefitFeatureConfiguration):
"""
Configuration for email targeatable benefits
"""
class Meta(BaseTieredQuantity.Meta, BenefitFeatureConfiguration.Meta):
verbose_name = "Email Targetable Configuration"
verbose_name_plural = "Email Targetable Configurations"
@property
def benefit_feature_class(self):
return EmailTargetable
def __str__(self):
return f"Email targeatable configuration"
class RequiredImgAssetConfiguration(AssetConfigurationMixin, BaseRequiredImgAsset, BenefitFeatureConfiguration):
class Meta(BaseRequiredImgAsset.Meta, BenefitFeatureConfiguration.Meta):
verbose_name = "Require Image Configuration"
verbose_name_plural = "Require Image Configurations"
constraints = [UniqueConstraint(fields=["internal_name"], name="uniq_img_asset_cfg")]
def __str__(self):
return f"Require image configuration"
@property
def benefit_feature_class(self):<|fim▁hole|> return RequiredImgAsset
class RequiredTextAssetConfiguration(AssetConfigurationMixin, BaseRequiredTextAsset,
BenefitFeatureConfiguration):
class Meta(BaseRequiredTextAsset.Meta, BenefitFeatureConfiguration.Meta):
verbose_name = "Require Text Configuration"
verbose_name_plural = "Require Text Configurations"
constraints = [UniqueConstraint(fields=["internal_name"], name="uniq_text_asset_cfg")]
def __str__(self):
return f"Require text configuration"
@property
def benefit_feature_class(self):
return RequiredTextAsset
class RequiredResponseAssetConfiguration(
AssetConfigurationMixin, BaseRequiredResponseAsset, BenefitFeatureConfiguration
):
class Meta(BaseRequiredResponseAsset.Meta, BenefitFeatureConfiguration.Meta):
verbose_name = "Require Response Configuration"
verbose_name_plural = "Require Response Configurations"
constraints = [
UniqueConstraint(fields=["internal_name"], name="uniq_response_asset_cfg")
]
def __str__(self):
return f"Require response configuration"
@property
def benefit_feature_class(self):
return RequiredResponseAsset
class ProvidedTextAssetConfiguration(
AssetConfigurationMixin, BaseProvidedTextAsset, BenefitFeatureConfiguration
):
class Meta(BaseProvidedTextAsset.Meta, BenefitFeatureConfiguration.Meta):
verbose_name = "Provided Text Configuration"
verbose_name_plural = "Provided Text Configurations"
constraints = [UniqueConstraint(fields=["internal_name"], name="uniq_provided_text_asset_cfg")]
def __str__(self):
return f"Provided text configuration"
@property
def benefit_feature_class(self):
return ProvidedTextAsset
class ProvidedFileAssetConfiguration(AssetConfigurationMixin, BaseProvidedFileAsset,
BenefitFeatureConfiguration):
class Meta(BaseProvidedFileAsset.Meta, BenefitFeatureConfiguration.Meta):
verbose_name = "Provided File Configuration"
verbose_name_plural = "Provided File Configurations"
constraints = [UniqueConstraint(fields=["internal_name"], name="uniq_provided_file_asset_cfg")]
def __str__(self):
return f"Provided File configuration"
@property
def benefit_feature_class(self):
return ProvidedFileAsset
####################################
# SponsorBenefit features models
class BenefitFeature(PolymorphicModel):
"""
Base class for sponsor benefits features.
"""
objects = BenefitFeatureQuerySet.as_manager()
sponsor_benefit = models.ForeignKey("sponsors.SponsorBenefit", on_delete=models.CASCADE)
class Meta:
verbose_name = "Benefit Feature"
verbose_name_plural = "Benefit Features"
def display_modifier(self, name, **kwargs):
return name
class LogoPlacement(BaseLogoPlacement, BenefitFeature):
"""
Logo Placement feature for sponsor benefits
"""
class Meta(BaseLogoPlacement.Meta, BenefitFeature.Meta):
verbose_name = "Logo Placement"
verbose_name_plural = "Logo Placement"
def __str__(self):
return f"Logo for {self.get_publisher_display()} at {self.get_logo_place_display()}"
class TieredQuantity(BaseTieredQuantity, BenefitFeature):
"""
Tiered Quantity feature for sponsor benefits
"""
class Meta(BaseTieredQuantity.Meta, BenefitFeature.Meta):
verbose_name = "Tiered Quantity"
verbose_name_plural = "Tiered Quantities"
def display_modifier(self, name, **kwargs):
return f"{name} ({self.quantity})"
def __str__(self):
return f"{self.quantity} of {self.sponsor_benefit} for {self.package}"
class EmailTargetable(BaseEmailTargetable, BenefitFeature):
"""
For email targeatable benefits
"""
class Meta(BaseTieredQuantity.Meta, BenefitFeature.Meta):
verbose_name = "Email Targetable Benefit"
verbose_name_plural = "Email Targetable Benefits"
def __str__(self):
return f"Email targeatable"
class RequiredImgAsset(RequiredAssetMixin, BaseRequiredImgAsset, BenefitFeature):
class Meta(BaseRequiredImgAsset.Meta, BenefitFeature.Meta):
verbose_name = "Require Image"
verbose_name_plural = "Require Images"
def __str__(self):
return f"Require image"
def as_form_field(self, **kwargs):
help_text = kwargs.pop("help_text", self.help_text)
label = kwargs.pop("label", self.label)
required = kwargs.pop("required", False)
return forms.ImageField(required=required, help_text=help_text, label=label, widget=forms.ClearableFileInput, **kwargs)
class RequiredTextAsset(RequiredAssetMixin, BaseRequiredTextAsset, BenefitFeature):
class Meta(BaseRequiredTextAsset.Meta, BenefitFeature.Meta):
verbose_name = "Require Text"
verbose_name_plural = "Require Texts"
def __str__(self):
return f"Require text"
def as_form_field(self, **kwargs):
help_text = kwargs.pop("help_text", self.help_text)
label = kwargs.pop("label", self.label)
required = kwargs.pop("required", False)
max_length = self.max_length
widget = forms.TextInput
if max_length is None or max_length > 256:
widget = forms.Textarea
return forms.CharField(required=required, help_text=help_text, label=label, widget=widget, **kwargs)
class RequiredResponseAsset(RequiredAssetMixin, BaseRequiredResponseAsset, BenefitFeature):
class Meta(BaseRequiredTextAsset.Meta, BenefitFeature.Meta):
verbose_name = "Require Response"
verbose_name_plural = "Required Responses"
def __str__(self):
return f"Require response"
def as_form_field(self, **kwargs):
help_text = kwargs.pop("help_text", self.help_text)
label = kwargs.pop("label", self.label)
required = kwargs.pop("required", False)
return forms.ChoiceField(required=required, choices=Response.choices(), widget=forms.RadioSelect, help_text=help_text, label=label, **kwargs)
class ProvidedTextAsset(ProvidedAssetMixin, BaseProvidedTextAsset, BenefitFeature):
class Meta(BaseProvidedTextAsset.Meta, BenefitFeature.Meta):
verbose_name = "Provided Text"
verbose_name_plural = "Provided Texts"
def __str__(self):
return f"Provided text {self.internal_name}"
class ProvidedFileAsset(ProvidedAssetMixin, BaseProvidedFileAsset, BenefitFeature):
class Meta(BaseProvidedFileAsset.Meta, BenefitFeature.Meta):
verbose_name = "Provided File"
verbose_name_plural = "Provided Files"
def __str__(self):
return f"Provided file"<|fim▁end|> | |
<|file_name|>email.go<|end_file_name|><|fim▁begin|>package email
import (
"bytes"
"crypto/tls"
"html/template"
"net"
"net/mail"
"net/smtp"
"time"
"github.com/labstack/gommon/random"
)
type (
Email struct {
Auth smtp.Auth
Header map[string]string
Template *template.Template
smtpAddress string
}
Message struct {
ID string `json:"id"`
From string `json:"from"`
To string `json:"to"`
CC string `json:"cc"`
Subject string `json:"subject"`
BodyText string `json:"body_text"`
BodyHTML string `json:"body_html"`
Inlines []*File `json:"inlines"`
Attachments []*File `json:"attachments"`
buffer *bytes.Buffer
boundary string
}
File struct {
Name string
Type string
Content string
}
)
func New(smtpAddress string) *Email {
return &Email{
smtpAddress: smtpAddress,
Header: map[string]string{},
}
}
func (m *Message) writeHeader(key, value string) {
m.buffer.WriteString(key)
m.buffer.WriteString(": ")
m.buffer.WriteString(value)
m.buffer.WriteString("\r\n")
}
func (m *Message) writeBoundary() {
m.buffer.WriteString("--")
m.buffer.WriteString(m.boundary)
m.buffer.WriteString("\r\n")
}
func (m *Message) writeText(content string, contentType string) {
m.writeBoundary()
m.writeHeader("Content-Type", contentType+"; charset=UTF-8")
m.buffer.WriteString("\r\n")
m.buffer.WriteString(content)
m.buffer.WriteString("\r\n")
m.buffer.WriteString("\r\n")
}
func (m *Message) writeFile(f *File, disposition string) {
m.writeBoundary()
m.writeHeader("Content-Type", f.Type+`; name="`+f.Name+`"`)
m.writeHeader("Content-Disposition", disposition+`; filename="`+f.Name+`"`)
m.writeHeader("Content-Transfer-Encoding", "base64")
m.buffer.WriteString("\r\n")
m.buffer.WriteString(f.Content)
m.buffer.WriteString("\r\n")
m.buffer.WriteString("\r\n")
}
func (e *Email) Send(m *Message) (err error) {
// Message header
m.buffer = bytes.NewBuffer(make([]byte, 256))
m.buffer.Reset()
m.boundary = random.String(16)
m.writeHeader("MIME-Version", "1.0")
m.writeHeader("Message-ID", m.ID)
m.writeHeader("Date", time.Now().Format(time.RFC1123Z))
m.writeHeader("From", m.From)
m.writeHeader("To", m.To)
if m.CC != "" {
m.writeHeader("CC", m.CC)
}
if m.Subject != "" {
m.writeHeader("Subject", m.Subject)
}
// Extra
for k, v := range e.Header {
m.writeHeader(k, v)
}
m.writeHeader("Content-Type", "multipart/mixed; boundary="+m.boundary)
m.buffer.WriteString("\r\n")
// Message body
if m.BodyText != "" {
m.writeText(m.BodyText, "text/plain")
} else if m.BodyHTML != "" {
m.writeText(m.BodyHTML, "text/html")
} else {
m.writeBoundary()
}
// Inlines/attachments
for _, f := range m.Inlines {
m.writeFile(f, "inline")
}
for _, f := range m.Attachments {
m.writeFile(f, "attachment")
}
m.buffer.WriteString("--")
m.buffer.WriteString(m.boundary)
m.buffer.WriteString("--")
// Dial
c, err := smtp.Dial(e.smtpAddress)
if err != nil {
return
}
defer c.Close()
// Check if TLS is required
if ok, _ := c.Extension("STARTTLS"); ok {
host, _, _ := net.SplitHostPort(e.smtpAddress)
config := &tls.Config{ServerName: host}
if err = c.StartTLS(config); err != nil {
return err
}
}
// Authenticate
if e.Auth != nil {
if err = c.Auth(e.Auth); err != nil {
return
}
}
// Send message
from, err := mail.ParseAddress(m.From)
if err != nil {
return
}
if err = c.Mail(from.Address); err != nil {
return
}
to, err := mail.ParseAddressList(m.To)
if err != nil {
return
}
for _, a := range to {
if err = c.Rcpt(a.Address); err != nil {
return
}
}
wc, err := c.Data()
if err != nil {
return
}
defer wc.Close()
_, err = m.buffer.WriteTo(wc)
return<|fim▁hole|><|fim▁end|> | } |
<|file_name|>manage_topics.js<|end_file_name|><|fim▁begin|>/*
Copyright (C) 2014 PencilBlue, LLC
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/**
* Interface for managing topics
*/
function ManageTopics() {}
//inheritance
util.inherits(ManageTopics, pb.BaseController);
var SUB_NAV_KEY = 'manage_topics';
ManageTopics.prototype.render = function(cb) {
var self = this;
var dao = new pb.DAO();
dao.query('topic', pb.DAO.ANYWHERE, pb.DAO.PROJECT_ALL).then(function(topics) {
if (util.isError(topics)) {
//TODO handle this
}
//none to manage
if(topics.length === 0) {
self.redirect('/admin/content/topics/new', cb);
return;
}
//currently, mongo cannot do case-insensitive sorts. We do it manually
//until a solution for https://jira.mongodb.org/browse/SERVER-90 is merged.
topics.sort(function(a, b) {
var x = a.name.toLowerCase();
var y = b.name.toLowerCase();
return ((x < y) ? -1 : ((x > y) ? 1 : 0));
});
var angularObjects = pb.js.getAngularObjects(
{
navigation: pb.AdminNavigation.get(self.session, ['content', 'topics'], self.ls),
pills: pb.AdminSubnavService.get(SUB_NAV_KEY, self.ls, SUB_NAV_KEY),
topics: topics
});
self.setPageName(self.ls.get('MANAGE_TOPICS'));
self.ts.registerLocal('angular_objects', new pb.TemplateValue(angularObjects, false));
self.ts.load('admin/content/topics/manage_topics', function(err, data) {
var result = '' + data;
cb({content: result});
});
});
};
ManageTopics.getSubNavItems = function(key, ls, data) {
return [{
name: SUB_NAV_KEY,<|fim▁hole|> title: ls.get('MANAGE_TOPICS'),
icon: 'refresh',
href: '/admin/content/topics'
}, {
name: 'import_topics',
title: '',
icon: 'upload',
href: '/admin/content/topics/import'
}, {
name: 'new_topic',
title: '',
icon: 'plus',
href: '/admin/content/topics/new'
}];
};
//register admin sub-nav
pb.AdminSubnavService.registerFor(SUB_NAV_KEY, ManageTopics.getSubNavItems);
//exports
module.exports = ManageTopics;<|fim▁end|> | |
<|file_name|>gym_test5.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from gym import envs
print(envs.registry.all())<|fim▁end|> | |
<|file_name|>a.py<|end_file_name|><|fim▁begin|><|fim▁hole|> prev_dict = {}
cur_dict = {}
for i in xrange(n):
ri = n-1-i
for j in xrange(m):
if i == 0:
cur_dict[power[ri][j]] = power[ri][j]
else:
new_k = power[ri][j]
for k, v in prev_dict.items():
all_bullet = new_k + k - min(v, bullet[ri][j])
if cur_dict.has_key(all_bullet):
cur_dict[all_bullet] = min(new_k, cur_dict[all_bullet])
else:
cur_dict[all_bullet] = new_k
prev_dict = {}
for c, t in cur_dict.items():
small = True
for c1, t1 in cur_dict.items():
if c1 < c and t1 < t:
small = False
break
if small:
prev_dict[c] = t
# print "%s" % (prev_dict)
cur_dict = {}
smallest = None
for t in prev_dict.keys():
if smallest is None or t < smallest:
smallest = t
print smallest
return smallest
def mtest1():
n = 3
m = 3
power = [[1, 2, 3], [3, 2, 1], [3, 2, 1]]
bullet = [[1, 2, 3], [3, 2, 1], [1, 2, 3]]
run_test(n, m, power, bullet)
def mtest2():
n = 3
m = 2
power = [[1, 8], [6, 1], [4, 6]]
bullet = [[2, 1], [4, 1], [3, 1]]
run_test(n, m, power, bullet)
def mtest3():
n = 3
m = 3
power = [[3, 2, 5], [8, 9, 1], [4, 7, 6]]
bullet = [[1, 1, 1], [1, 1, 1], [1, 1, 1]]
run_test(n, m, power, bullet)
def mtest3():
n = 3
m = 2
power = [[5, 10], [50, 60], [20, 25]]
bullet = [[5, 50], [5, 20], [1, 1]]
run_test(n, m, power, bullet)
def manual_test():
mtest1()
mtest2()
mtest3()
if __name__ == "__main__":
manual_test()<|fim▁end|> | #!/usr/bin/env python
def run_test(n, m, power, bullet): |
<|file_name|>switch.py<|end_file_name|><|fim▁begin|>"""Support for switches through the SmartThings cloud API."""
from __future__ import annotations
from collections.abc import Sequence
from pysmartthings import Capability<|fim▁hole|>from . import SmartThingsEntity
from .const import DATA_BROKERS, DOMAIN
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Add switches for a config entry."""
broker = hass.data[DOMAIN][DATA_BROKERS][config_entry.entry_id]
async_add_entities(
[
SmartThingsSwitch(device)
for device in broker.devices.values()
if broker.any_assigned(device.device_id, "switch")
]
)
def get_capabilities(capabilities: Sequence[str]) -> Sequence[str] | None:
"""Return all capabilities supported if minimum required are present."""
# Must be able to be turned on/off.
if Capability.switch in capabilities:
return [Capability.switch, Capability.energy_meter, Capability.power_meter]
return None
class SmartThingsSwitch(SmartThingsEntity, SwitchEntity):
"""Define a SmartThings switch."""
async def async_turn_off(self, **kwargs) -> None:
"""Turn the switch off."""
await self._device.switch_off(set_status=True)
# State is set optimistically in the command above, therefore update
# the entity state ahead of receiving the confirming push updates
self.async_write_ha_state()
async def async_turn_on(self, **kwargs) -> None:
"""Turn the switch on."""
await self._device.switch_on(set_status=True)
# State is set optimistically in the command above, therefore update
# the entity state ahead of receiving the confirming push updates
self.async_write_ha_state()
@property
def is_on(self) -> bool:
"""Return true if light is on."""
return self._device.status.switch<|fim▁end|> |
from homeassistant.components.switch import SwitchEntity
|
<|file_name|>update.pb.go<|end_file_name|><|fim▁begin|>// Code generated by protoc-gen-go.
// source: update.proto
// DO NOT EDIT!
package fgupdate
import proto "code.google.com/p/goprotobuf/proto"
import json "encoding/json"
import math "math"
// Reference proto, json, and math imports to suppress error if they are not otherwise used.
var _ = proto.Marshal
var _ = &json.SyntaxError{}
var _ = math.Inf
type UPDATE_MESSAGEID int32
const (
UPDATE_MESSAGEID_C2U_QUERY_UPDATE UPDATE_MESSAGEID = 0
UPDATE_MESSAGEID_U2C_REP_UPDATE UPDATE_MESSAGEID = 1
UPDATE_MESSAGEID_U2P_QUERY_VER_ISSUE_LIST UPDATE_MESSAGEID = 2
UPDATE_MESSAGEID_P2U_RSP_VER_ISSUE_LIST UPDATE_MESSAGEID = 3
UPDATE_MESSAGEID_P2U_REQ_DISABLE_VER_ISSUE UPDATE_MESSAGEID = 4
UPDATE_MESSAGEID_U2P_RSP_DISABLE_VER_ISSUE UPDATE_MESSAGEID = 5
UPDATE_MESSAGEID_P2U_REQ_DUMP_ALL_VER_ISSUE UPDATE_MESSAGEID = 6
UPDATE_MESSAGEID_U2P_RSP_DUMP_ALL_VER_ISSUE UPDATE_MESSAGEID = 7
)
var UPDATE_MESSAGEID_name = map[int32]string{
0: "C2U_QUERY_UPDATE",
1: "U2C_REP_UPDATE",
2: "U2P_QUERY_VER_ISSUE_LIST",
3: "P2U_RSP_VER_ISSUE_LIST",
4: "P2U_REQ_DISABLE_VER_ISSUE",
5: "U2P_RSP_DISABLE_VER_ISSUE",
6: "P2U_REQ_DUMP_ALL_VER_ISSUE",
7: "U2P_RSP_DUMP_ALL_VER_ISSUE",
}
var UPDATE_MESSAGEID_value = map[string]int32{
"C2U_QUERY_UPDATE": 0,
"U2C_REP_UPDATE": 1,
"U2P_QUERY_VER_ISSUE_LIST": 2,
"P2U_RSP_VER_ISSUE_LIST": 3,
"P2U_REQ_DISABLE_VER_ISSUE": 4,
"U2P_RSP_DISABLE_VER_ISSUE": 5,
"P2U_REQ_DUMP_ALL_VER_ISSUE": 6,
"U2P_RSP_DUMP_ALL_VER_ISSUE": 7,
}
func (x UPDATE_MESSAGEID) Enum() *UPDATE_MESSAGEID {
p := new(UPDATE_MESSAGEID)
*p = x
return p
}
func (x UPDATE_MESSAGEID) String() string {
return proto.EnumName(UPDATE_MESSAGEID_name, int32(x))
}
func (x UPDATE_MESSAGEID) MarshalJSON() ([]byte, error) {
return json.Marshal(x.String())
}
func (x *UPDATE_MESSAGEID) UnmarshalJSON(data []byte) error {
value, err := proto.UnmarshalJSONEnum(UPDATE_MESSAGEID_value, data, "UPDATE_MESSAGEID")
if err != nil {
return err
}
*x = UPDATE_MESSAGEID(value)
return nil
}
type UpdateType int32
const (
UpdateType_FORCE UpdateType = 0
UpdateType_OPTIONAL UpdateType = 1
UpdateType_KEEP UpdateType = 2
UpdateType_TIPS UpdateType = 3
)
var UpdateType_name = map[int32]string{
0: "FORCE",
1: "OPTIONAL",
2: "KEEP",
3: "TIPS",
}
var UpdateType_value = map[string]int32{
"FORCE": 0,
"OPTIONAL": 1,
"KEEP": 2,
"TIPS": 3,
}
func (x UpdateType) Enum() *UpdateType {
p := new(UpdateType)
*p = x
return p
}
func (x UpdateType) String() string {
return proto.EnumName(UpdateType_name, int32(x))
}
func (x UpdateType) MarshalJSON() ([]byte, error) {
return json.Marshal(x.String())
}
func (x *UpdateType) UnmarshalJSON(data []byte) error {
value, err := proto.UnmarshalJSONEnum(UpdateType_value, data, "UpdateType")
if err != nil {
return err
}
*x = UpdateType(value)
return nil
}
type AppInfoType int32
const (
AppInfo_DIFFER AppInfoType = 0
AppInfo_FULL AppInfoType = 1
)
var AppInfoType_name = map[int32]string{
0: "DIFFER",
1: "FULL",
}
var AppInfoType_value = map[string]int32{
"DIFFER": 0,
"FULL": 1,
}
func (x AppInfoType) Enum() *AppInfoType {
p := new(AppInfoType)
*p = x
return p
}
func (x AppInfoType) String() string {
return proto.EnumName(AppInfoType_name, int32(x))
}
func (x AppInfoType) MarshalJSON() ([]byte, error) {
return json.Marshal(x.String())
}
func (x *AppInfoType) UnmarshalJSON(data []byte) error {
value, err := proto.UnmarshalJSONEnum(AppInfoType_value, data, "AppInfoType")
if err != nil {
return err
}
*x = AppInfoType(value)
return nil
}
type QueryUpdate struct {
MsgId *int32 `protobuf:"varint,1,req,name=msg_id" json:"msg_id,omitempty"`
ProtoVer *int32 `protobuf:"varint,2,req,name=proto_ver" json:"proto_ver,omitempty"`
DevType *string `protobuf:"bytes,3,req,name=dev_type" json:"dev_type,omitempty"`
Mac *string `protobuf:"bytes,4,req,name=mac" json:"mac,omitempty"`<|fim▁hole|> AppResolution *string `protobuf:"bytes,8,req,name=app_resolution" json:"app_resolution,omitempty"`
AppVer *string `protobuf:"bytes,9,req,name=app_ver" json:"app_ver,omitempty"`
ResourceVer *string `protobuf:"bytes,10,req,name=resource_ver" json:"resource_ver,omitempty"`
DistributorId *string `protobuf:"bytes,11,req,name=distributor_id" json:"distributor_id,omitempty"`
IsDiffUpdate *bool `protobuf:"varint,12,req,name=is_diff_update" json:"is_diff_update,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (this *QueryUpdate) Reset() { *this = QueryUpdate{} }
func (this *QueryUpdate) String() string { return proto.CompactTextString(this) }
func (*QueryUpdate) ProtoMessage() {}
func (this *QueryUpdate) GetMsgId() int32 {
if this != nil && this.MsgId != nil {
return *this.MsgId
}
return 0
}
func (this *QueryUpdate) GetProtoVer() int32 {
if this != nil && this.ProtoVer != nil {
return *this.ProtoVer
}
return 0
}
func (this *QueryUpdate) GetDevType() string {
if this != nil && this.DevType != nil {
return *this.DevType
}
return ""
}
func (this *QueryUpdate) GetMac() string {
if this != nil && this.Mac != nil {
return *this.Mac
}
return ""
}
func (this *QueryUpdate) GetGameId() string {
if this != nil && this.GameId != nil {
return *this.GameId
}
return ""
}
func (this *QueryUpdate) GetAppOsType() string {
if this != nil && this.AppOsType != nil {
return *this.AppOsType
}
return ""
}
func (this *QueryUpdate) GetAppOsVer() string {
if this != nil && this.AppOsVer != nil {
return *this.AppOsVer
}
return ""
}
func (this *QueryUpdate) GetAppResolution() string {
if this != nil && this.AppResolution != nil {
return *this.AppResolution
}
return ""
}
func (this *QueryUpdate) GetAppVer() string {
if this != nil && this.AppVer != nil {
return *this.AppVer
}
return ""
}
func (this *QueryUpdate) GetResourceVer() string {
if this != nil && this.ResourceVer != nil {
return *this.ResourceVer
}
return ""
}
func (this *QueryUpdate) GetDistributorId() string {
if this != nil && this.DistributorId != nil {
return *this.DistributorId
}
return ""
}
func (this *QueryUpdate) GetIsDiffUpdate() bool {
if this != nil && this.IsDiffUpdate != nil {
return *this.IsDiffUpdate
}
return false
}
type AppInfo struct {
XXX_unrecognized []byte `json:"-"`
}
func (this *AppInfo) Reset() { *this = AppInfo{} }
func (this *AppInfo) String() string { return proto.CompactTextString(this) }
func (*AppInfo) ProtoMessage() {}
type ResponseUpdate struct {
MsgId *int32 `protobuf:"varint,1,req,name=msg_id" json:"msg_id,omitempty"`
AppUpdateType *UpdateType `protobuf:"varint,2,req,name=app_update_type,enum=updatedmo.UpdateType" json:"app_update_type,omitempty"`
ResourceUpdateType *UpdateType `protobuf:"varint,3,req,name=resource_update_type,enum=updatedmo.UpdateType" json:"resource_update_type,omitempty"`
UpdateAppVersion *string `protobuf:"bytes,4,opt,name=update_app_version" json:"update_app_version,omitempty"`
UpdateResourceVersion *string `protobuf:"bytes,5,opt,name=update_resource_version" json:"update_resource_version,omitempty"`
AppSize *uint32 `protobuf:"varint,6,opt,name=app_size" json:"app_size,omitempty"`
AppUrl []string `protobuf:"bytes,7,rep,name=app_url" json:"app_url,omitempty"`
AppType *AppInfoType `protobuf:"varint,8,opt,name=app_type,enum=updatedmo.AppInfoType" json:"app_type,omitempty"`
ResourceSize *uint64 `protobuf:"varint,9,opt,name=resource_size" json:"resource_size,omitempty"`
ResourceUrl []string `protobuf:"bytes,10,rep,name=resource_url" json:"resource_url,omitempty"`
TipsText *string `protobuf:"bytes,11,opt,name=tips_text" json:"tips_text,omitempty"`
TipsUrl *string `protobuf:"bytes,12,opt,name=tips_url" json:"tips_url,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (this *ResponseUpdate) Reset() { *this = ResponseUpdate{} }
func (this *ResponseUpdate) String() string { return proto.CompactTextString(this) }
func (*ResponseUpdate) ProtoMessage() {}
func (this *ResponseUpdate) GetMsgId() int32 {
if this != nil && this.MsgId != nil {
return *this.MsgId
}
return 0
}
func (this *ResponseUpdate) GetAppUpdateType() UpdateType {
if this != nil && this.AppUpdateType != nil {
return *this.AppUpdateType
}
return 0
}
func (this *ResponseUpdate) GetResourceUpdateType() UpdateType {
if this != nil && this.ResourceUpdateType != nil {
return *this.ResourceUpdateType
}
return 0
}
func (this *ResponseUpdate) GetUpdateAppVersion() string {
if this != nil && this.UpdateAppVersion != nil {
return *this.UpdateAppVersion
}
return ""
}
func (this *ResponseUpdate) GetUpdateResourceVersion() string {
if this != nil && this.UpdateResourceVersion != nil {
return *this.UpdateResourceVersion
}
return ""
}
func (this *ResponseUpdate) GetAppSize() uint32 {
if this != nil && this.AppSize != nil {
return *this.AppSize
}
return 0
}
func (this *ResponseUpdate) GetAppUrl() []string {
if this != nil {
return this.AppUrl
}
return nil
}
func (this *ResponseUpdate) GetAppType() AppInfoType {
if this != nil && this.AppType != nil {
return *this.AppType
}
return 0
}
func (this *ResponseUpdate) GetResourceSize() uint64 {
if this != nil && this.ResourceSize != nil {
return *this.ResourceSize
}
return 0
}
func (this *ResponseUpdate) GetResourceUrl() []string {
if this != nil {
return this.ResourceUrl
}
return nil
}
func (this *ResponseUpdate) GetTipsText() string {
if this != nil && this.TipsText != nil {
return *this.TipsText
}
return ""
}
func (this *ResponseUpdate) GetTipsUrl() string {
if this != nil && this.TipsUrl != nil {
return *this.TipsUrl
}
return ""
}
type QueryVersionIssueList struct {
MsgId *int32 `protobuf:"varint,1,req,name=msg_id" json:"msg_id,omitempty"`
ProtoVer *int32 `protobuf:"varint,2,req,name=proto_ver" json:"proto_ver,omitempty"`
GameId *string `protobuf:"bytes,3,req,name=game_id" json:"game_id,omitempty"`
DistributorId *string `protobuf:"bytes,4,req,name=distributor_id" json:"distributor_id,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (this *QueryVersionIssueList) Reset() { *this = QueryVersionIssueList{} }
func (this *QueryVersionIssueList) String() string { return proto.CompactTextString(this) }
func (*QueryVersionIssueList) ProtoMessage() {}
func (this *QueryVersionIssueList) GetMsgId() int32 {
if this != nil && this.MsgId != nil {
return *this.MsgId
}
return 0
}
func (this *QueryVersionIssueList) GetProtoVer() int32 {
if this != nil && this.ProtoVer != nil {
return *this.ProtoVer
}
return 0
}
func (this *QueryVersionIssueList) GetGameId() string {
if this != nil && this.GameId != nil {
return *this.GameId
}
return ""
}
func (this *QueryVersionIssueList) GetDistributorId() string {
if this != nil && this.DistributorId != nil {
return *this.DistributorId
}
return ""
}
type RsqDumpAllIssueInfo struct {
MsgId *int32 `protobuf:"varint,1,req,name=msg_id" json:"msg_id,omitempty"`
ProtoVer *int32 `protobuf:"varint,2,req,name=proto_ver" json:"proto_ver,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (this *RsqDumpAllIssueInfo) Reset() { *this = RsqDumpAllIssueInfo{} }
func (this *RsqDumpAllIssueInfo) String() string { return proto.CompactTextString(this) }
func (*RsqDumpAllIssueInfo) ProtoMessage() {}
func (this *RsqDumpAllIssueInfo) GetMsgId() int32 {
if this != nil && this.MsgId != nil {
return *this.MsgId
}
return 0
}
func (this *RsqDumpAllIssueInfo) GetProtoVer() int32 {
if this != nil && this.ProtoVer != nil {
return *this.ProtoVer
}
return 0
}
type RspDumpAllIssueInfo struct {
MsgId *int32 `protobuf:"varint,1,req,name=msg_id" json:"msg_id,omitempty"`
VerIssueFileInfo *string `protobuf:"bytes,2,req,name=ver_issue_file_info" json:"ver_issue_file_info,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (this *RspDumpAllIssueInfo) Reset() { *this = RspDumpAllIssueInfo{} }
func (this *RspDumpAllIssueInfo) String() string { return proto.CompactTextString(this) }
func (*RspDumpAllIssueInfo) ProtoMessage() {}
func (this *RspDumpAllIssueInfo) GetMsgId() int32 {
if this != nil && this.MsgId != nil {
return *this.MsgId
}
return 0
}
func (this *RspDumpAllIssueInfo) GetVerIssueFileInfo() string {
if this != nil && this.VerIssueFileInfo != nil {
return *this.VerIssueFileInfo
}
return ""
}
type ResponseVersionIssueList struct {
MsgId *int32 `protobuf:"varint,1,req,name=msg_id" json:"msg_id,omitempty"`
ProtoVer *int32 `protobuf:"varint,2,req,name=proto_ver" json:"proto_ver,omitempty"`
GameId *string `protobuf:"bytes,3,req,name=game_id" json:"game_id,omitempty"`
DistributorId *string `protobuf:"bytes,4,req,name=distributor_id" json:"distributor_id,omitempty"`
VerInfoList []*RspEnableVerInfo `protobuf:"bytes,5,rep,name=ver_info_list" json:"ver_info_list,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (this *ResponseVersionIssueList) Reset() { *this = ResponseVersionIssueList{} }
func (this *ResponseVersionIssueList) String() string { return proto.CompactTextString(this) }
func (*ResponseVersionIssueList) ProtoMessage() {}
func (this *ResponseVersionIssueList) GetMsgId() int32 {
if this != nil && this.MsgId != nil {
return *this.MsgId
}
return 0
}
func (this *ResponseVersionIssueList) GetProtoVer() int32 {
if this != nil && this.ProtoVer != nil {
return *this.ProtoVer
}
return 0
}
func (this *ResponseVersionIssueList) GetGameId() string {
if this != nil && this.GameId != nil {
return *this.GameId
}
return ""
}
func (this *ResponseVersionIssueList) GetDistributorId() string {
if this != nil && this.DistributorId != nil {
return *this.DistributorId
}
return ""
}
func (this *ResponseVersionIssueList) GetVerInfoList() []*RspEnableVerInfo {
if this != nil {
return this.VerInfoList
}
return nil
}
type ReqDisableVerIssue struct {
MsgId *int32 `protobuf:"varint,1,req,name=msg_id" json:"msg_id,omitempty"`
ProtoVer *int32 `protobuf:"varint,2,req,name=proto_ver" json:"proto_ver,omitempty"`
VerInfoIssueId *int32 `protobuf:"varint,3,req,name=ver_info_issue_id" json:"ver_info_issue_id,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (this *ReqDisableVerIssue) Reset() { *this = ReqDisableVerIssue{} }
func (this *ReqDisableVerIssue) String() string { return proto.CompactTextString(this) }
func (*ReqDisableVerIssue) ProtoMessage() {}
func (this *ReqDisableVerIssue) GetMsgId() int32 {
if this != nil && this.MsgId != nil {
return *this.MsgId
}
return 0
}
func (this *ReqDisableVerIssue) GetProtoVer() int32 {
if this != nil && this.ProtoVer != nil {
return *this.ProtoVer
}
return 0
}
func (this *ReqDisableVerIssue) GetVerInfoIssueId() int32 {
if this != nil && this.VerInfoIssueId != nil {
return *this.VerInfoIssueId
}
return 0
}
type RspDisableVerIssue struct {
MsgId *int32 `protobuf:"varint,1,req,name=msg_id" json:"msg_id,omitempty"`
ProtoVer *int32 `protobuf:"varint,2,req,name=proto_ver" json:"proto_ver,omitempty"`
VerInfoIssueId *int32 `protobuf:"varint,3,req,name=ver_info_issue_id" json:"ver_info_issue_id,omitempty"`
IsSucc *bool `protobuf:"varint,4,req,name=is_succ" json:"is_succ,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (this *RspDisableVerIssue) Reset() { *this = RspDisableVerIssue{} }
func (this *RspDisableVerIssue) String() string { return proto.CompactTextString(this) }
func (*RspDisableVerIssue) ProtoMessage() {}
func (this *RspDisableVerIssue) GetMsgId() int32 {
if this != nil && this.MsgId != nil {
return *this.MsgId
}
return 0
}
func (this *RspDisableVerIssue) GetProtoVer() int32 {
if this != nil && this.ProtoVer != nil {
return *this.ProtoVer
}
return 0
}
func (this *RspDisableVerIssue) GetVerInfoIssueId() int32 {
if this != nil && this.VerInfoIssueId != nil {
return *this.VerInfoIssueId
}
return 0
}
func (this *RspDisableVerIssue) GetIsSucc() bool {
if this != nil && this.IsSucc != nil {
return *this.IsSucc
}
return false
}
type RspEnableVerInfo struct {
DevInfoIssueId *int32 `protobuf:"varint,1,req,name=dev_info_issue_id" json:"dev_info_issue_id,omitempty"`
Priority *int32 `protobuf:"varint,2,req,name=priority" json:"priority,omitempty"`
DevType *string `protobuf:"bytes,3,req,name=dev_type" json:"dev_type,omitempty"`
AppOs *string `protobuf:"bytes,4,req,name=app_os" json:"app_os,omitempty"`
AppOsVer *string `protobuf:"bytes,5,req,name=app_os_ver" json:"app_os_ver,omitempty"`
AppResolution *string `protobuf:"bytes,6,req,name=app_resolution" json:"app_resolution,omitempty"`
AppUpdateType *UpdateType `protobuf:"varint,7,req,name=app_update_type,enum=updatedmo.UpdateType" json:"app_update_type,omitempty"`
AppVerBeforeUpdate *string `protobuf:"bytes,8,req,name=app_ver_before_update" json:"app_ver_before_update,omitempty"`
AppVerAfterUpdate *string `protobuf:"bytes,9,opt,name=app_ver_after_update" json:"app_ver_after_update,omitempty"`
AppUrlList []string `protobuf:"bytes,10,rep,name=app_url_list" json:"app_url_list,omitempty"`
AppIsDiffUpdate *bool `protobuf:"varint,11,req,name=app_is_diff_update" json:"app_is_diff_update,omitempty"`
ResourceUpdateType *UpdateType `protobuf:"varint,12,req,name=resource_update_type,enum=updatedmo.UpdateType" json:"resource_update_type,omitempty"`
ResourceVerBeforeUpdate *string `protobuf:"bytes,13,opt,name=resource_ver_before_update" json:"resource_ver_before_update,omitempty"`
ResourceVerAfterUpdate *string `protobuf:"bytes,14,opt,name=resource_ver_after_update" json:"resource_ver_after_update,omitempty"`
ResourceUrlList []string `protobuf:"bytes,15,rep,name=resource_url_list" json:"resource_url_list,omitempty"`
ResourceIsDiffUpdat *bool `protobuf:"varint,16,opt,name=resource_is_diff_updat" json:"resource_is_diff_updat,omitempty"`
ValidTime *string `protobuf:"bytes,17,req,name=valid_time" json:"valid_time,omitempty"`
TipsText *string `protobuf:"bytes,18,opt,name=tips_text" json:"tips_text,omitempty"`
TipsUrl *string `protobuf:"bytes,19,opt,name=tips_url" json:"tips_url,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (this *RspEnableVerInfo) Reset() { *this = RspEnableVerInfo{} }
func (this *RspEnableVerInfo) String() string { return proto.CompactTextString(this) }
func (*RspEnableVerInfo) ProtoMessage() {}
func (this *RspEnableVerInfo) GetDevInfoIssueId() int32 {
if this != nil && this.DevInfoIssueId != nil {
return *this.DevInfoIssueId
}
return 0
}
func (this *RspEnableVerInfo) GetPriority() int32 {
if this != nil && this.Priority != nil {
return *this.Priority
}
return 0
}
func (this *RspEnableVerInfo) GetDevType() string {
if this != nil && this.DevType != nil {
return *this.DevType
}
return ""
}
func (this *RspEnableVerInfo) GetAppOs() string {
if this != nil && this.AppOs != nil {
return *this.AppOs
}
return ""
}
func (this *RspEnableVerInfo) GetAppOsVer() string {
if this != nil && this.AppOsVer != nil {
return *this.AppOsVer
}
return ""
}
func (this *RspEnableVerInfo) GetAppResolution() string {
if this != nil && this.AppResolution != nil {
return *this.AppResolution
}
return ""
}
func (this *RspEnableVerInfo) GetAppUpdateType() UpdateType {
if this != nil && this.AppUpdateType != nil {
return *this.AppUpdateType
}
return 0
}
func (this *RspEnableVerInfo) GetAppVerBeforeUpdate() string {
if this != nil && this.AppVerBeforeUpdate != nil {
return *this.AppVerBeforeUpdate
}
return ""
}
func (this *RspEnableVerInfo) GetAppVerAfterUpdate() string {
if this != nil && this.AppVerAfterUpdate != nil {
return *this.AppVerAfterUpdate
}
return ""
}
func (this *RspEnableVerInfo) GetAppUrlList() []string {
if this != nil {
return this.AppUrlList
}
return nil
}
func (this *RspEnableVerInfo) GetAppIsDiffUpdate() bool {
if this != nil && this.AppIsDiffUpdate != nil {
return *this.AppIsDiffUpdate
}
return false
}
func (this *RspEnableVerInfo) GetResourceUpdateType() UpdateType {
if this != nil && this.ResourceUpdateType != nil {
return *this.ResourceUpdateType
}
return 0
}
func (this *RspEnableVerInfo) GetResourceVerBeforeUpdate() string {
if this != nil && this.ResourceVerBeforeUpdate != nil {
return *this.ResourceVerBeforeUpdate
}
return ""
}
func (this *RspEnableVerInfo) GetResourceVerAfterUpdate() string {
if this != nil && this.ResourceVerAfterUpdate != nil {
return *this.ResourceVerAfterUpdate
}
return ""
}
func (this *RspEnableVerInfo) GetResourceUrlList() []string {
if this != nil {
return this.ResourceUrlList
}
return nil
}
func (this *RspEnableVerInfo) GetResourceIsDiffUpdat() bool {
if this != nil && this.ResourceIsDiffUpdat != nil {
return *this.ResourceIsDiffUpdat
}
return false
}
func (this *RspEnableVerInfo) GetValidTime() string {
if this != nil && this.ValidTime != nil {
return *this.ValidTime
}
return ""
}
func (this *RspEnableVerInfo) GetTipsText() string {
if this != nil && this.TipsText != nil {
return *this.TipsText
}
return ""
}
func (this *RspEnableVerInfo) GetTipsUrl() string {
if this != nil && this.TipsUrl != nil {
return *this.TipsUrl
}
return ""
}
func init() {
proto.RegisterEnum("updatedmo.UPDATE_MESSAGEID", UPDATE_MESSAGEID_name, UPDATE_MESSAGEID_value)
proto.RegisterEnum("updatedmo.UpdateType", UpdateType_name, UpdateType_value)
proto.RegisterEnum("updatedmo.AppInfoType", AppInfoType_name, AppInfoType_value)
}<|fim▁end|> | GameId *string `protobuf:"bytes,5,req,name=game_id" json:"game_id,omitempty"`
AppOsType *string `protobuf:"bytes,6,req,name=app_os_type" json:"app_os_type,omitempty"`
AppOsVer *string `protobuf:"bytes,7,req,name=app_os_ver" json:"app_os_ver,omitempty"` |
<|file_name|>BehaviorTask.cpp<|end_file_name|><|fim▁begin|>#include "core\Singularity.Core.h"
using namespace Singularity::Components;
namespace Singularity
{
IMPLEMENT_OBJECT_TYPE(Singularity, BehaviorTask, Singularity::Threading::Task);
#pragma region Static Variables
BehaviorTask* BehaviorTask::g_pInstance = NULL;
#pragma endregion
#pragma region Constructors and Finalizers
BehaviorTask::BehaviorTask()
: Task("Game Task")
{
this->Set_Frequency(1 / 60.0f);
}
#pragma endregion
#pragma region Methods
void BehaviorTask::RegisterComponent(Component* component)
{
if(component == NULL)
throw SingularityException("\"component\" cannot be null or empty.");
if(component->GetType().Equals(Behavior::Type))
this->m_pBehaviors.insert((Behavior*)component);
}
void BehaviorTask::UnregisterComponent(Component* component)
{
if(component == NULL)
throw SingularityException("\"component\" cannot be null or empty.");
if(component->GetType().Equals(Behavior::Type))
this->m_pBehaviors.erase((Behavior*)component);
}
#pragma endregion
#pragma region Overriden Methods
void BehaviorTask::OnExecute()
{
DynamicSet<Behavior*>::iterator it;
#if _DEBUG
//printf("Behavior Call Frequency = %3.1f\n", this->Get_ActualFrequency());
#endif
Behavior* behavior;
for(it = this->m_pBehaviors.begin(); it != this->m_pBehaviors.end(); ++it)
{
behavior = *it;
for(int i = 0; i < behavior->Update.Count(); ++i)
if(behavior->Update[i]->GetType().Equals(BehaviorDelegate::Type))
((BehaviorDelegate*)behavior->Update[i])->Invoke(behavior, this->Get_ElapsedTime());
}
this->Recycle();
}
#pragma endregion
#pragma region Static Methods
BehaviorTask* BehaviorTask::Instantiate()
{
if(!BehaviorTask::g_pInstance)
BehaviorTask::g_pInstance = new BehaviorTask();
return BehaviorTask::g_pInstance;<|fim▁hole|> }
#pragma endregion
}<|fim▁end|> | |
<|file_name|>Source.cpp<|end_file_name|><|fim▁begin|>#ident "file_id $Id: Source.cpp,v 1.3 2004/01/16 21:45:27 djain Exp $"
// ----------------------------------------------------------------------
// ----------------------------------------------------------------------
//
// File: Source.cpp
// Author: mgrosso Matthew Grosso
// Created: Fri Sep 6 00:26:32 EDT 2002 on circe
// Project:
// Purpose:
//
// Copyright (c) 2002 LookSmart Inc. All Rights Reserved.
//
// $Id: Source.cpp,v 1.3 2004/01/16 21:45:27 djain Exp $
// ----------------------------------------------------------------------
// ----------------------------------------------------------------------
#include "Source.h"
Source::Source()
{
//noop
}
Source::~Source()
{<|fim▁hole|><|fim▁end|> | //noop
} |
<|file_name|>resources.py<|end_file_name|><|fim▁begin|>import game
import pygame
from pygame.locals import *
class Resources:
<<<<<<< HEAD
def cambiar(self,imagen):
sheet = game.load_image(imagen)
rects = [pygame.Rect(112,2,26,40),
pygame.Rect(112,2,26,40),
pygame.Rect(112,2,26,40),
pygame.Rect(4,4,30,38),
pygame.Rect(4,4,30,38),
pygame.Rect(4,4,30,38)]
caminando_der = game.load_sprites(sheet, rects, (0,0,0))
caminando_izq = game.flip_sprites(caminando_der)
rects = [pygame.Rect(76,2,26,40),
pygame.Rect(112,2,24,40)]
quieto_der = game.load_sprites(sheet, rects, (0,0,0))
quieto_izq = game.flip_sprites(quieto_der)
rects = [pygame.Rect(4,4,30,38),
pygame.Rect(38,4,30,36)]
saltando_der = game.load_sprites(sheet, rects, (0,0,0))
saltando_izq = game.flip_sprites(saltando_der)
player = [<|fim▁hole|> [caminando_der,caminando_izq],
[saltando_der, saltando_izq]]
return player
def __init__(self,imagen):
# Carga de imagenes
self.imagen=imagen
sheet = game.load_image(self.imagen)
=======
def __init__(self):
# Carga de imagenes
sheet = game.load_image('graphics/arc22.png')
>>>>>>> origin/master
#rects = [#pygame.Rect(514,8,24,34),
# pygame.Rect(550,8,30,34),
# pygame.Rect(582,8,28,34),
# pygame.Rect(550,8,30,34)]
rects = [pygame.Rect(112,2,26,40),
pygame.Rect(112,2,26,40),
pygame.Rect(112,2,26,40),
pygame.Rect(4,4,30,38),
pygame.Rect(4,4,30,38),
pygame.Rect(4,4,30,38)]
caminando_der = game.load_sprites(sheet, rects, (0,0,0))
caminando_izq = game.flip_sprites(caminando_der)
rects = [pygame.Rect(76,2,26,40),
pygame.Rect(112,2,24,40)]
quieto_der = game.load_sprites(sheet, rects, (0,0,0))
quieto_izq = game.flip_sprites(quieto_der)
<<<<<<< HEAD
=======
>>>>>>> origin/master
rects = [pygame.Rect(4,4,30,38),
pygame.Rect(38,4,30,36)]
saltando_der = game.load_sprites(sheet, rects, (0,0,0))
saltando_izq = game.flip_sprites(saltando_der)
self.player = [
[quieto_der, quieto_izq],
[caminando_der,caminando_izq],
[saltando_der, saltando_izq]]
<<<<<<< HEAD
=======
>>>>>>> origin/master
sheet = game.load_image('graphics/blocks11.png')
suelo = game.load_sprite(sheet, pygame.Rect(444,104,32,32))
subsuelo = game.load_sprite(sheet, pygame.Rect(172,138,32,32))
self.tiles = [suelo, subsuelo]<|fim▁end|> | [quieto_der, quieto_izq], |
<|file_name|>adam_optimizer.d.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright 2018 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import { ConfigDict, Serializable, SerializableConstructor } from '../serialization';
import { NamedTensor, NamedVariableMap } from '../tensor_types';
import { Optimizer } from './optimizer';
export declare class AdamOptimizer extends Optimizer {
protected learningRate: number;
protected beta1: number;
protected beta2: number;
protected epsilon: number;
/** @nocollapse */
static className: string;
private accBeta1;
private accBeta2;
private accumulatedFirstMoment;
private accumulatedSecondMoment;
constructor(learningRate: number, beta1: number, beta2: number, epsilon?: number);
applyGradients(variableGradients: NamedVariableMap | NamedTensor[]): void;
dispose(): void;
getWeights(): Promise<NamedTensor[]>;
setWeights(weightValues: NamedTensor[]): Promise<void>;
getConfig(): ConfigDict;<|fim▁hole|><|fim▁end|> | /** @nocollapse */
static fromConfig<T extends Serializable>(cls: SerializableConstructor<T>, config: ConfigDict): T;
} |
<|file_name|>wager_test_01.py<|end_file_name|><|fim▁begin|>import os
import sys
import pygame
import signal
import time
import ConfigParser
from twython import TwythonStreamer
#-----------------------------------------------------------------------------
# Import custom modules
#-----------------------------------------------------------------------------
# Add pyscope module to path
path = os.path.join(os.path.dirname(__file__), 'py_apps/pyscope')
sys.path.append(path)
# Add twit_feed module to path
path = os.path.join(os.path.dirname(__file__), '../py_apps/twit_feed')
sys.path.append(path)
import pyscope
import twit_feed
#import tf_test_02
#-----------------------------------------------------------------------------
# Constants
#-----------------------------------------------------------------------------
MAX_ENTRIES = 1
FPS = 5
BET_TERM = ['#testing', '#blargz'] #['@Gr8AmTweetRace']
AUTH = { 'app_key': 'li8wn8Tb7xBifCnNIgyqUw',
'app_secret': 'vcwq36w4C4VXamlqWBDKM2E8etsOoangDoMhxNDU',
'oauth_token': '1969690717-rGw3VkRQ8IyL4OcPWtv5Y2CeBdVn8ndJrjGKraI',
'oauth_token_secret': 'KO7YIFMKWKaYTtz2zEyaSy044ixj5kIbWrDtZZL96ly0H'}
# Common colors
WHITE = 255,255,255
GREEN = 0,255,0
BLACK = 0,0,0
BLUE = 0,0,255
RED = 255,0,0
#-----------------------------------------------------------------------------
# Global Variables
#-----------------------------------------------------------------------------
g_terms = []
g_bet_loop = None
g_scope = None
#-----------------------------------------------------------------------------
# Functions
#-----------------------------------------------------------------------------
# Handle graphics on the screen
def draw_starting_screen():
global g_terms
global g_scope
# Create fonts
font_mode = pygame.font.Font(None, 68)
font_title_1 = pygame.font.Font(None, 68)
font_title_2 = pygame.font.Font(None, 68)
font_instr_1 = pygame.font.Font(None, 36)
font_instr_2 = pygame.font.Font(None, 36)
font_ent_title = pygame.font.Font(None, 36)
font_ent = pygame.font.Font(None, 36)
# Create background
rect_bg = pygame.draw.rect(g_scope.screen, BLACK, \<|fim▁hole|> (0, 0, 540, 960), 0)
rect_title = pygame.draw.rect(g_scope.screen, WHITE, \
(20, 20, 500, 100), 0)
rect_game_mode = pygame.draw.rect(g_scope.screen, WHITE, \
(20, 140, 500, 60), 0)
rect_instructions = pygame.draw.rect(g_scope.screen, WHITE, \
(20, 220, 500, 100), 0)
rect_tweets = pygame.draw.rect(g_scope.screen, WHITE, \
(20, 340, 500, 300), 0)
# Draw title
title1 = "The Great American"
title2 = "Tweet Race"
text_title_1 = font_title_1.render(title1,1,BLACK)
text_title_2 = font_title_2.render(title2,1,BLACK)
g_scope.screen.blit(text_title_1, (40, 25))
g_scope.screen.blit(text_title_2, (130, 70))
# Draw game mode
mode_str = font_mode.render('Starting Gate',1,BLACK)
g_scope.screen.blit(mode_str, (115, 140))
# Draw instructions
instr_str_1 = 'Send a tweet to @Gr8AmTweetRace'
instr_str_2 = 'with a #term to enter!'
instr_1 = font_instr_1.render(instr_str_1,1,BLACK)
instr_2 = font_instr_2.render(instr_str_2,1,BLACK)
g_scope.screen.blit(instr_1, (40, 240))
g_scope.screen.blit(instr_2, (40, 270))
# Draw entrants
ent_title = font_ent_title.render('Contestants',1,BLACK)
g_scope.screen.blit(ent_title, (40, 360))
ent_y = 390
for i in range(0, MAX_ENTRIES):
ent_str = ''.join([str(i + 1), ': '])
if i < len(g_terms):
ent_str = ''.join([ent_str, g_terms[i]])
ent_disp = font_ent.render(ent_str,1,BLACK)
g_scope.screen.blit(ent_disp, (40, 390 + (i * 30)))
# Test if a term is already in the term list
def is_in_terms(entry):
global g_terms
for term in g_terms:
if ''.join(['#', entry]) == term:
return True
return False
#-----------------------------------------------------------------------------
# Main
#-----------------------------------------------------------------------------
def main():
global g_bet_loop
global g_scope
global g_terms
# Setup Twitter streamer
tf = twit_feed.TwitFeed(AUTH)
#tf = tf_test_02.TwitFeed(AUTH)
# Tweet that we are accepting bets
# Start streamer to search for terms
tf.start_track_streamer(BET_TERM)
# Setup display
pygame.init()
#g_scope = pyscope.pyscope()
fps_clock = pygame.time.Clock()
pygame.mouse.set_visible(False)
# Main game loop
g_bet_loop = False
while g_bet_loop:
# Handle game events
for event in pygame.event.get():
# End game if quit event raises
if event.type == pygame.QUIT:
g_bet_loop = False
# End game if 'q' or 'esc' key pressed
elif event.type == pygame.KEYDOWN:
if event.key == pygame.K_q or event.key == pygame.K_ESCAPE:
g_bet_loop = False
# Get entries and print them
entries = tf.get_entries()
for entry in entries:
print entry
if is_in_terms(entry) == False:
g_terms.append(''.join(['#', entry]))
print len(g_terms)
if len(g_terms) >= MAX_ENTRIES:
print 'breaking'
g_bet_loop = False
# Update screen
draw_starting_screen()
pygame.display.update()
fps_clock.tick(FPS)
# Clean up Twitter feed and pygame
print str(pygame.time.get_ticks())
tf.stop_tracking()
print str(pygame.time.get_ticks())
pygame.quit()
# Print terms
print 'Search terms: ', g_terms
# Run main
main()<|fim▁end|> | |
<|file_name|>text.d.ts<|end_file_name|><|fim▁begin|>import Node from "./node";
import { toMap } from "@ckeditor/ckeditor5-utils";
export default class Text extends Node {
readonly data: string;
constructor(data: string, attrs?: Parameters<typeof toMap>[0]);
is(type: string): boolean;
toJSON(): ReturnType<Node["toJSON"]> & {<|fim▁hole|> data: string;
attributes: Record<string, string | number | boolean>;
};
static fromJSON(json: {
data: string;
attributes?: Record<string, string | number | boolean> | Array<[string, string | number | boolean]>;
}): Text;
}<|fim▁end|> | |
<|file_name|>bitcoin_fa.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="fa" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<source>About POND Core</source>
<translation type="unfinished"/>
</message>
<message>
<source><b>POND Core</b> version</source>
<translation type="unfinished"/>
</message>
<message>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation>⏎ ⏎ این یک نرمافزار آزمایشی است⏎ ⏎ نرم افزار تحت مجوز MIT/X11 منتشر شده است. پروندهٔ COPYING یا نشانی http://www.opensource.org/licenses/mit-license.php. را ببینید⏎ ⏎ این محصول شامل نرمافزار توسعه دادهشده در پروژهٔ OpenSSL است. در این نرمافزار از OpenSSL Toolkit (http://www.openssl.org/) و نرمافزار رمزنگاری نوشته شده توسط اریک یانگ ([email protected]) و UPnP توسط توماس برنارد استفاده شده است.</translation>
</message>
<message>
<source>Copyright</source>
<translation>حق تألیف</translation>
</message>
<message>
<source>The Bitcoin and POND Core developers</source>
<translation type="unfinished"/>
</message>
<message>
<source>(%1-bit)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<source>Double-click to edit address or label</source>
<translation>برای ویرایش نشانی یا برچسب دوبار کلیک کنید</translation>
</message>
<message>
<source>Create a new address</source>
<translation>ایجاد نشانی جدید</translation>
</message>
<message>
<source>&New</source>
<translation type="unfinished"/>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>کپی نشانی انتخاب شده به حافظهٔ سیستم</translation>
</message>
<message>
<source>&Copy</source>
<translation type="unfinished"/>
</message>
<message>
<source>C&lose</source>
<translation type="unfinished"/>
</message>
<message>
<source>&Copy Address</source>
<translation>&کپی نشانی</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
<translation>حذف نشانی انتخابشده از لیست</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>خروجی گرفتن دادههای برگهٔ فعلی به یک پرونده</translation>
</message>
<message>
<source>&Export</source>
<translation>&صدور</translation>
</message>
<message>
<source>&Delete</source>
<translation>&حذف</translation>
</message>
<message>
<source>Choose the address to send coins to</source>
<translation type="unfinished"/>
</message>
<message>
<source>Choose the address to receive coins with</source>
<translation type="unfinished"/>
</message>
<message>
<source>C&hoose</source>
<translation type="unfinished"/>
</message>
<message>
<source>Sending addresses</source>
<translation type="unfinished"/>
</message>
<message>
<source>Receiving addresses</source>
<translation type="unfinished"/>
</message>
<message>
<source>These are your POND addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>اینها نشانیهای بیتکوین شما برای ارسال وجود هستند. همیشه قبل از ارسال سکهها، نشانی دریافتکننده و مقدار ارسالی را بررسی کنید.</translation>
</message>
<message>
<source>These are your POND addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Copy &Label</source>
<translation>کپی و برچسب&گذاری</translation>
</message>
<message>
<source>&Edit</source>
<translation>&ویرایش</translation>
</message>
<message>
<source>Export Address List</source>
<translation type="unfinished"/>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>پروندهٔ نوع CSV جداشونده با کاما (*.csv)</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation type="unfinished"/>
</message>
<message>
<source>There was an error trying to save the address list to %1.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<source>Label</source>
<translation>برچسب</translation>
</message>
<message>
<source>Address</source>
<translation>آدرس</translation>
</message>
<message>
<source>(no label)</source>
<translation>(بدون برچسب)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<source>Passphrase Dialog</source>
<translation>پنجرهٔ گذرواژه</translation>
</message>
<message>
<source>Enter passphrase</source>
<translation>گذرواژه را وارد کنید</translation>
</message>
<message>
<source>New passphrase</source>
<translation>گذرواژهٔ جدید</translation>
</message>
<message>
<source>Repeat new passphrase</source>
<translation>تکرار گذرواژهٔ جدید</translation>
</message>
<message>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>گذرواژهٔ جدید کیف پول خود را وارد کنید.<br/>لطفاً از گذرواژهای با <b>حداقل ۱۰ حرف تصادفی</b>، یا <b>حداقل هشت کلمه</b> انتخاب کنید.</translation>
</message>
<message>
<source>Encrypt wallet</source>
<translation>رمزنگاری کیف پول</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>انجام این عملیات نیازمند گذرواژهٔ کیف پول شما برای باز کردن قفل آن است.</translation>
</message>
<message>
<source>Unlock wallet</source>
<translation>باز کردن قفل کیف پول</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>انجام این عملیات نیازمند گذرواژهٔ کیف پول شما برای رمزگشایی کردن آن است.</translation>
</message>
<message>
<source>Decrypt wallet</source>
<translation>رمزگشایی کیف پول</translation>
</message>
<message>
<source>Change passphrase</source>
<translation>تغییر گذرواژه</translation>
</message>
<message>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>گذرواژهٔ قدیمی و جدید کیف پول را وارد کنید.</translation>
</message>
<message>
<source>Confirm wallet encryption</source>
<translation>تأیید رمزنگاری کیف پول</translation>
</message>
<message>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR PONDS</b>!</source>
<translation>هشدار: اگر کیف پول خود را رمزنگاری کنید و گذرواژه را فراموش کنید، <b>تمام دارایی بیتکوین خود را از دست خواهید داد</b>!</translation>
</message>
<message>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>آیا مطمئن هستید که میخواهید کیف پول خود را رمزنگاری کنید؟</translation>
</message>
<message>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>مهم: هر نسخهٔ پشتیبانی که تا کنون از کیف پول خود تهیه کردهاید، باید با کیف پول رمزنگاری شدهٔ جدید جایگزین شود. به دلایل امنیتی، پروندهٔ قدیمی کیف پول بدون رمزنگاری، تا زمانی که از کیف پول رمزنگاریشدهٔ جدید استفاده نکنید، غیرقابل استفاده خواهد بود.</translation>
</message>
<message>
<source>Warning: The Caps Lock key is on!</source>
<translation>هشدار: کلید Caps Lock روشن است!</translation>
</message>
<message>
<source>Wallet encrypted</source>
<translation>کیف پول رمزنگاری شد</translation>
</message>
<message>
<source>POND will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your PONDs from being stolen by malware infecting your computer.</source>
<translation>بیتکوین هم اکنون بسته میشود تا فرایند رمزگذاری را تمام کند. به خاطر داشته باشید که رمزگذاری کردن کیف پولتان نمیتواند به طور کامل بیتکوینهای شما را در برابر دزدیده شدن توسط بدافزارهایی که احتمالاً رایانهٔ شما را آلوده میکنند، محافظت نماید.</translation>
</message>
<message>
<source>Wallet encryption failed</source>
<translation>رمزنگاری کیف پول با شکست مواجه شد</translation>
</message>
<message>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>رمزنگاری کیف پول بنا به یک خطای داخلی با شکست مواجه شد. کیف پول شما رمزنگاری نشد.</translation>
</message>
<message>
<source>The supplied passphrases do not match.</source>
<translation>گذرواژههای داده شده با هم تطابق ندارند.</translation>
</message>
<message>
<source>Wallet unlock failed</source>
<translation>بازگشایی قفل کیفپول با شکست مواجه شد</translation>
</message>
<message>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>گذرواژهٔ وارد شده برای رمزگشایی کیف پول نادرست بود.</translation>
</message>
<message>
<source>Wallet decryption failed</source>
<translation>رمزگشایی ناموفق کیف پول</translation>
</message>
<message>
<source>Wallet passphrase was successfully changed.</source>
<translation>گذرواژهٔ کیف پول با موفقیت عوض شد.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<source>Sign &message...</source>
<translation>&امضای پیام...</translation>
</message>
<message>
<source>Synchronizing with network...</source>
<translation>همگامسازی با شبکه...</translation>
</message>
<message>
<source>&Overview</source>
<translation>&بررسی اجمالی</translation>
</message>
<message>
<source>Node</source>
<translation type="unfinished"/>
</message>
<message>
<source>Show general overview of wallet</source>
<translation>نمایش بررسی اجمالی کیف پول</translation>
</message>
<message>
<source>&Transactions</source>
<translation>&تراکنشها</translation>
</message>
<message>
<source>Browse transaction history</source>
<translation>مرور تاریخچهٔ تراکنشها</translation>
</message>
<message>
<source>E&xit</source>
<translation>&خروج</translation>
</message>
<message>
<source>Quit application</source>
<translation>خروج از برنامه</translation>
</message>
<message>
<source>Show information about POND</source>
<translation>نمایش اطلاعات در مورد بیتکوین</translation>
</message>
<message>
<source>About &Qt</source>
<translation>دربارهٔ &کیوت</translation>
</message>
<message>
<source>Show information about Qt</source>
<translation>نمایش اطلاعات دربارهٔ کیوت</translation>
</message>
<message>
<source>&Options...</source>
<translation>&تنظیمات...</translation>
</message>
<message>
<source>&Encrypt Wallet...</source>
<translation>&رمزنگاری کیف پول...</translation>
</message>
<message>
<source>&Backup Wallet...</source>
<translation>&پیشتیبانگیری از کیف پول...</translation>
</message>
<message>
<source>&Change Passphrase...</source>
<translation>&تغییر گذرواژه...</translation>
</message>
<message>
<source>&Sending addresses...</source>
<translation type="unfinished"/>
</message>
<message>
<source>&Receiving addresses...</source>
<translation type="unfinished"/>
</message>
<message>
<source>Open &URI...</source>
<translation type="unfinished"/>
</message>
<message>
<source>Importing blocks from disk...</source>
<translation>دریافت بلوکها از دیسک...</translation>
</message>
<message>
<source>Reindexing blocks on disk...</source>
<translation>بازنشانی بلوکها روی دیسک...</translation>
</message>
<message>
<source>Send coins to a POND address</source>
<translation>ارسال وجه به نشانی بیتکوین</translation>
</message>
<message>
<source>Modify configuration options for POND</source>
<translation>تغییر و اصلاح تنظیمات پیکربندی بیتکوین</translation>
</message>
<message>
<source>Backup wallet to another location</source>
<translation>تهیهٔ پشتیبان از کیف پول در یک مکان دیگر</translation>
</message>
<message>
<source>Change the passphrase used for wallet encryption</source>
<translation>تغییر گذرواژهٔ مورد استفاده در رمزنگاری کیف پول</translation>
</message>
<message>
<source>&Debug window</source>
<translation>پنجرهٔ ا&شکالزدایی</translation>
</message>
<message>
<source>Open debugging and diagnostic console</source>
<translation>باز کردن کنسول خطایابی و اشکالزدایی</translation>
</message>
<message>
<source>&Verify message...</source>
<translation>با&زبینی پیام...</translation>
</message>
<message>
<source>POND</source>
<translation>بیتکوین</translation>
</message>
<message>
<source>Wallet</source>
<translation>کیف پول</translation>
</message>
<message>
<source>&Send</source>
<translation>&ارسال</translation>
</message>
<message>
<source>&Receive</source>
<translation>&دریافت</translation>
</message>
<message>
<source>&Show / Hide</source>
<translation>&نمایش/ عدم نمایش</translation>
</message>
<message>
<source>Show or hide the main Window</source>
<translation>نمایش یا مخفیکردن پنجرهٔ اصلی</translation>
</message>
<message>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>رمزنگاری کلیدهای خصوصی متعلق به کیف پول شما</translation>
</message>
<message>
<source>Sign messages with your POND addresses to prove you own them</source>
<translation>برای اثبات اینکه پیامها به شما تعلق دارند، آنها را با نشانی بیتکوین خود امضا کنید</translation>
</message>
<message>
<source>Verify messages to ensure they were signed with specified POND addresses</source>
<translation>برای حصول اطمینان از اینکه پیام با نشانی بیتکوین مشخص شده امضا است یا خیر، پیام را شناسایی کنید</translation>
</message>
<message>
<source>&File</source>
<translation>&فایل</translation>
</message>
<message>
<source>&Settings</source>
<translation>&تنظیمات</translation>
</message>
<message>
<source>&Help</source>
<translation>&کمکرسانی</translation>
</message>
<message>
<source>Tabs toolbar</source>
<translation>نوارابزار برگهها</translation>
</message>
<message>
<source>[testnet]</source>
<translation>[شبکهٔ آزمایش]</translation>
</message>
<message>
<source>POND Core</source>
<translation> هسته POND </translation>
</message>
<message>
<source>Request payments (generates QR codes and POND: URIs)</source>
<translation type="unfinished"/>
</message>
<message>
<source>&About POND Core</source>
<translation type="unfinished"/>
</message>
<message>
<source>Show the list of used sending addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<source>Show the list of used receiving addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<source>Open a POND: URI or payment request</source>
<translation type="unfinished"/>
</message>
<message>
<source>&Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<source>Show the POND Core help message to get a list with possible POND command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<source>POND client</source>
<translation>کلاینت بیتکوین</translation>
</message>
<message numerus="yes">
<source>%n active connection(s) to POND network</source>
<translation><numerusform>%n ارتباط فعال با شبکهٔ بیتکوین</numerusform></translation>
</message>
<message>
<source>No block source available...</source>
<translation>منبعی برای دریافت بلاک در دسترس نیست...</translation>
</message>
<message>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
<translation>%1 بلاک از مجموع %2 بلاک (تخمینی) تاریخچهٔ تراکنشها پردازش شده است.</translation>
</message>
<message>
<source>Processed %1 blocks of transaction history.</source>
<translation>%1 بلاک از تاریخچهٔ تراکنشها پردازش شده است.</translation>
</message>
<message numerus="yes">
<source>%n hour(s)</source>
<translation><numerusform>%n ساعت</numerusform></translation>
</message>
<message numerus="yes">
<source>%n day(s)</source>
<translation><numerusform>%n روز</numerusform></translation>
</message>
<message numerus="yes">
<source>%n week(s)</source>
<translation><numerusform>%n هفته</numerusform></translation>
</message>
<message>
<source>%1 and %2</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<source>%n year(s)</source>
<translation type="unfinished"><numerusform></numerusform></translation>
</message>
<message>
<source>%1 behind</source>
<translation>%1 عقبتر</translation>
</message>
<message>
<source>Last received block was generated %1 ago.</source>
<translation>آخرین بلاک دریافتی %1 پیش ایجاد شده است.</translation>
</message>
<message>
<source>Transactions after this will not yet be visible.</source>
<translation>تراکنشهای بعد از این هنوز قابل مشاهده نیستند.</translation>
</message>
<message>
<source>Error</source>
<translation>خطا</translation>
</message>
<message>
<source>Warning</source>
<translation>هشدار</translation>
</message>
<message>
<source>Information</source>
<translation>اطلاعات</translation>
</message>
<message>
<source>Up to date</source>
<translation>وضعیت بهروز</translation>
</message>
<message>
<source>Catching up...</source>
<translation>بهروز رسانی...</translation>
</message>
<message>
<source>Sent transaction</source>
<translation>تراکنش ارسال شد</translation>
</message>
<message>
<source>Incoming transaction</source>
<translation>تراکنش دریافت شد</translation>
</message>
<message>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>تاریخ: %1
مبلغ: %2
نوع: %3
نشانی: %4
</translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>کیف پول <b>رمزنگاری شده</b> است و هماکنون <b>باز</b> است</translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>کیف پول <b>رمزنگاری شده</b> است و هماکنون <b>قفل</b> است</translation>
</message>
<message>
<source>A fatal error occurred. POND can no longer continue safely and will quit.</source>
<translation>یک خطای مهلک اتفاق افتاده است. بیتکوین نمیتواند بدون مشکل به کار خود ادامه دهد و بسته خواهد شد.</translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<source>Network Alert</source>
<translation>پیام شبکه</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>Coin Control Address Selection</source>
<translation type="unfinished"/>
</message>
<message>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Amount:</source>
<translation>مبلغ:</translation>
</message>
<message>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Change:</source>
<translation type="unfinished"/>
</message>
<message>
<source>(un)select all</source>
<translation type="unfinished"/>
</message>
<message>
<source>Tree mode</source>
<translation type="unfinished"/>
</message>
<message>
<source>List mode</source>
<translation type="unfinished"/>
</message>
<message>
<source>Amount</source>
<translation>مبلغ</translation>
</message>
<message>
<source>Address</source>
<translation>نشانی</translation>
</message>
<message>
<source>Date</source>
<translation>تاریخ</translation>
</message>
<message>
<source>Confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<source>Confirmed</source>
<translation>تأیید شده</translation>
</message>
<message>
<source>Priority</source>
<translation type="unfinished"/>
</message>
<message>
<source>Copy address</source>
<translation>کپی نشانی</translation>
</message>
<message>
<source>Copy label</source>
<translation>کپی برچسب</translation>
</message>
<message>
<source>Copy amount</source>
<translation>کپی مقدار</translation>
</message>
<message>
<source>Copy transaction ID</source>
<translation>کپی شناسهٔ تراکنش</translation>
</message>
<message>
<source>Lock unspent</source>
<translation type="unfinished"/>
</message>
<message>
<source>Unlock unspent</source>
<translation type="unfinished"/>
</message>
<message>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<source>highest</source>
<translation type="unfinished"/>
</message>
<message>
<source>higher</source>
<translation type="unfinished"/>
</message>
<message>
<source>high</source>
<translation type="unfinished"/>
</message>
<message>
<source>medium-high</source>
<translation type="unfinished"/>
</message>
<message>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<source>low-medium</source>
<translation type="unfinished"/>
</message>
<message>
<source>low</source>
<translation type="unfinished"/>
</message>
<message>
<source>lower</source>
<translation type="unfinished"/>
</message>
<message>
<source>lowest</source>
<translation type="unfinished"/>
</message>
<message>
<source>(%1 locked)</source>
<translation type="unfinished"/>
</message>
<message>
<source>none</source>
<translation type="unfinished"/>
</message>
<message>
<source>Dust</source>
<translation type="unfinished"/>
</message>
<message>
<source>yes</source>
<translation type="unfinished"/>
</message>
<message>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<source>This label turns red, if the transaction size is greater than 1000 bytes.</source>
<translation type="unfinished"/>
</message>
<message>
<source>This means a fee of at least %1 per kB is required.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Can vary +/- 1 byte per input.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Transactions with higher priority are more likely to get included into a block.</source>
<translation type="unfinished"/>
</message>
<message>
<source>This label turns red, if the priority is smaller than "medium".</source>
<translation type="unfinished"/>
</message>
<message>
<source>This label turns red, if any recipient receives an amount smaller than %1.</source>
<translation type="unfinished"/>
</message>
<message>
<source>This means a fee of at least %1 is required.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Amounts below 0.546 times the minimum relay fee are shown as dust.</source>
<translation type="unfinished"/>
</message>
<message>
<source>This label turns red, if the change is smaller than %1.</source>
<translation type="unfinished"/>
</message>
<message>
<source>(no label)</source>
<translation>(بدون برچسب)</translation>
</message>
<message>
<source>change from %1 (%2)</source>
<translation type="unfinished"/>
</message>
<message>
<source>(change)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<source>Edit Address</source>
<translation>ویرایش نشانی</translation>
</message>
<message>
<source>&Label</source>
<translation>&برچسب</translation>
</message>
<message>
<source>The label associated with this address list entry</source>
<translation type="unfinished"/>
</message>
<message>
<source>The address associated with this address list entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<source>&Address</source>
<translation>&نشانی</translation>
</message>
<message>
<source>New receiving address</source>
<translation>نشانی دریافتی جدید</translation>
</message>
<message>
<source>New sending address</source>
<translation>نشانی ارسالی جدید</translation>
</message>
<message>
<source>Edit receiving address</source>
<translation>ویرایش نشانی دریافتی</translation>
</message>
<message>
<source>Edit sending address</source>
<translation>ویرایش نشانی ارسالی</translation>
</message>
<message>
<source>The entered address "%1" is already in the address book.</source>
<translation>نشانی وارد شده «%1» در حال حاضر در دفترچه وجود دارد.</translation>
</message>
<message>
<source>The entered address "%1" is not a valid POND address.</source>
<translation>نشانی وارد شده «%1» یک نشانی معتبر بیتکوین نیست.</translation>
</message>
<message>
<source>Could not unlock wallet.</source>
<translation>نمیتوان کیف پول را رمزگشایی کرد.</translation>
</message>
<message>
<source>New key generation failed.</source>
<translation>ایجاد کلید جدید با شکست مواجه شد.</translation>
</message>
</context>
<context>
<name>FreespaceChecker</name>
<message>
<source>A new data directory will be created.</source>
<translation>یک مسیر دادهٔ جدید ایجاد خواهد شد.</translation>
</message>
<message>
<source>name</source>
<translation>نام</translation>
</message>
<message>
<source>Directory already exists. Add %1 if you intend to create a new directory here.</source>
<translation>این پوشه در حال حاضر وجود دارد. اگر میخواهید یک دایرکتوری جدید در اینجا ایجاد کنید، %1 را اضافه کنید.</translation>
</message>
<message>
<source>Path already exists, and is not a directory.</source>
<translation>مسیر داده شده موجود است و به یک پوشه اشاره نمیکند.</translation>
</message>
<message>
<source>Cannot create data directory here.</source>
<translation>نمیتوان پوشهٔ داده در اینجا ایجاد کرد.</translation>
</message>
</context>
<context>
<name>HelpMessageDialog</name>
<message>
<source>POND Core - Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<source>POND Core</source>
<translation> هسته POND </translation>
</message>
<message>
<source>version</source>
<translation>نسخه</translation>
</message>
<message>
<source>Usage:</source>
<translation>استفاده:</translation>
</message>
<message>
<source>command-line options</source>
<translation>گزینههای خط فرمان</translation>
</message>
<message>
<source>UI options</source>
<translation>گزینههای رابط کاربری</translation>
</message>
<message>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>زبان را تنظیم کنید؛ برای مثال «de_DE» (زبان پیشفرض محلی)</translation>
</message>
<message>
<source>Start minimized</source>
<translation>اجرای برنامه به صورت کوچکشده</translation>
</message>
<message>
<source>Set SSL root certificates for payment request (default: -system-)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Show splash screen on startup (default: 1)</source>
<translation>نمایش پنجرهٔ خوشامدگویی در ابتدای اجرای برنامه (پیشفرض: 1)</translation>
</message>
<message>
<source>Choose data directory on startup (default: 0)</source>
<translation>انتخاب مسیر دادهها در ابتدای اجرای برنامه (پیشفرض: 0)</translation>
</message>
</context>
<context>
<name>Intro</name>
<message>
<source>Welcome</source>
<translation>خوشآمدید</translation>
</message>
<message>
<source>Welcome to POND Core.</source>
<translation type="unfinished"/>
</message>
<message>
<source>As this is the first time the program is launched, you can choose where POND Core will store its data.</source>
<translation type="unfinished"/>
</message>
<message>
<source>POND Core will download and store a copy of the POND block chain. At least %1GB of data will be stored in this directory, and it will grow over time. The wallet will also be stored in this directory.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use the default data directory</source>
<translation>استفاده از مسیر پیشفرض</translation>
</message>
<message>
<source>Use a custom data directory:</source>
<translation>استفاده از یک مسیر سفارشی:</translation>
</message>
<message>
<source>POND</source>
<translation>بیتکوین</translation>
</message>
<message>
<source>Error: Specified data directory "%1" can not be created.</source>
<translation>خطا: نمیتوان پوشهای برای دادهها در «%1» ایجاد کرد.</translation>
</message>
<message>
<source>Error</source>
<translation>خطا</translation>
</message>
<message>
<source>GB of free space available</source>
<translation>گیگابات فضا موجود است</translation>
</message>
<message>
<source>(of %1GB needed)</source>
<translation>(از %1 گیگابایت فضای مورد نیاز)</translation>
</message>
</context>
<context>
<name>OpenURIDialog</name>
<message>
<source>Open URI</source>
<translation type="unfinished"/>
</message>
<message>
<source>Open payment request from URI or file</source>
<translation type="unfinished"/>
</message>
<message>
<source>URI:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Select payment request file</source>
<translation type="unfinished"/>
</message>
<message>
<source>Select payment request file to open</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<source>Options</source>
<translation>گزینهها</translation>
</message>
<message>
<source>&Main</source>
<translation>&عمومی</translation>
</message>
<message>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation>کارمزد اختیاریِ هر کیلوبایت برای انتقال سریعتر تراکنش. اکثر تراکنشها ۱ کیلوبایتی هستند.</translation>
</message>
<message>
<source>Pay transaction &fee</source>
<translation>پرداخت &کارمزد تراکنش</translation>
</message>
<message>
<source>Automatically start POND after logging in to the system.</source>
<translation>اجرای خودکار بیتکوین در زمان ورود به سیستم.</translation>
</message>
<message>
<source>&Start POND on system login</source>
<translation>&اجرای بیتکوین با ورود به سیستم</translation>
</message>
<message>
<source>Size of &database cache</source>
<translation type="unfinished"/>
</message>
<message>
<source>MB</source>
<translation type="unfinished"/>
</message>
<message>
<source>Number of script &verification threads</source>
<translation type="unfinished"/>
</message>
<message>
<source>Connect to the POND network through a SOCKS proxy.</source>
<translation type="unfinished"/>
</message>
<message>
<source>&Connect through SOCKS proxy (default proxy):</source>
<translation type="unfinished"/>
</message>
<message>
<source>IP address of the proxy (e.g. IPv4: 127.0.0.1 / IPv6: ::1)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Third party URLs (e.g. a block explorer) that appear in the transactions tab as context menu items. %s in the URL is replaced by transaction hash. Multiple URLs are separated by vertical bar |.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Third party transaction URLs</source>
<translation type="unfinished"/>
</message>
<message>
<source>Active command-line options that override above options:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Reset all client options to default.</source>
<translation>بازنشانی تمام تنظیمات به پیشفرض.</translation>
</message>
<message>
<source>&Reset Options</source>
<translation>&بازنشانی تنظیمات</translation>
</message>
<message>
<source>&Network</source>
<translation>&شبکه</translation>
</message>
<message>
<source>(0 = auto, <0 = leave that many cores free)</source>
<translation type="unfinished"/>
</message>
<message>
<source>W&allet</source>
<translation type="unfinished"/>
</message>
<message>
<source>Expert</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enable coin &control features</source>
<translation type="unfinished"/>
</message>
<message>
<source>If you disable the spending of unconfirmed change, the change from a transaction cannot be used until that transaction has at least one confirmation. This also affects how your balance is computed.</source>
<translation type="unfinished"/>
</message>
<message>
<source>&Spend unconfirmed change</source>
<translation type="unfinished"/>
</message>
<message>
<source>Automatically open the POND client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>باز کردن خودکار درگاه شبکهٔ بیتکوین روی روترها. تنها زمانی کار میکند که روتر از پروتکل UPnP پشتیبانی کند و این پروتکل فعال باشد.</translation>
</message>
<message>
<source>Map port using &UPnP</source>
<translation>نگاشت درگاه شبکه با استفاده از پروتکل &UPnP</translation>
</message>
<message>
<source>Proxy &IP:</source>
<translation>آ&یپی پراکسی:</translation>
</message>
<message>
<source>&Port:</source>
<translation>&درگاه:</translation>
</message>
<message>
<source>Port of the proxy (e.g. 9050)</source>
<translation>درگاه پراکسی (مثال 9050)</translation>
</message>
<message>
<source>SOCKS &Version:</source>
<translation>&نسخهٔ SOCKS:</translation>
</message>
<message>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation>نسخهٔ پراکسی SOCKS (مثلاً 5)</translation>
</message>
<message>
<source>&Window</source>
<translation>&پنجره</translation><|fim▁hole|> <message>
<source>Show only a tray icon after minimizing the window.</source>
<translation>تنها بعد از کوچک کردن پنجره، tray icon را نشان بده.</translation>
</message>
<message>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&کوچک کردن به سینی بهجای نوار وظیفه</translation>
</message>
<message>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>مخفی کردن در نوار کناری بهجای خروج هنگام بستن پنجره. زمانی که این گزینه فعال است، برنامه فقط با استفاده از گزینهٔ خروج در منو قابل بسته شدن است.</translation>
</message>
<message>
<source>M&inimize on close</source>
<translation>کوچک کردن &در زمان بسته شدن</translation>
</message>
<message>
<source>&Display</source>
<translation>&نمایش</translation>
</message>
<message>
<source>User Interface &language:</source>
<translation>زبان &رابط کاربری:</translation>
</message>
<message>
<source>The user interface language can be set here. This setting will take effect after restarting POND.</source>
<translation>زبان رابط کاربر میتواند در اینجا تنظیم شود. تنظیمات بعد از ظروع مجدد بیتکوین اعمال خواهد شد.</translation>
</message>
<message>
<source>&Unit to show amounts in:</source>
<translation>&واحد نمایش مبالغ:</translation>
</message>
<message>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>انتخاب واحد پول مورد استفاده برای نمایش در پنجرهها و برای ارسال سکه.</translation>
</message>
<message>
<source>Whether to show POND addresses in the transaction list or not.</source>
<translation>نمایش یا عدم نمایش نشانیهای بیتکوین در لیست تراکنشها.</translation>
</message>
<message>
<source>&Display addresses in transaction list</source>
<translation>نمایش ن&شانیها در فهرست تراکنشها</translation>
</message>
<message>
<source>Whether to show coin control features or not.</source>
<translation type="unfinished"/>
</message>
<message>
<source>&OK</source>
<translation>&تأیید</translation>
</message>
<message>
<source>&Cancel</source>
<translation>&لغو</translation>
</message>
<message>
<source>default</source>
<translation>پیشفرض</translation>
</message>
<message>
<source>none</source>
<translation type="unfinished"/>
</message>
<message>
<source>Confirm options reset</source>
<translation>تأییدِ بازنشانی گزینهها</translation>
</message>
<message>
<source>Client restart required to activate changes.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Client will be shutdown, do you want to proceed?</source>
<translation type="unfinished"/>
</message>
<message>
<source>This change would require a client restart.</source>
<translation type="unfinished"/>
</message>
<message>
<source>The supplied proxy address is invalid.</source>
<translation>آدرس پراکسی داده شده صحیح نیست.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<source>Form</source>
<translation>فرم</translation>
</message>
<message>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the POND network after a connection is established, but this process has not completed yet.</source>
<translation>اطلاعات نمایشداده شده ممکن است قدیمی باشند. بعد از این که یک اتصال با شبکه برقرار شد، کیف پول شما بهصورت خودکار با شبکهٔ بیتکوین همگامسازی میشود. اما این روند هنوز کامل نشده است.</translation>
</message>
<message>
<source>Wallet</source>
<translation>کیف پول</translation>
</message>
<message>
<source>Available:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Your current spendable balance</source>
<translation>تراز علیالحساب شما</translation>
</message>
<message>
<source>Pending:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the spendable balance</source>
<translation>مجموع تراکنشهایی که هنوز تأیید نشدهاند؛ و هنوز روی تراز علیالحساب اعمال نشدهاند</translation>
</message>
<message>
<source>Immature:</source>
<translation>نارسیده:</translation>
</message>
<message>
<source>Mined balance that has not yet matured</source>
<translation>تراز استخراج شده از معدن که هنوز بالغ نشده است</translation>
</message>
<message>
<source>Total:</source>
<translation>جمع کل:</translation>
</message>
<message>
<source>Your current total balance</source>
<translation>تراز کل فعلی شما</translation>
</message>
<message>
<source><b>Recent transactions</b></source>
<translation><b>تراکنشهای اخیر</b></translation>
</message>
<message>
<source>out of sync</source>
<translation>ناهمگام</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<source>URI handling</source>
<translation>مدیریت URI</translation>
</message>
<message>
<source>URI can not be parsed! This can be caused by an invalid POND address or malformed URI parameters.</source>
<translation>نشانی اینترنتی قابل تجزیه و تحلیل نیست! دلیل این وضعیت ممکن است یک نشانی نامعتبر بیتکوین و یا پارامترهای ناهنجار در URI بوده باشد.</translation>
</message>
<message>
<source>Requested payment amount of %1 is too small (considered dust).</source>
<translation type="unfinished"/>
</message>
<message>
<source>Payment request error</source>
<translation type="unfinished"/>
</message>
<message>
<source>Cannot start POND: click-to-pay handler</source>
<translation>نمیتوان بیتکوین را اجرا کرد: کنترلکنندهٔ کلیک-و-پرداخت</translation>
</message>
<message>
<source>Net manager warning</source>
<translation type="unfinished"/>
</message>
<message>
<source>Your active proxy doesn't support SOCKS5, which is required for payment requests via proxy.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Payment request fetch URL is invalid: %1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Payment request file handling</source>
<translation type="unfinished"/>
</message>
<message>
<source>Payment request file can not be read or processed! This can be caused by an invalid payment request file.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Unverified payment requests to custom payment scripts are unsupported.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Refund from %1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Error communicating with %1: %2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Payment request can not be parsed or processed!</source>
<translation type="unfinished"/>
</message>
<message>
<source>Bad response from server %1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Payment acknowledged</source>
<translation type="unfinished"/>
</message>
<message>
<source>Network request error</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QObject</name>
<message>
<source>POND</source>
<translation>بیتکوین</translation>
</message>
<message>
<source>Error: Specified data directory "%1" does not exist.</source>
<translation>خطا: پوشهٔ مشخص شده برای دادهها در «%1» وجود ندارد.</translation>
</message>
<message>
<source>Error: Cannot parse configuration file: %1. Only use key=value syntax.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Error: Invalid combination of -regtest and -testnet.</source>
<translation type="unfinished"/>
</message>
<message>
<source>POND Core didn't yet exit safely...</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enter a POND address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>یک آدرس بیتکوین وارد کنید (مثلاً 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
</context>
<context>
<name>QRImageWidget</name>
<message>
<source>&Save Image...</source>
<translation type="unfinished"/>
</message>
<message>
<source>&Copy Image</source>
<translation type="unfinished"/>
</message>
<message>
<source>Save QR Code</source>
<translation>ذخیرهٔ کد QR</translation>
</message>
<message>
<source>PNG Image (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<source>Client name</source>
<translation>نام کلاینت</translation>
</message>
<message>
<source>N/A</source>
<translation>ناموجود</translation>
</message>
<message>
<source>Client version</source>
<translation>نسخهٔ کلاینت</translation>
</message>
<message>
<source>&Information</source>
<translation>&اطلاعات</translation>
</message>
<message>
<source>Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<source>General</source>
<translation type="unfinished"/>
</message>
<message>
<source>Using OpenSSL version</source>
<translation>نسخهٔ OpenSSL استفاده شده</translation>
</message>
<message>
<source>Startup time</source>
<translation>زمان آغاز به کار</translation>
</message>
<message>
<source>Network</source>
<translation>شبکه</translation>
</message>
<message>
<source>Name</source>
<translation>اسم</translation>
</message>
<message>
<source>Number of connections</source>
<translation>تعداد ارتباطات</translation>
</message>
<message>
<source>Block chain</source>
<translation>زنجیرهٔ بلوکها</translation>
</message>
<message>
<source>Current number of blocks</source>
<translation>تعداد فعلی بلوکها</translation>
</message>
<message>
<source>Estimated total blocks</source>
<translation>تعداد تخمینی بلوکها</translation>
</message>
<message>
<source>Last block time</source>
<translation>زمان آخرین بلوک</translation>
</message>
<message>
<source>&Open</source>
<translation>با&ز کردن</translation>
</message>
<message>
<source>&Console</source>
<translation>&کنسول</translation>
</message>
<message>
<source>&Network Traffic</source>
<translation type="unfinished"/>
</message>
<message>
<source>&Clear</source>
<translation type="unfinished"/>
</message>
<message>
<source>Totals</source>
<translation type="unfinished"/>
</message>
<message>
<source>In:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Out:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Build date</source>
<translation>ساخت تاریخ</translation>
</message>
<message>
<source>Debug log file</source>
<translation>فایلِ لاگِ اشکال زدایی</translation>
</message>
<message>
<source>Open the POND debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation>فایلِ لاگِ اشکال زدایی POND را از دایرکتوری جاری داده ها باز کنید. این عملیات ممکن است برای فایلهای لاگِ حجیم طولانی شود.</translation>
</message>
<message>
<source>Clear console</source>
<translation>پاکسازی کنسول</translation>
</message>
<message>
<source>Welcome to the POND RPC console.</source>
<translation>به کنسور RPC بیتکوین خوش آمدید.</translation>
</message>
<message>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>دکمههای بالا و پایین برای پیمایش تاریخچه و <b>Ctrl-L</b> برای پاک کردن صفحه.</translation>
</message>
<message>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>برای نمایش یک مرور کلی از دستورات ممکن، عبارت <b>help</b> را بنویسید.</translation>
</message>
<message>
<source>%1 B</source>
<translation type="unfinished"/>
</message>
<message>
<source>%1 KB</source>
<translation type="unfinished"/>
</message>
<message>
<source>%1 MB</source>
<translation type="unfinished"/>
</message>
<message>
<source>%1 GB</source>
<translation type="unfinished"/>
</message>
<message>
<source>%1 m</source>
<translation type="unfinished"/>
</message>
<message>
<source>%1 h</source>
<translation type="unfinished"/>
</message>
<message>
<source>%1 h %2 m</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ReceiveCoinsDialog</name>
<message>
<source>&Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<source>&Label:</source>
<translation>&برچسب:</translation>
</message>
<message>
<source>&Message:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Reuse one of the previously used receiving addresses. Reusing addresses has security and privacy issues. Do not use this unless re-generating a payment request made before.</source>
<translation type="unfinished"/>
</message>
<message>
<source>R&euse an existing receiving address (not recommended)</source>
<translation type="unfinished"/>
</message>
<message>
<source>An optional message to attach to the payment request, which will be displayed when the request is opened. Note: The message will not be sent with the payment over the POND network.</source>
<translation type="unfinished"/>
</message>
<message>
<source>An optional label to associate with the new receiving address.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use this form to request payments. All fields are <b>optional</b>.</source>
<translation type="unfinished"/>
</message>
<message>
<source>An optional amount to request. Leave this empty or zero to not request a specific amount.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Clear all fields of the form.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Clear</source>
<translation type="unfinished"/>
</message>
<message>
<source>Requested payments history</source>
<translation type="unfinished"/>
</message>
<message>
<source>&Request payment</source>
<translation type="unfinished"/>
</message>
<message>
<source>Show the selected request (does the same as double clicking an entry)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Show</source>
<translation type="unfinished"/>
</message>
<message>
<source>Remove the selected entries from the list</source>
<translation type="unfinished"/>
</message>
<message>
<source>Remove</source>
<translation type="unfinished"/>
</message>
<message>
<source>Copy label</source>
<translation>کپی برچسب</translation>
</message>
<message>
<source>Copy message</source>
<translation type="unfinished"/>
</message>
<message>
<source>Copy amount</source>
<translation>کپی مقدار</translation>
</message>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<source>QR Code</source>
<translation>کد QR</translation>
</message>
<message>
<source>Copy &URI</source>
<translation type="unfinished"/>
</message>
<message>
<source>Copy &Address</source>
<translation type="unfinished"/>
</message>
<message>
<source>&Save Image...</source>
<translation type="unfinished"/>
</message>
<message>
<source>Request payment to %1</source>
<translation type="unfinished"/>
</message>
<message>
<source>Payment information</source>
<translation type="unfinished"/>
</message>
<message>
<source>URI</source>
<translation type="unfinished"/>
</message>
<message>
<source>Address</source>
<translation>نشانی</translation>
</message>
<message>
<source>Amount</source>
<translation>مبلغ</translation>
</message>
<message>
<source>Label</source>
<translation>برچسب</translation>
</message>
<message>
<source>Message</source>
<translation>پیام</translation>
</message>
<message>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>URL ایجاد شده خیلی طولانی است. سعی کنید طول برچسب و یا پیام را کمتر کنید.</translation>
</message>
<message>
<source>Error encoding URI into QR Code.</source>
<translation>خطا در تبدیل نشانی اینترنتی به صورت کد QR.</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
<message>
<source>Date</source>
<translation>تاریخ</translation>
</message>
<message>
<source>Label</source>
<translation>برچسب</translation>
</message>
<message>
<source>Message</source>
<translation>پیام</translation>
</message>
<message>
<source>Amount</source>
<translation>مبلغ</translation>
</message>
<message>
<source>(no label)</source>
<translation>(بدون برچسب)</translation>
</message>
<message>
<source>(no message)</source>
<translation type="unfinished"/>
</message>
<message>
<source>(no amount)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>Send Coins</source>
<translation>ارسال سکه</translation>
</message>
<message>
<source>Coin Control Features</source>
<translation type="unfinished"/>
</message>
<message>
<source>Inputs...</source>
<translation type="unfinished"/>
</message>
<message>
<source>automatically selected</source>
<translation type="unfinished"/>
</message>
<message>
<source>Insufficient funds!</source>
<translation type="unfinished"/>
</message>
<message>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Amount:</source>
<translation>مبلغ:</translation>
</message>
<message>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Change:</source>
<translation type="unfinished"/>
</message>
<message>
<source>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Custom change address</source>
<translation type="unfinished"/>
</message>
<message>
<source>Send to multiple recipients at once</source>
<translation>ارسال به چند دریافتکنندهٔ بهطور همزمان</translation>
</message>
<message>
<source>Add &Recipient</source>
<translation>&دریافتکنندهٔ جدید</translation>
</message>
<message>
<source>Clear all fields of the form.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Clear &All</source>
<translation>پاکسازی &همه</translation>
</message>
<message>
<source>Balance:</source>
<translation>تزار:</translation>
</message>
<message>
<source>Confirm the send action</source>
<translation>عملیات ارسال را تأیید کنید</translation>
</message>
<message>
<source>S&end</source>
<translation>&ارسال</translation>
</message>
<message>
<source>Confirm send coins</source>
<translation>ارسال سکه را تأیید کنید</translation>
</message>
<message>
<source>%1 to %2</source>
<translation type="unfinished"/>
</message>
<message>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<source>Copy amount</source>
<translation>کپی مقدار</translation>
</message>
<message>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<source>Total Amount %1 (= %2)</source>
<translation type="unfinished"/>
</message>
<message>
<source>or</source>
<translation type="unfinished"/>
</message>
<message>
<source>The recipient address is not valid, please recheck.</source>
<translation>نشانی گیرنده معتبر نیست؛ لطفا دوباره بررسی کنید.</translation>
</message>
<message>
<source>The amount to pay must be larger than 0.</source>
<translation>مبلغ پرداخت باید بیشتر از ۰ باشد.</translation>
</message>
<message>
<source>The amount exceeds your balance.</source>
<translation>میزان پرداخت از تراز شما بیشتر است.</translation>
</message>
<message>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>با احتساب هزینهٔ %1 برای هر تراکنش، مجموع میزان پرداختی از مبلغ تراز شما بیشتر میشود.</translation>
</message>
<message>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>یک نشانی تکراری پیدا شد. در هر عملیات ارسال، به هر نشانی فقط مبلغ میتوان ارسال کرد.</translation>
</message>
<message>
<source>Transaction creation failed!</source>
<translation type="unfinished"/>
</message>
<message>
<source>The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Warning: Invalid POND address</source>
<translation type="unfinished"/>
</message>
<message>
<source>(no label)</source>
<translation>(بدون برچسب)</translation>
</message>
<message>
<source>Warning: Unknown change address</source>
<translation type="unfinished"/>
</message>
<message>
<source>Are you sure you want to send?</source>
<translation type="unfinished"/>
</message>
<message>
<source>added as transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<source>Payment request expired</source>
<translation type="unfinished"/>
</message>
<message>
<source>Invalid payment address %1</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<source>A&mount:</source>
<translation>A&مبلغ :</translation>
</message>
<message>
<source>Pay &To:</source>
<translation>پرداخ&ت به:</translation>
</message>
<message>
<source>The address to send the payment to (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>نشانی مقصد برای پرداخت (مثلاً 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
<message>
<source>Enter a label for this address to add it to your address book</source>
<translation>برای این نشانی یک برچسب وارد کنید تا در دفترچهٔ آدرس ذخیره شود</translation>
</message>
<message>
<source>&Label:</source>
<translation>&برچسب:</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation type="unfinished"/>
</message>
<message>
<source>This is a normal payment.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>چسباندن نشانی از حافظهٔ سیستم</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Remove this entry</source>
<translation type="unfinished"/>
</message>
<message>
<source>Message:</source>
<translation>پیام:</translation>
</message>
<message>
<source>This is a verified payment request.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enter a label for this address to add it to the list of used addresses</source>
<translation type="unfinished"/>
</message>
<message>
<source>A message that was attached to the POND: URI which will be stored with the transaction for your reference. Note: This message will not be sent over the POND network.</source>
<translation type="unfinished"/>
</message>
<message>
<source>This is an unverified payment request.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Pay To:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Memo:</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ShutdownWindow</name>
<message>
<source>POND Core is shutting down...</source>
<translation type="unfinished"/>
</message>
<message>
<source>Do not shut down the computer until this window disappears.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<source>Signatures - Sign / Verify a Message</source>
<translation>امضاها - امضا / تأیید یک پیام</translation>
</message>
<message>
<source>&Sign Message</source>
<translation>ا&مضای پیام</translation>
</message>
<message>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>برای احراز اینکه پیامها از جانب شما هستند، میتوانید آنها را با نشانی خودتان امضا کنید. مراقب باشید چیزی که بدان اطمینان ندارید را امضا نکنید زیرا حملات فیشینگ ممکن است بخواهند از.پیامی با امضای شما سوءاستفاده کنند. تنها مواردی را که حاوی اطلاعات دقیق و قابل قبول برای شما هستند امضا کنید.</translation>
</message>
<message>
<source>The address to sign the message with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>نشانی مورد استفاده برای امضا کردن پیام (برای مثال 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation type="unfinished"/>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>چسباندن نشانی از حافظهٔ سیستم</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Enter the message you want to sign here</source>
<translation>پیامی را که میخواهید امضا کنید در اینجا وارد کنید</translation>
</message>
<message>
<source>Signature</source>
<translation>امضا</translation>
</message>
<message>
<source>Copy the current signature to the system clipboard</source>
<translation>امضای فعلی را به حافظهٔ سیستم کپی کن</translation>
</message>
<message>
<source>Sign the message to prove you own this POND address</source>
<translation>برای اثبات تعلق این نشانی به شما، پیام را امضا کنید</translation>
</message>
<message>
<source>Sign &Message</source>
<translation>ا&مضای پیام</translation>
</message>
<message>
<source>Reset all sign message fields</source>
<translation>بازنشانی تمام فیلدهای پیام</translation>
</message>
<message>
<source>Clear &All</source>
<translation>پاک &کردن همه</translation>
</message>
<message>
<source>&Verify Message</source>
<translation>&شناسایی پیام</translation>
</message>
<message>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation>برای شناسایی پیام، نشانیِ امضا کننده و متن پیام را وارد کنید. (مطمئن شوید که فاصلهها، تبها و خطوط را عیناً کپی میکنید.) مراقب باشید در امضا چیزی بیشتر از آنچه در پیام میبینید وجود نداشته باشد تا فریب دزدان اینترنتی و حملات از نوع MITM را نخورید.</translation>
</message>
<message>
<source>The address the message was signed with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>نشانی مورد استفاده برای امضا کردن پیام (برای مثال 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
<message>
<source>Verify the message to ensure it was signed with the specified POND address</source>
<translation>برای حصول اطمینان از اینکه پیام با نشانی بیتکوین مشخص شده امضا است یا خیر، پیام را شناسایی کنید</translation>
</message>
<message>
<source>Verify &Message</source>
<translation>&شناسایی پیام</translation>
</message>
<message>
<source>Reset all verify message fields</source>
<translation>بازنشانی تمام فیلدهای پیام</translation>
</message>
<message>
<source>Enter a POND address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source>
<translation>یک نشانی بیتکوین وارد کنید (مثلاً 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation>
</message>
<message>
<source>Click "Sign Message" to generate signature</source>
<translation>برای ایجاد یک امضای جدید روی «امضای پیام» کلیک کنید</translation>
</message>
<message>
<source>The entered address is invalid.</source>
<translation>نشانی وارد شده نامعتبر است.</translation>
</message>
<message>
<source>Please check the address and try again.</source>
<translation>لطفاً نشانی را بررسی کنید و دوباره تلاش کنید.</translation>
</message>
<message>
<source>The entered address does not refer to a key.</source>
<translation>نشانی وارد شده به هیچ کلیدی اشاره نمیکند.</translation>
</message>
<message>
<source>Wallet unlock was cancelled.</source>
<translation>عملیات باز کرن قفل کیف پول لغو شد.</translation>
</message>
<message>
<source>Private key for the entered address is not available.</source>
<translation>کلید خصوصی برای نشانی وارد شده در دسترس نیست.</translation>
</message>
<message>
<source>Message signing failed.</source>
<translation>امضای پیام با شکست مواجه شد.</translation>
</message>
<message>
<source>Message signed.</source>
<translation>پیام امضا شد.</translation>
</message>
<message>
<source>The signature could not be decoded.</source>
<translation>امضا نمیتواند کدگشایی شود.</translation>
</message>
<message>
<source>Please check the signature and try again.</source>
<translation>لطفاً امضا را بررسی نموده و دوباره تلاش کنید.</translation>
</message>
<message>
<source>The signature did not match the message digest.</source>
<translation>امضا با خلاصهٔ پیام مطابقت ندارد.</translation>
</message>
<message>
<source>Message verification failed.</source>
<translation>شناسایی پیام با شکست مواجه شد.</translation>
</message>
<message>
<source>Message verified.</source>
<translation>پیام شناسایی شد.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<source>POND Core</source>
<translation> هسته POND </translation>
</message>
<message>
<source>The Bitcoin and POND Core developers</source>
<translation type="unfinished"/>
</message>
<message>
<source>[testnet]</source>
<translation>آزمایش شبکه</translation>
</message>
</context>
<context>
<name>TrafficGraphWidget</name>
<message>
<source>KB/s</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<source>Open until %1</source>
<translation>باز تا %1</translation>
</message>
<message>
<source>conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<source>%1/offline</source>
<translation>%1/آفلاین</translation>
</message>
<message>
<source>%1/unconfirmed</source>
<translation>%1/تأیید نشده</translation>
</message>
<message>
<source>%1 confirmations</source>
<translation>%1 تأییدیه</translation>
</message>
<message>
<source>Status</source>
<translation>وضعیت</translation>
</message>
<message numerus="yes">
<source>, broadcast through %n node(s)</source>
<translation><numerusform>، پخش از طریق %n گره</numerusform></translation>
</message>
<message>
<source>Date</source>
<translation>تاریخ</translation>
</message>
<message>
<source>Source</source>
<translation>منبع</translation>
</message>
<message>
<source>Generated</source>
<translation>تولید شده</translation>
</message>
<message>
<source>From</source>
<translation>فرستنده</translation>
</message>
<message>
<source>To</source>
<translation>گیرنده</translation>
</message>
<message>
<source>own address</source>
<translation>آدرس شما</translation>
</message>
<message>
<source>label</source>
<translation>برچسب</translation>
</message>
<message>
<source>Credit</source>
<translation>بدهی</translation>
</message>
<message numerus="yes">
<source>matures in %n more block(s)</source>
<translation><numerusform>بلوغ در %n بلوک دیگر</numerusform></translation>
</message>
<message>
<source>not accepted</source>
<translation>پذیرفته نشد</translation>
</message>
<message>
<source>Debit</source>
<translation>اعتبار</translation>
</message>
<message>
<source>Transaction fee</source>
<translation>هزینهٔ تراکنش</translation>
</message>
<message>
<source>Net amount</source>
<translation>مبلغ خالص</translation>
</message>
<message>
<source>Message</source>
<translation>پیام</translation>
</message>
<message>
<source>Comment</source>
<translation>نظر</translation>
</message>
<message>
<source>Transaction ID</source>
<translation>شناسهٔ تراکنش</translation>
</message>
<message>
<source>Merchant</source>
<translation type="unfinished"/>
</message>
<message>
<source>Generated coins must mature %1 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Debug information</source>
<translation>اطلاعات اشکالزدایی</translation>
</message>
<message>
<source>Transaction</source>
<translation>تراکنش</translation>
</message>
<message>
<source>Inputs</source>
<translation>ورودیها</translation>
</message>
<message>
<source>Amount</source>
<translation>مبلغ</translation>
</message>
<message>
<source>true</source>
<translation>درست</translation>
</message>
<message>
<source>false</source>
<translation>نادرست</translation>
</message>
<message>
<source>, has not been successfully broadcast yet</source>
<translation>، هنوز با موفقیت ارسال نشده</translation>
</message>
<message numerus="yes">
<source>Open for %n more block(s)</source>
<translation><numerusform>باز برای %n بلوک دیگر</numerusform></translation>
</message>
<message>
<source>unknown</source>
<translation>ناشناس</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<source>Transaction details</source>
<translation>جزئیات تراکنش</translation>
</message>
<message>
<source>This pane shows a detailed description of the transaction</source>
<translation>این پانل شامل توصیف کاملی از جزئیات تراکنش است</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<source>Date</source>
<translation>تاریخ</translation>
</message>
<message>
<source>Type</source>
<translation>نوع</translation>
</message>
<message>
<source>Address</source>
<translation>نشانی</translation>
</message>
<message>
<source>Amount</source>
<translation>مبلغ</translation>
</message>
<message>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<source>Open for %n more block(s)</source>
<translation><numerusform>باز برای %n بلوک دیگر</numerusform></translation>
</message>
<message>
<source>Open until %1</source>
<translation>باز شده تا %1</translation>
</message>
<message>
<source>Confirmed (%1 confirmations)</source>
<translation>تأیید شده (%1 تأییدیه)</translation>
</message>
<message>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>این بلوک از هیچ همتای دیگری دریافت نشده است و احتمال میرود پذیرفته نشود!</translation>
</message>
<message>
<source>Generated but not accepted</source>
<translation>تولید شده ولی قبول نشده</translation>
</message>
<message>
<source>Offline</source>
<translation type="unfinished"/>
</message>
<message>
<source>Unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<source>Received with</source>
<translation>دریافتشده با</translation>
</message>
<message>
<source>Received from</source>
<translation>دریافتشده از</translation>
</message>
<message>
<source>Sent to</source>
<translation>ارسالشده به</translation>
</message>
<message>
<source>Payment to yourself</source>
<translation>پر داخت به خودتان</translation>
</message>
<message>
<source>Mined</source>
<translation>استخراجشده</translation>
</message>
<message>
<source>(n/a)</source>
<translation>(ناموجود)</translation>
</message>
<message>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>وضعیت تراکنش. نشانگر را روی این فیلد نگه دارید تا تعداد تأییدیهها نشان داده شود.</translation>
</message>
<message>
<source>Date and time that the transaction was received.</source>
<translation>تاریخ و ساعت دریافت تراکنش.</translation>
</message>
<message>
<source>Type of transaction.</source>
<translation>نوع تراکنش.</translation>
</message>
<message>
<source>Destination address of transaction.</source>
<translation>نشانی مقصد تراکنش.</translation>
</message>
<message>
<source>Amount removed from or added to balance.</source>
<translation>مبلغ کسر شده و یا اضافه شده به تراز.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<source>All</source>
<translation>همه</translation>
</message>
<message>
<source>Today</source>
<translation>امروز</translation>
</message>
<message>
<source>This week</source>
<translation>این هفته</translation>
</message>
<message>
<source>This month</source>
<translation>این ماه</translation>
</message>
<message>
<source>Last month</source>
<translation>ماه گذشته</translation>
</message>
<message>
<source>This year</source>
<translation>امسال</translation>
</message>
<message>
<source>Range...</source>
<translation>محدوده...</translation>
</message>
<message>
<source>Received with</source>
<translation>دریافتشده با </translation>
</message>
<message>
<source>Sent to</source>
<translation>ارسال به</translation>
</message>
<message>
<source>To yourself</source>
<translation>به خودتان</translation>
</message>
<message>
<source>Mined</source>
<translation>استخراجشده</translation>
</message>
<message>
<source>Other</source>
<translation>دیگر</translation>
</message>
<message>
<source>Enter address or label to search</source>
<translation>برای جستوجو نشانی یا برچسب را وارد کنید</translation>
</message>
<message>
<source>Min amount</source>
<translation>مبلغ حداقل</translation>
</message>
<message>
<source>Copy address</source>
<translation>کپی نشانی</translation>
</message>
<message>
<source>Copy label</source>
<translation>کپی برچسب</translation>
</message>
<message>
<source>Copy amount</source>
<translation>کپی مقدار</translation>
</message>
<message>
<source>Copy transaction ID</source>
<translation>کپی شناسهٔ تراکنش</translation>
</message>
<message>
<source>Edit label</source>
<translation>ویرایش برچسب</translation>
</message>
<message>
<source>Show transaction details</source>
<translation>نمایش جزئیات تراکنش</translation>
</message>
<message>
<source>Export Transaction History</source>
<translation type="unfinished"/>
</message>
<message>
<source>Exporting Failed</source>
<translation type="unfinished"/>
</message>
<message>
<source>There was an error trying to save the transaction history to %1.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Exporting Successful</source>
<translation type="unfinished"/>
</message>
<message>
<source>The transaction history was successfully saved to %1.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>پروندهٔ نوع CSV جداشونده با کاما (*.csv)</translation>
</message>
<message>
<source>Confirmed</source>
<translation>تأیید شده</translation>
</message>
<message>
<source>Date</source>
<translation>تاریخ</translation>
</message>
<message>
<source>Type</source>
<translation>نوع</translation>
</message>
<message>
<source>Label</source>
<translation>برچسب</translation>
</message>
<message>
<source>Address</source>
<translation>نشانی</translation>
</message>
<message>
<source>Amount</source>
<translation>مبلغ</translation>
</message>
<message>
<source>ID</source>
<translation>شناسه</translation>
</message>
<message>
<source>Range:</source>
<translation>محدوده:</translation>
</message>
<message>
<source>to</source>
<translation>به</translation>
</message>
</context>
<context>
<name>WalletFrame</name>
<message>
<source>No wallet has been loaded.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<source>Send Coins</source>
<translation>ارسال وجه</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<source>&Export</source>
<translation>&صدور</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>داده ها نوارِ جاری را به فایل انتقال دهید</translation>
</message>
<message>
<source>Backup Wallet</source>
<translation>نسخهٔ پشتیبان کیف پول</translation>
</message>
<message>
<source>Wallet Data (*.dat)</source>
<translation>دادهٔ کیف پول (*.dat)</translation>
</message>
<message>
<source>Backup Failed</source>
<translation>خطا در پشتیبانگیری</translation>
</message>
<message>
<source>There was an error trying to save the wallet data to %1.</source>
<translation type="unfinished"/>
</message>
<message>
<source>The wallet data was successfully saved to %1.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Backup Successful</source>
<translation>پشتیبانگیری موفق</translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<source>Usage:</source>
<translation>استفاده:</translation>
</message>
<message>
<source>List commands</source>
<translation>نمایش لیست فرمانها</translation>
</message>
<message>
<source>Get help for a command</source>
<translation>راهنمایی در مورد یک دستور</translation>
</message>
<message>
<source>Options:</source>
<translation>گزینهها:</translation>
</message>
<message>
<source>Specify configuration file (default: POND.conf)</source>
<translation>مشخص کردن فایل پیکربندی (پیشفرض: POND.conf)</translation>
</message>
<message>
<source>Specify pid file (default: PONDd.pid)</source>
<translation>مشخص کردن فایل شناسهٔ پردازش - pid - (پیشفرض: bitcoin.pid)</translation>
</message>
<message>
<source>Specify data directory</source>
<translation>مشخص کردن دایرکتوری دادهها</translation>
</message>
<message>
<source>Listen for connections on <port> (default: 16115 or testnet: 26115)</source>
<translation>پذیرش اتصالات روی پورت <port> (پیشفرض: 8833 یا شبکهٔ آزمایشی: 26115)</translation>
</message>
<message>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>حداکثر <n> اتصال با همتایان برقرار شود (پیشفرض: ۱۲۵)</translation>
</message>
<message>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>اتصال به یک گره برای دریافت آدرسهای همتا و قطع اتصال پس از اتمام عملیات</translation>
</message>
<message>
<source>Specify your own public address</source>
<translation>آدرس عمومی خود را مشخص کنید</translation>
</message>
<message>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>حد آستانه برای قطع ارتباط با همتایان بدرفتار (پیشفرض: ۱۰۰)</translation>
</message>
<message>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation>مدت زمان جلوگیری از اتصال مجدد همتایان بدرفتار، به ثانیه (پیشفرض: ۸۴۶۰۰)</translation>
</message>
<message>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation>هنگام تنظیم پورت RPC %u برای گوش دادن روی IPv4 خطایی رخ داده است: %s</translation>
</message>
<message>
<source>Listen for JSON-RPC connections on <port> (default: 16114 or testnet: 26114)</source>
<translation>پورت مورد شنود برای اتصالات JSON-RPC (پیشفرض: 16114 برای شبکهٔ تست 26114)</translation>
</message>
<message>
<source>Accept command line and JSON-RPC commands</source>
<translation>پذیرش دستورات خط فرمان و دستورات JSON-RPC</translation>
</message>
<message>
<source>POND Core RPC client version</source>
<translation type="unfinished"/>
</message>
<message>
<source>Run in the background as a daemon and accept commands</source>
<translation>اجرا در پشت زمینه بهصورت یک سرویس و پذیرش دستورات</translation>
</message>
<message>
<source>Use the test network</source>
<translation>استفاده از شبکهٔ آزمایش</translation>
</message>
<message>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>پذیرش اتصالات از بیرون (پیش فرض:1 بدون پراکسی یا اتصال)</translation>
</message>
<message>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=PONDrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "POND Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<source>Acceptable ciphers (default: TLSv1.2+HIGH:TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>مقید به نشانی داده شده باشید و همیشه از آن پیروی کنید. از نشانه گذاری استاندار IPv6 به صورت Host]:Port] استفاده کنید.</translation>
</message>
<message>
<source>Continuously rate-limit free transactions to <n>*1000 bytes per minute (default:15)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly. This is intended for regression testing tools and app development.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Error: Listening for incoming connections failed (listen returned error %d)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>تراکنش پذیرفته نیست! این خطا ممکن است در حالتی رخ داده باشد که مقداری از سکه های شما در کیف پولتان از جایی دیگر، همانند یک کپی از کیف پول اصلی اتان، خرج شده باشد اما در کیف پول اصلی اتان به عنوان مبلغ خرج شده، نشانه گذاری نشده باشد.</translation>
</message>
<message>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation>خطا: این تراکنش به علت میزان وجه، دشواری، و یا استفاده از وجوه دریافتی اخیر نیازمند کارمزد به مبلغ حداقل %s است.</translation>
</message>
<message>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>هنگامی که یک تراکنش در کیف پولی رخ می دهد، دستور را اجرا کن(%s در دستورات بوسیله ی TxID جایگزین می شود)</translation>
</message>
<message>
<source>Fees smaller than this are considered zero fee (for transaction creation) (default:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Flush database activity from memory pool to disk log every <n> megabytes (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<source>How thorough the block verification of -checkblocks is (0-4, default: 3)</source>
<translation type="unfinished"/>
</message>
<message>
<source>In this mode -genproclimit controls how many blocks are generated immediately.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Set the number of script verification threads (%u to %d, 0 = auto, <0 = leave that many cores free, default: %d)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Set the processor limit for when generation is on (-1 = unlimited, default: -1)</source>
<translation type="unfinished"/>
</message>
<message>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation>این یک نسخه ی آزمایشی است - با مسئولیت خودتان از آن استفاده کنید - آن را در معدن و بازرگانی بکار نگیرید.</translation>
</message>
<message>
<source>Unable to bind to %s on this computer. POND Core is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use separate SOCKS5 proxy to reach peers via Tor hidden services (default: -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>هشدار: مبلغ paytxfee بسیار بالایی تنظیم شده است! این مبلغ هزینهای است که شما برای تراکنشها پرداخت میکنید.</translation>
</message>
<message>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong POND will not work properly.</source>
<translation>هشدار: لطفا زمان و تاریخ رایانه خود را تصحیح نمایید! اگر ساعت رایانه شما اشتباه باشد POND ممکن است صحیح کار نکند</translation>
</message>
<message>
<source>Warning: The network does not appear to fully agree! Some miners appear to be experiencing issues.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Warning: We do not appear to fully agree with our peers! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<source>(default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<source>(default: wallet.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<source><category> can be:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<source>POND Core Daemon</source>
<translation type="unfinished"/>
</message>
<message>
<source>Block creation options:</source>
<translation>بستن گزینه ایجاد</translation>
</message>
<message>
<source>Clear list of wallet transactions (diagnostic tool; implies -rescan)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Connect only to the specified node(s)</source>
<translation>تنها در گره (های) مشخص شده متصل شوید</translation>
</message>
<message>
<source>Connect through SOCKS proxy</source>
<translation type="unfinished"/>
</message>
<message>
<source>Connect to JSON-RPC on <port> (default: 16114 or testnet: 26114)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Connection options:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Corrupted block database detected</source>
<translation>یک پایگاه داده ی بلوک خراب یافت شد</translation>
</message>
<message>
<source>Debugging/Testing options:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Disable safemode, override a real safe mode event (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>آدرس آی.پی. خود را شناسایی کنید (پیش فرض:1 در زمان when listening وno -externalip)</translation>
</message>
<message>
<source>Do not load the wallet and disable wallet RPC calls</source>
<translation type="unfinished"/>
</message>
<message>
<source>Do you want to rebuild the block database now?</source>
<translation>آیا مایلید که اکنون پایگاه داده ی بلوک را بازسازی کنید؟</translation>
</message>
<message>
<source>Error initializing block database</source>
<translation>خطا در آماده سازی پایگاه داده ی بلوک</translation>
</message>
<message>
<source>Error initializing wallet database environment %s!</source>
<translation type="unfinished"/>
</message>
<message>
<source>Error loading block database</source>
<translation>خطا در بارگذاری پایگاه داده ها</translation>
</message>
<message>
<source>Error opening block database</source>
<translation>خطا در بازگشایی پایگاه داده ی بلوک</translation>
</message>
<message>
<source>Error: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation type="unfinished"/>
</message>
<message>
<source>Error: system error: </source>
<translation>خطا: خطای سامانه:</translation>
</message>
<message>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>شنیدن هر گونه درگاه انجام پذیر نیست. ازlisten=0 برای اینکار استفاده کیند.</translation>
</message>
<message>
<source>Failed to read block info</source>
<translation>خواندن اطلاعات بلوک با شکست مواجه شد</translation>
</message>
<message>
<source>Failed to read block</source>
<translation>خواندن بلوک با شکست مواجه شد</translation>
</message>
<message>
<source>Failed to sync block index</source>
<translation>همگام سازی فهرست بلوک با شکست مواجه شد</translation>
</message>
<message>
<source>Failed to write block index</source>
<translation>نوشتن فهرست بلوک با شکست مواجه شد</translation>
</message>
<message>
<source>Failed to write block info</source>
<translation>نوشتن اطلاعات بلوک با شکست مواجه شد</translation>
</message>
<message>
<source>Failed to write block</source>
<translation>نوشتن بلوک با شکست مواجه شد</translation>
</message>
<message>
<source>Failed to write file info</source>
<translation>نوشتن اطلاعات پرونده با شکست مواجه شد</translation>
</message>
<message>
<source>Failed to write to coin database</source>
<translation>نوشتن اطلاعات در پایگاه داده ی سکه ها با شکست مواجه شد</translation>
</message>
<message>
<source>Failed to write transaction index</source>
<translation>نوشتن فهرست تراکنش ها با شکست مواجه شد</translation>
</message>
<message>
<source>Failed to write undo data</source>
<translation>عملیات بازگشت دادن اطلاعات با شکست مواجه شدن</translation>
</message>
<message>
<source>Fee per kB to add to transactions you send</source>
<translation>نرخ هر کیلوبایت برای اضافه کردن به تراکنشهایی که میفرستید</translation>
</message>
<message>
<source>Fees smaller than this are considered zero fee (for relaying) (default:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation>قرینه ها را برای جستجوی DNS بیاب (پیش فرض: 1 مگر در زمان اتصال)</translation>
</message>
<message>
<source>Force safe mode (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Generate coins (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation>چند بلوک نیاز است که در ابتدای راه اندازی بررسی شوند(پیش فرض:288 ،0=همه)</translation>
</message>
<message>
<source>If <category> is not supplied, output all debugging information.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Importing...</source>
<translation type="unfinished"/>
</message>
<message>
<source>Incorrect or no genesis block found. Wrong datadir for network?</source>
<translation type="unfinished"/>
</message>
<message>
<source>Invalid -onion address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<source>Not enough file descriptors available.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Prepend debug output with timestamp (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<source>RPC client options:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation type="unfinished"/>
</message>
<message>
<source>Select SOCKS version for -proxy (4 or 5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Set database cache size in megabytes (%d to %d, default: %d)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Set maximum block size in bytes (default: %d)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Specify wallet file (within data directory)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Spend unconfirmed change when sending transactions (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<source>This is intended for regression testing tools and app development.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Usage (deprecated, use POND-cli):</source>
<translation type="unfinished"/>
</message>
<message>
<source>Verifying blocks...</source>
<translation>در حال بازبینی بلوک ها...</translation>
</message>
<message>
<source>Verifying wallet...</source>
<translation>در حال بازبینی کیف پول...</translation>
</message>
<message>
<source>Wait for RPC server to start</source>
<translation type="unfinished"/>
</message>
<message>
<source>Wallet %s resides outside data directory %s</source>
<translation type="unfinished"/>
</message>
<message>
<source>Wallet options:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Warning: Deprecated argument -debugnet ignored, use -debug=net</source>
<translation type="unfinished"/>
</message>
<message>
<source>You need to rebuild the database using -reindex to change -txindex</source>
<translation type="unfinished"/>
</message>
<message>
<source>Imports blocks from external blk000??.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<source>Cannot obtain a lock on data directory %s. POND Core is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<source>Execute command when a relevant alert is received or we see a really long fork (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Output debugging information (default: 0, supplying <category> is optional)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: %d)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Information</source>
<translation>اطلاعات</translation>
</message>
<message>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<source>Limit size of signature cache to <n> entries (default: 50000)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Log transaction priority and fee per kB when mining blocks (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Maintain a full transaction index (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation>حداکثر بافر دریافت شده بر اساس اتصال <n>* 1000 بایت (پیش فرض:5000)</translation>
</message>
<message>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation>حداکثر بافر دریافت شده بر اساس اتصال <n>* 1000 بایت (پیش فرض:1000)</translation>
</message>
<message>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation>تنها =به گره ها در شبکه متصا شوید <net> (IPv4, IPv6 or Tor)</translation>
</message>
<message>
<source>Print block on startup, if found in block index</source>
<translation type="unfinished"/>
</message>
<message>
<source>Print block tree on startup (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<source>RPC SSL options: (see the POND Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<source>RPC server options:</source>
<translation type="unfinished"/>
</message>
<message>
<source>Randomly drop 1 of every <n> network messages</source>
<translation type="unfinished"/>
</message>
<message>
<source>Randomly fuzz 1 of every <n> network messages</source>
<translation type="unfinished"/>
</message>
<message>
<source>Run a thread to flush wallet periodically (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<source>SSL options: (see the POND Wiki for SSL setup instructions)</source>
<translation>گزینه ssl (به ویکیbitcoin برای راهنمای راه اندازی ssl مراجعه شود)</translation>
</message>
<message>
<source>Send command to POND Core</source>
<translation type="unfinished"/>
</message>
<message>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>اطلاعات ردگیری/اشکالزدایی را به جای فایل لاگ اشکالزدایی به کنسول بفرستید</translation>
</message>
<message>
<source>Set minimum block size in bytes (default: 0)</source>
<translation>حداقل سایز بلاک بر اساس بایت تنظیم شود (پیش فرض: 0)</translation>
</message>
<message>
<source>Sets the DB_PRIVATE flag in the wallet db environment (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Show all debugging options (usage: --help -help-debug)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Show benchmark information (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>فایل debug.log را در startup مشتری کوچک کن (پیش فرض:1 اگر اشکال زدایی روی نداد)</translation>
</message>
<message>
<source>Signing transaction failed</source>
<translation type="unfinished"/>
</message>
<message>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation>(میلی ثانیه )فاصله ارتباط خاص</translation>
</message>
<message>
<source>Start POND Core Daemon</source>
<translation type="unfinished"/>
</message>
<message>
<source>System error: </source>
<translation>خطای سامانه</translation>
</message>
<message>
<source>Transaction amount too small</source>
<translation type="unfinished"/>
</message>
<message>
<source>Transaction amounts must be positive</source>
<translation type="unfinished"/>
</message>
<message>
<source>Transaction too large</source>
<translation type="unfinished"/>
</message>
<message>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation>از UPnP برای شناسایی درگاه شنیداری استفاده کنید (پیش فرض:0)</translation>
</message>
<message>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>از UPnP برای شناسایی درگاه شنیداری استفاده کنید (پیش فرض:1 در زمان شنیدن)</translation>
</message>
<message>
<source>Username for JSON-RPC connections</source>
<translation>JSON-RPC شناسه برای ارتباطات</translation>
</message>
<message>
<source>Warning</source>
<translation>هشدار</translation>
</message>
<message>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>هشدار: این نسخه قدیمی است، روزآمدسازی مورد نیاز است</translation>
</message>
<message>
<source>Zapping all transactions from wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<source>on startup</source>
<translation type="unfinished"/>
</message>
<message>
<source>version</source>
<translation>نسخه</translation>
</message>
<message>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<source>Password for JSON-RPC connections</source>
<translation>JSON-RPC عبارت عبور برای ارتباطات</translation>
</message>
<message>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>از آدرس آی پی خاص JSON-RPC قبول ارتباطات</translation>
</message>
<message>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>(127.0.0.1پیش فرض: ) &lt;ip&gt; دادن فرمانها برای استفاده گره ها روی</translation>
</message>
<message>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>زمانی که بهترین بلاک تغییر کرد، دستور را اجرا کن (%s در cmd با block hash جایگزین شده است)</translation>
</message>
<message>
<source>Upgrade wallet to latest format</source>
<translation>wallet را به جدیدترین فرمت روزآمد کنید</translation>
</message>
<message>
<source>Set key pool size to <n> (default: 100)</source>
<translation> (100پیش فرض:)&lt;n&gt; گذاشتن اندازه کلید روی </translation>
</message>
<message>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>اسکان مجدد زنجیر بلوکها برای گم والت معامله</translation>
</message>
<message>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>JSON-RPCبرای ارتباطات استفاده کنید OpenSSL (https)</translation>
</message>
<message>
<source>Server certificate file (default: server.cert)</source>
<translation> (server.certپیش فرض: )گواهی نامه سرور</translation>
</message>
<message>
<source>Server private key (default: server.pem)</source>
<translation>(server.pemپیش فرض: ) کلید خصوصی سرور</translation>
</message>
<message>
<source>This help message</source>
<translation>پیام کمکی</translation>
</message>
<message>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation>امکان اتصال به %s از این رایانه وجود ندارد ( bind returned error %d, %s)</translation>
</message>
<message>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>به DNS اجازه بده تا برای addnode ، seednode و اتصال جستجو کند</translation>
</message>
<message>
<source>Loading addresses...</source>
<translation>بار گیری آدرس ها</translation>
</message>
<message>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>خطا در بارگیری wallet.dat: کیف پول خراب شده است</translation>
</message>
<message>
<source>Error loading wallet.dat: Wallet requires newer version of POND</source>
<translation>خطا در بارگیری wallet.dat: کیف پول به ویرایش جدیدتری از Biticon نیاز دارد</translation>
</message>
<message>
<source>Wallet needed to be rewritten: restart POND to complete</source>
<translation>سلام</translation>
</message>
<message>
<source>Error loading wallet.dat</source>
<translation>خطا در بارگیری wallet.dat</translation>
</message>
<message>
<source>Invalid -proxy address: '%s'</source>
<translation>آدرس پراکسی اشتباه %s</translation>
</message>
<message>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>شبکه مشخص شده غیرقابل شناسایی در onlynet: '%s'</translation>
</message>
<message>
<source>Unknown -socks proxy version requested: %i</source>
<translation>نسخه پراکسی ساکس غیرقابل شناسایی درخواست شده است: %i</translation>
</message>
<message>
<source>Cannot resolve -bind address: '%s'</source>
<translation>آدرس قابل اتصال- شناسایی نیست %s</translation>
</message>
<message>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>آدرس خارجی قابل اتصال- شناسایی نیست %s</translation>
</message>
<message>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>میزان وجه اشتباه برای paytxfee=<میزان وجه>: %s</translation>
</message>
<message>
<source>Invalid amount</source>
<translation>میزان وجه اشتباه</translation>
</message>
<message>
<source>Insufficient funds</source>
<translation>بود جه نا کافی </translation>
</message>
<message>
<source>Loading block index...</source>
<translation>بار گیری شاخص بلوک</translation>
</message>
<message>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>به اتصال یک گره اضافه کنید و اتصال را باز نگاه دارید</translation>
</message>
<message>
<source>Loading wallet...</source>
<translation>بار گیری والت</translation>
</message>
<message>
<source>Cannot downgrade wallet</source>
<translation>امکان تنزل نسخه در wallet وجود ندارد</translation>
</message>
<message>
<source>Cannot write default address</source>
<translation>آدرس پیش فرض قابل ذخیره نیست</translation>
</message>
<message>
<source>Rescanning...</source>
<translation>اسکان مجدد</translation>
</message>
<message>
<source>Done loading</source>
<translation>بار گیری انجام شده است</translation>
</message>
<message>
<source>To use the %s option</source>
<translation>برای استفاده از %s از انتخابات</translation>
</message>
<message>
<source>Error</source>
<translation>خطا</translation>
</message>
<message>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation>%s، شما باید یک rpcpassword را در فایل پیکربندی تنظیم کنید :⏎%s⏎ اگر فایل ایجاد نشد، یک فایل فقط متنی ایجاد کنید.
</translation>
</message>
</context>
</TS><|fim▁end|> | </message> |
<|file_name|>marshal.go<|end_file_name|><|fim▁begin|>// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package asn1
import (
"bytes"
"errors"
"fmt"
"io"
"math/big"
"reflect"
"time"
"unicode/utf8"
)
// A forkableWriter is an in-memory buffer that can be<|fim▁hole|>// 'forked' to create new forkableWriters that bracket the
// original. After
// pre, post := w.fork()
// the overall sequence of bytes represented is logically w+pre+post.
type forkableWriter struct {
*bytes.Buffer
pre, post *forkableWriter
}
func newForkableWriter() *forkableWriter {
return &forkableWriter{new(bytes.Buffer), nil, nil}
}
func (f *forkableWriter) fork() (pre, post *forkableWriter) {
if f.pre != nil || f.post != nil {
panic("have already forked")
}
f.pre = newForkableWriter()
f.post = newForkableWriter()
return f.pre, f.post
}
func (f *forkableWriter) Len() (l int) {
l += f.Buffer.Len()
if f.pre != nil {
l += f.pre.Len()
}
if f.post != nil {
l += f.post.Len()
}
return
}
func (f *forkableWriter) writeTo(out io.Writer) (n int, err error) {
n, err = out.Write(f.Bytes())
if err != nil {
return
}
var nn int
if f.pre != nil {
nn, err = f.pre.writeTo(out)
n += nn
if err != nil {
return
}
}
if f.post != nil {
nn, err = f.post.writeTo(out)
n += nn
}
return
}
func marshalBase128Int(out *forkableWriter, n int64) (err error) {
if n == 0 {
err = out.WriteByte(0)
return
}
l := 0
for i := n; i > 0; i >>= 7 {
l++
}
for i := l - 1; i >= 0; i-- {
o := byte(n >> uint(i*7))
o &= 0x7f
if i != 0 {
o |= 0x80
}
err = out.WriteByte(o)
if err != nil {
return
}
}
return nil
}
func marshalInt64(out *forkableWriter, i int64) (err error) {
n := int64Length(i)
for ; n > 0; n-- {
err = out.WriteByte(byte(i >> uint((n-1)*8)))
if err != nil {
return
}
}
return nil
}
func int64Length(i int64) (numBytes int) {
numBytes = 1
for i > 127 {
numBytes++
i >>= 8
}
for i < -128 {
numBytes++
i >>= 8
}
return
}
func marshalBigInt(out *forkableWriter, n *big.Int) (err error) {
if n.Sign() < 0 {
// A negative number has to be converted to two's-complement
// form. So we'll subtract 1 and invert. If the
// most-significant-bit isn't set then we'll need to pad the
// beginning with 0xff in order to keep the number negative.
nMinus1 := new(big.Int).Neg(n)
nMinus1.Sub(nMinus1, bigOne)
bytes := nMinus1.Bytes()
for i := range bytes {
bytes[i] ^= 0xff
}
if len(bytes) == 0 || bytes[0]&0x80 == 0 {
err = out.WriteByte(0xff)
if err != nil {
return
}
}
_, err = out.Write(bytes)
} else if n.Sign() == 0 {
// Zero is written as a single 0 zero rather than no bytes.
err = out.WriteByte(0x00)
} else {
bytes := n.Bytes()
if len(bytes) > 0 && bytes[0]&0x80 != 0 {
// We'll have to pad this with 0x00 in order to stop it
// looking like a negative number.
err = out.WriteByte(0)
if err != nil {
return
}
}
_, err = out.Write(bytes)
}
return
}
func marshalLength(out *forkableWriter, i int) (err error) {
n := lengthLength(i)
for ; n > 0; n-- {
err = out.WriteByte(byte(i >> uint((n-1)*8)))
if err != nil {
return
}
}
return nil
}
func lengthLength(i int) (numBytes int) {
numBytes = 1
for i > 255 {
numBytes++
i >>= 8
}
return
}
func marshalTagAndLength(out *forkableWriter, t tagAndLength) (err error) {
b := uint8(t.class) << 6
if t.isCompound {
b |= 0x20
}
if t.tag >= 31 {
b |= 0x1f
err = out.WriteByte(b)
if err != nil {
return
}
err = marshalBase128Int(out, int64(t.tag))
if err != nil {
return
}
} else {
b |= uint8(t.tag)
err = out.WriteByte(b)
if err != nil {
return
}
}
if t.length >= 128 {
l := lengthLength(t.length)
err = out.WriteByte(0x80 | byte(l))
if err != nil {
return
}
err = marshalLength(out, t.length)
if err != nil {
return
}
} else {
err = out.WriteByte(byte(t.length))
if err != nil {
return
}
}
return nil
}
func marshalBitString(out *forkableWriter, b BitString) (err error) {
paddingBits := byte((8 - b.BitLength%8) % 8)
err = out.WriteByte(paddingBits)
if err != nil {
return
}
_, err = out.Write(b.Bytes)
return
}
func marshalObjectIdentifier(out *forkableWriter, oid []int) (err error) {
if len(oid) < 2 || oid[0] > 2 || (oid[0] < 2 && oid[1] >= 40) {
return StructuralError{"invalid object identifier"}
}
err = marshalBase128Int(out, int64(oid[0]*40+oid[1]))
if err != nil {
return
}
for i := 2; i < len(oid); i++ {
err = marshalBase128Int(out, int64(oid[i]))
if err != nil {
return
}
}
return
}
func marshalPrintableString(out *forkableWriter, s string) (err error) {
b := []byte(s)
for _, c := range b {
if !isPrintable(c) {
return StructuralError{"PrintableString contains invalid character"}
}
}
_, err = out.Write(b)
return
}
func marshalIA5String(out *forkableWriter, s string) (err error) {
b := []byte(s)
for _, c := range b {
if c > 127 {
return StructuralError{"IA5String contains invalid character"}
}
}
_, err = out.Write(b)
return
}
func marshalUTF8String(out *forkableWriter, s string) (err error) {
_, err = out.Write([]byte(s))
return
}
func marshalTwoDigits(out *forkableWriter, v int) (err error) {
err = out.WriteByte(byte('0' + (v/10)%10))
if err != nil {
return
}
return out.WriteByte(byte('0' + v%10))
}
func marshalFourDigits(out *forkableWriter, v int) (err error) {
var bytes [4]byte
for i := range bytes {
bytes[3-i] = '0' + byte(v%10)
v /= 10
}
_, err = out.Write(bytes[:])
return
}
func outsideUTCRange(t time.Time) bool {
year := t.Year()
return year < 1950 || year >= 2050
}
func marshalUTCTime(out *forkableWriter, t time.Time) (err error) {
year := t.Year()
switch {
case 1950 <= year && year < 2000:
err = marshalTwoDigits(out, int(year-1900))
case 2000 <= year && year < 2050:
err = marshalTwoDigits(out, int(year-2000))
default:
return StructuralError{"cannot represent time as UTCTime"}
}
if err != nil {
return
}
return marshalTimeCommon(out, t)
}
func marshalGeneralizedTime(out *forkableWriter, t time.Time) (err error) {
year := t.Year()
if year < 0 || year > 9999 {
return StructuralError{"cannot represent time as GeneralizedTime"}
}
if err = marshalFourDigits(out, year); err != nil {
return
}
return marshalTimeCommon(out, t)
}
func marshalTimeCommon(out *forkableWriter, t time.Time) (err error) {
_, month, day := t.Date()
err = marshalTwoDigits(out, int(month))
if err != nil {
return
}
err = marshalTwoDigits(out, day)
if err != nil {
return
}
hour, min, sec := t.Clock()
err = marshalTwoDigits(out, hour)
if err != nil {
return
}
err = marshalTwoDigits(out, min)
if err != nil {
return
}
err = marshalTwoDigits(out, sec)
if err != nil {
return
}
_, offset := t.Zone()
switch {
case offset/60 == 0:
err = out.WriteByte('Z')
return
case offset > 0:
err = out.WriteByte('+')
case offset < 0:
err = out.WriteByte('-')
}
if err != nil {
return
}
offsetMinutes := offset / 60
if offsetMinutes < 0 {
offsetMinutes = -offsetMinutes
}
err = marshalTwoDigits(out, offsetMinutes/60)
if err != nil {
return
}
err = marshalTwoDigits(out, offsetMinutes%60)
return
}
func stripTagAndLength(in []byte) []byte {
_, offset, err := parseTagAndLength(in, 0)
if err != nil {
return in
}
return in[offset:]
}
func marshalBody(out *forkableWriter, value reflect.Value, params fieldParameters) (err error) {
switch value.Type() {
case flagType:
return nil
case timeType:
t := value.Interface().(time.Time)
if params.timeType == tagGeneralizedTime || outsideUTCRange(t) {
return marshalGeneralizedTime(out, t)
} else {
return marshalUTCTime(out, t)
}
case bitStringType:
return marshalBitString(out, value.Interface().(BitString))
case objectIdentifierType:
return marshalObjectIdentifier(out, value.Interface().(ObjectIdentifier))
case bigIntType:
return marshalBigInt(out, value.Interface().(*big.Int))
}
switch v := value; v.Kind() {
case reflect.Bool:
if v.Bool() {
return out.WriteByte(255)
} else {
return out.WriteByte(0)
}
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return marshalInt64(out, int64(v.Int()))
case reflect.Struct:
t := v.Type()
startingField := 0
// If the first element of the structure is a non-empty
// RawContents, then we don't bother serializing the rest.
if t.NumField() > 0 && t.Field(0).Type == rawContentsType {
s := v.Field(0)
if s.Len() > 0 {
bytes := make([]byte, s.Len())
for i := 0; i < s.Len(); i++ {
bytes[i] = uint8(s.Index(i).Uint())
}
/* The RawContents will contain the tag and
* length fields but we'll also be writing
* those ourselves, so we strip them out of
* bytes */
_, err = out.Write(stripTagAndLength(bytes))
return
} else {
startingField = 1
}
}
for i := startingField; i < t.NumField(); i++ {
var pre *forkableWriter
pre, out = out.fork()
err = marshalField(pre, v.Field(i), parseFieldParameters(t.Field(i).Tag.Get("asn1")))
if err != nil {
return
}
}
return
case reflect.Slice:
sliceType := v.Type()
if sliceType.Elem().Kind() == reflect.Uint8 {
bytes := make([]byte, v.Len())
for i := 0; i < v.Len(); i++ {
bytes[i] = uint8(v.Index(i).Uint())
}
_, err = out.Write(bytes)
return
}
var fp fieldParameters
for i := 0; i < v.Len(); i++ {
var pre *forkableWriter
pre, out = out.fork()
err = marshalField(pre, v.Index(i), fp)
if err != nil {
return
}
}
return
case reflect.String:
switch params.stringType {
case tagIA5String:
return marshalIA5String(out, v.String())
case tagPrintableString:
return marshalPrintableString(out, v.String())
default:
return marshalUTF8String(out, v.String())
}
}
return StructuralError{"unknown Go type"}
}
func marshalField(out *forkableWriter, v reflect.Value, params fieldParameters) (err error) {
if !v.IsValid() {
return fmt.Errorf("asn1: cannot marshal nil value")
}
// If the field is an interface{} then recurse into it.
if v.Kind() == reflect.Interface && v.Type().NumMethod() == 0 {
return marshalField(out, v.Elem(), params)
}
if v.Kind() == reflect.Slice && v.Len() == 0 && params.omitEmpty {
return
}
if params.optional && params.defaultValue != nil && canHaveDefaultValue(v.Kind()) {
defaultValue := reflect.New(v.Type()).Elem()
defaultValue.SetInt(*params.defaultValue)
if reflect.DeepEqual(v.Interface(), defaultValue.Interface()) {
return
}
}
// If no default value is given then the zero value for the type is
// assumed to be the default value. This isn't obviously the correct
// behaviour, but it's what Go has traditionally done.
if params.optional && params.defaultValue == nil {
if reflect.DeepEqual(v.Interface(), reflect.Zero(v.Type()).Interface()) {
return
}
}
if v.Type() == rawValueType {
rv := v.Interface().(RawValue)
if len(rv.FullBytes) != 0 {
_, err = out.Write(rv.FullBytes)
} else {
err = marshalTagAndLength(out, tagAndLength{rv.Class, rv.Tag, len(rv.Bytes), rv.IsCompound})
if err != nil {
return
}
_, err = out.Write(rv.Bytes)
}
return
}
tag, isCompound, ok := getUniversalType(v.Type())
if !ok {
err = StructuralError{fmt.Sprintf("unknown Go type: %v", v.Type())}
return
}
class := classUniversal
if params.timeType != 0 && tag != tagUTCTime {
return StructuralError{"explicit time type given to non-time member"}
}
if params.stringType != 0 && tag != tagPrintableString {
return StructuralError{"explicit string type given to non-string member"}
}
switch tag {
case tagPrintableString:
if params.stringType == 0 {
// This is a string without an explicit string type. We'll use
// a PrintableString if the character set in the string is
// sufficiently limited, otherwise we'll use a UTF8String.
for _, r := range v.String() {
if r >= utf8.RuneSelf || !isPrintable(byte(r)) {
if !utf8.ValidString(v.String()) {
return errors.New("asn1: string not valid UTF-8")
}
tag = tagUTF8String
break
}
}
} else {
tag = params.stringType
}
case tagUTCTime:
if params.timeType == tagGeneralizedTime || outsideUTCRange(v.Interface().(time.Time)) {
tag = tagGeneralizedTime
}
}
if params.set {
if tag != tagSequence {
return StructuralError{"non sequence tagged as set"}
}
tag = tagSet
}
tags, body := out.fork()
err = marshalBody(body, v, params)
if err != nil {
return
}
bodyLen := body.Len()
var explicitTag *forkableWriter
if params.explicit {
explicitTag, tags = tags.fork()
}
if !params.explicit && params.tag != nil {
// implicit tag.
tag = *params.tag
class = classContextSpecific
}
err = marshalTagAndLength(tags, tagAndLength{class, tag, bodyLen, isCompound})
if err != nil {
return
}
if params.explicit {
err = marshalTagAndLength(explicitTag, tagAndLength{
class: classContextSpecific,
tag: *params.tag,
length: bodyLen + tags.Len(),
isCompound: true,
})
}
return err
}
// Marshal returns the ASN.1 encoding of val.
//
// In addition to the struct tags recognised by Unmarshal, the following can be
// used:
//
// ia5: causes strings to be marshaled as ASN.1, IA5 strings
// omitempty: causes empty slices to be skipped
// printable: causes strings to be marshaled as ASN.1, PrintableString strings.
// utf8: causes strings to be marshaled as ASN.1, UTF8 strings
func Marshal(val interface{}) ([]byte, error) {
var out bytes.Buffer
v := reflect.ValueOf(val)
f := newForkableWriter()
err := marshalField(f, v, fieldParameters{})
if err != nil {
return nil, err
}
_, err = f.writeTo(&out)
return out.Bytes(), err
}<|fim▁end|> | |
<|file_name|>StrStrHashMapSerializer.py<|end_file_name|><|fim▁begin|>"""
# Licensed to the Apache Software Foundation (ASF) under one *
# or more contributor license agreements. See the NOTICE file *
# distributed with this work for additional information *
# regarding copyright ownership. The ASF licenses this file *
# to you under the Apache License, Version 2.0 (the *
# "License"); you may not use this file except in compliance *
# with the License. You may obtain a copy of the License at *
# *
# http://www.apache.org/licenses/LICENSE-2.0 *
# *
# Unless required by applicable law or agreed to in writing, *
# software distributed under the License is distributed on an *
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
# KIND, either express or implied. See the License for the *
# specific language governing permissions and limitations *
# under the License.
"""
from __future__ import absolute_import
from .StrStrHashMap import *
from ..msg.Field import *
from ..msg.ImportExportHelper import *
from ..msg.StructValue import *
from ..msg.Type import *
from ..msg.ValueFactory import *
from ..support.Class2TypeMap import *
from ..support.Validator_object import *
class StrStrHashMapSerializer(ImportExportHelper):
"""
etch serializer for StrStrHashMap
"""
FIELD_NAME = "keysAndValues"
@classmethod
def init(cls, typ, class2type):
"""
Defines custom fields in the value factory so that the importer can find them
@param typ
@param class2type
"""
field = typ.getField(cls.FIELD_NAME)
class2type.put( StrStrHashMap , typ )
typ.setComponentType( StrStrHashMap )
typ.setImportExportHelper( StrStrHashMapSerializer(typ, field))
typ.putValidator(field, Validator_object.get(1))
typ.lock()
def __init__(self, typ, field):
self.__type = typ
self.__field = field
def importHelper(self, struct):
m = StrStrHashMap()
keysAndValues = struct.get(self.__field)<|fim▁hole|> return m
def exportValue(self, vf, value):
m = StrStrHashMap(value)
keysAndValues = []
for i in m.keys():
keysAndValues.append(i)
keysAndValues.append(m[i])
struct = StructValue(self.__type, vf)
struct.put(self.__field, keysAndValues)
return struct<|fim▁end|> | for i in range(0, len(keysAndValues), 2):
m[keysAndValues[i]] = keysAndValues[i+1] |
<|file_name|>amcache.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""File containing a Windows Registry plugin to parse the AMCache.hve file."""
from __future__ import unicode_literals
import pyregf
from dfdatetime import filetime
from dfdatetime import posix_time
from dfwinreg import definitions as dfwinreg_definitions
from plaso.containers import events
from plaso.containers import time_events
from plaso.lib import definitions
from plaso.parsers import interface
from plaso.parsers import manager
class AMCacheFileEventData(events.EventData):
"""AMCache file event data.
Attributes:
company_name (str): company name that created product file belongs to.
file_description (str): description of file.
file_reference (str): file system file reference, for example 9-1 (MFT
entry - sequence number).
file_size (int): size of file in bytes.
file_version (str): version of file.
full_path (str): full path of file.
language_code (int): language code of file.
product_name (str): product name file belongs to.<|fim▁hole|> to.
sha1 (str): SHA-1 of file.
"""
DATA_TYPE = 'windows:registry:amcache'
def __init__(self):
"""Initializes event data."""
super(AMCacheFileEventData, self).__init__(data_type=self.DATA_TYPE)
self.company_name = None
self.file_description = None
self.file_reference = None
self.file_size = None
self.file_version = None
self.full_path = None
self.language_code = None
self.product_name = None
self.program_identifier = None
self.sha1 = None
class AMCacheProgramEventData(events.EventData):
"""AMCache programs event data.
Attributes:
entry_type (str): type of entry (usually AddRemoveProgram).
file_paths (str): file paths of installed program.
files (str): list of files belonging to program.
language_code (int): language_code of program.
msi_package_code (str): MSI package code of program.
msi_product_code (str): MSI product code of program.
name (str): name of installed program.
package_code (str): package code of program.
product_code (str): product code of program.
publisher (str): publisher of program.
uninstall_key (str): unicode string of uninstall registry key for program.
version (str): version of program.
"""
DATA_TYPE = 'windows:registry:amcache:programs'
def __init__(self):
"""Initializes event data."""
super(AMCacheProgramEventData, self).__init__(data_type=self.DATA_TYPE)
self.entry_type = None
self.file_paths = None
self.files = None
self.language_code = None
self.msi_package_code = None
self.msi_product_code = None
self.name = None
self.package_code = None
self.product_code = None
self.publisher = None
self.uninstall_key = None
self.version = None
class AMCacheParser(interface.FileObjectParser):
"""AMCache Registry plugin for recently run programs."""
NAME = 'amcache'
DATA_FORMAT = 'AMCache Windows NT Registry (AMCache.hve) file'
# Contains: {value name: attribute name}
_FILE_REFERENCE_KEY_VALUES = {
'0': 'product_name',
'1': 'company_name',
'3': 'language_code',
'5': 'file_version',
'6': 'file_size',
'c': 'file_description',
'15': 'full_path',
'100': 'program_identifier',
'101': 'sha1'}
_AMCACHE_COMPILATION_TIME = 'f'
_AMCACHE_FILE_MODIFICATION_TIME = '11'
_AMCACHE_FILE_CREATION_TIME = '12'
_AMCACHE_ENTRY_WRITE_TIME = '17'
_AMCACHE_P_INSTALLATION_TIME = 'a'
_AMCACHE_P_FILES = 'Files'
_PRODUCT_KEY_VALUES = {
'0': 'name',
'1': 'version',
'2': 'publisher',
'3': 'language_code',
'6': 'entry_type',
'7': 'uninstall_key',
'd': 'file_paths',
'f': 'product_code',
'10': 'package_code',
'11': 'msi_product_code',
'12': 'msi_package_code',
}
#TODO Add GetFormatSpecification when issues are fixed with adding
# multiple parsers for the same file format (in this case regf files)
# AddNewSignature ->
# b'\x41\x00\x6d\x00\x63\x00\x61\x00\x63\x00\x68\x00\x65', offset=88
def _GetValueDataAsObject(self, parser_mediator, value):
"""Retrieves the value data as an object.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
value (pyregf_value): value.
Returns:
object: data as a Python type or None if the value cannot be read.
"""
try:
if value.type in (
dfwinreg_definitions.REG_SZ,
dfwinreg_definitions.REG_EXPAND_SZ,
dfwinreg_definitions.REG_LINK):
value_data = value.get_data_as_string()
elif value.type in (
dfwinreg_definitions.REG_DWORD,
dfwinreg_definitions.REG_DWORD_BIG_ENDIAN,
dfwinreg_definitions.REG_QWORD):
value_data = value.get_data_as_integer()
elif value.type == dfwinreg_definitions.REG_MULTI_SZ:
value_data = list(value.get_data_as_multi_string())
else:
value_data = value.data
except (IOError, OverflowError) as exception:
parser_mediator.ProduceExtractionWarning(
'Unable to read data from value: {0:s} with error: {1!s}'.format(
value.name, exception))
return None
return value_data
def _ParseFileKey(self, parser_mediator, file_key):
"""Parses a Root\\File key.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
file_key (pyregf.key): the File key.
"""
for volume_key in file_key.sub_keys:
for file_reference_key in volume_key.sub_keys:
self._ParseFileReferenceKey(parser_mediator, file_reference_key)
def _ParseFileReferenceKey(self, parser_mediator, file_reference_key):
"""Parses a file reference key (sub key of Root\\File\\%VOLUME%) for events.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
file_reference_key (pyregf.key): file reference key.
"""
event_data = AMCacheFileEventData()
try:
if '0000' in file_reference_key.name:
# A NTFS file is a combination of MFT entry and sequence number.
sequence_number, mft_entry = file_reference_key.name.split('0000')
mft_entry = int(mft_entry, 16)
sequence_number = int(sequence_number, 16)
event_data.file_reference = '{0:d}-{1:d}'.format(
mft_entry, sequence_number)
else:
# A FAT file is a single number.
file_reference = int(file_reference_key.name, 16)
event_data.file_reference = '{0:d}'.format(file_reference)
except (ValueError, TypeError):
pass
for value_name, attribute_name in self._FILE_REFERENCE_KEY_VALUES.items():
value = file_reference_key.get_value_by_name(value_name)
if not value:
continue
value_data = self._GetValueDataAsObject(parser_mediator, value)
if attribute_name == 'sha1' and value_data.startswith('0000'):
# Strip off the 4 leading zero's from the sha1 hash.
value_data = value_data[4:]
setattr(event_data, attribute_name, value_data)
amcache_time_value = file_reference_key.get_value_by_name(
self._AMCACHE_ENTRY_WRITE_TIME)
if amcache_time_value:
amcache_time = filetime.Filetime(amcache_time_value.get_data_as_integer())
event = time_events.DateTimeValuesEvent(
amcache_time, definitions.TIME_DESCRIPTION_MODIFICATION)
parser_mediator.ProduceEventWithEventData(event, event_data)
creation_time_value = file_reference_key.get_value_by_name(
self._AMCACHE_FILE_CREATION_TIME)
if creation_time_value:
creation_time = filetime.Filetime(
creation_time_value.get_data_as_integer())
event = time_events.DateTimeValuesEvent(
creation_time, definitions.TIME_DESCRIPTION_CREATION)
parser_mediator.ProduceEventWithEventData(event, event_data)
modification_time_value = file_reference_key.get_value_by_name(
self._AMCACHE_FILE_MODIFICATION_TIME)
if modification_time_value:
modification_time = filetime.Filetime(
modification_time_value.get_data_as_integer())
event = time_events.DateTimeValuesEvent(
modification_time, definitions.TIME_DESCRIPTION_MODIFICATION)
parser_mediator.ProduceEventWithEventData(event, event_data)
compilation_time_value = file_reference_key.get_value_by_name(
self._AMCACHE_COMPILATION_TIME)
if compilation_time_value:
link_time = posix_time.PosixTime(
compilation_time_value.get_data_as_integer())
event = time_events.DateTimeValuesEvent(
link_time, definitions.TIME_DESCRIPTION_CHANGE)
parser_mediator.ProduceEventWithEventData(event, event_data)
def _ParseProgramKey(self, parser_mediator, program_key):
"""Parses a program key (a sub key of Root\\Programs) for events.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
program_key (pyregf_key): program key.
"""
event_data = AMCacheProgramEventData()
for value_name, attribute_name in self._PRODUCT_KEY_VALUES.items():
value = program_key.get_value_by_name(value_name)
if not value:
continue
value_data = self._GetValueDataAsObject(parser_mediator, value)
setattr(event_data, attribute_name, value_data)
installation_time_value = program_key.get_value_by_name(
self._AMCACHE_P_INSTALLATION_TIME)
if installation_time_value:
installation_time = posix_time.PosixTime(
installation_time_value.get_data_as_integer())
event = time_events.DateTimeValuesEvent(
installation_time, definitions.TIME_DESCRIPTION_INSTALLATION)
parser_mediator.ProduceEventWithEventData(event, event_data)
def _ParseProgramsKey(self, parser_mediator, programs_key):
"""Parses a Root\\Programs key.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
programs_key (pyregf.key): the Programs key.
"""
for program_key in programs_key.sub_keys:
self._ParseProgramKey(parser_mediator, program_key)
def ParseFileObject(self, parser_mediator, file_object):
"""Parses an AMCache.hve file-like object for events.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
file_object (dfvfs.FileIO): file-like object.
"""
regf_file = pyregf.file()
try:
regf_file.open_file_object(file_object)
except IOError:
# The error is currently ignored -> see TODO above related to the
# fixing of handling multiple parsers for the same file format.
return
root_key = regf_file.get_key_by_path('Root')
if root_key:
file_key = root_key.get_sub_key_by_path('File')
if file_key:
self._ParseFileKey(parser_mediator, file_key)
programs_key = root_key.get_sub_key_by_path('Programs')
if programs_key:
self._ParseProgramsKey(parser_mediator, programs_key)
regf_file.close()
manager.ParsersManager.RegisterParser(AMCacheParser)<|fim▁end|> | program_identifier (str): GUID of entry under Root/Program key file belongs |
<|file_name|>cubature_multidim.cpp<|end_file_name|><|fim▁begin|>/*
Tests multidimensional numerical integration using cubature.
Copyright 2014 Ilja Honkonen
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "array"
#include "cmath"
#include "cstdlib"
#include "iostream"
#include "limits"
#include "cubature.h"
bool good_solution(
const double integral,
const double exact,
const double max_relative_error
) {
if (
std::fabs(integral - exact)
> max_relative_error * std::max(std::fabs(integral), std::fabs(exact))
) {
return false;
}
return true;
}
int main()
{
constexpr double max_acceptable_error = 1e-9;
// integration range
const std::array<double, 3> start{1, 1, 1}, end{2, 2, 2};
double integral, error, exact;
/*
Returns x*y^2*z^3.
In dimension(s) over which pcubature is integrating, x, y or z
are given by r, the rest are given in extra_data. Format of
extra_data: std::pair<std::array<size_t, 3>, std::array<double, 3>>.
size_t tells the index of the dimension over which integration over
Nth dimension in r actually goes. The first r_dims of
size_t's contain valid data. For the rest the corresponding
double contains the coordinate at which to integrate over the
rest of dimensions of r.
For example when integrating x and z from 0 to 1 with y == -2
integrand could be called by cubature as (999's mark unused data):
f = integrand(
2,
std::array<double, 2>{0, 0}.data(),
std::pair<
std::array<size_t, 3>,
std::array<double, 3>
>{{0, 2, 999}, {999, -2, 999}}.data(),
1,
ret_val.data()
);
and when integrating y from -2 to -1 with x == 0.5 and z == 2/3:
f = integrand(
1,
std::array<double, 1>{-2}.data(),
std::pair<
std::array<size_t, 3>,
std::array<double, 3>
>{{1, 999, 999}, {0.5, 999, 2/3}}.data(),
1,
ret_val.data()
);
*/
auto integrand
= [](
unsigned r_dims,
const double* r,
void* extra_data,
unsigned f_dims,
double* f
) {
if (r_dims == 0) {
return -1;
}
if (f_dims != 1) {
return -2;
}
if (r == NULL) {
std::cerr << __FILE__ << "(" << __LINE__ << ")" << std::endl;
abort();
}
if (f == NULL) {
std::cerr << __FILE__ << "(" << __LINE__ << ")" << std::endl;
abort();
}
if (extra_data == NULL) {
std::cerr << __FILE__ << "(" << __LINE__ << ")" << std::endl;
abort();
}
const auto integration_info
= *static_cast<
std::pair<
std::array<size_t, 3>,
std::array<double, 3>
>*
>(extra_data);
std::array<double, 3> real_r = integration_info.second;
for (size_t i = 0; i < r_dims; i++) {
if (integration_info.first[i] > 3) {
std::cerr << __FILE__ << "(" << __LINE__ << ")" << std::endl;
abort();
}
real_r[integration_info.first[i]] = *(r + i);
}
*f
= std::pow(real_r[0], 1)
* std::pow(real_r[1], 2)
* std::pow(real_r[2], 3);
return 0;
};
std::pair<std::array<size_t, 3>, std::array<double, 3>> integration_parameters;
// 1d over x at y == 3/2 and z == 3/2
exact = std::pow(3./2., 6);
integration_parameters.first[0] = 0;
integration_parameters.first[1] = 999;
integration_parameters.first[2] = 999;
integration_parameters.second[0] = 999;
integration_parameters.second[1] = 3.0 / 2.0;
integration_parameters.second[2] = 3.0 / 2.0;
if (
pcubature(
1,
integrand,
&integration_parameters,
1,
start.data(),
end.data(),
0,
0,
1e-3,
ERROR_LINF,
&integral,
&error
) != 0
) {
std::cerr << __FILE__ << "(" << __LINE__ << ")" << std::endl;
abort();
}
if (not good_solution(integral, exact, max_acceptable_error)) {
std::cerr << __FILE__ << "(" << __LINE__ << "): "
<< "Too large relative error when integrating from " << start[0]
<< " to " << end[0]
<< ", exact: " << exact
<< ", numerical: " << integral
<< std::endl;
abort();
}
// 2d over y and z at x == 3/2
exact = 105.0 / 8.0;
integration_parameters.first[0] = 1;
integration_parameters.first[1] = 2;
integration_parameters.first[2] = 999;
integration_parameters.second[0] = 3.0 / 2.0;
integration_parameters.second[1] = 999;
integration_parameters.second[2] = 999;
if (
pcubature(
1,
integrand,
&integration_parameters,
2,
start.data(),
end.data(),
0,
0,
1e-3,
ERROR_LINF,
&integral,
&error
) != 0
) {
std::cerr << __FILE__ << "(" << __LINE__ << ")" << std::endl;
abort();
}
if (not good_solution(integral, exact, max_acceptable_error)) {
std::cerr << __FILE__ << "(" << __LINE__ << "): "
<< "Too large relative error when integrating from "
<< start[1] << "," << start[2]
<< " to " << end[1] << "," << end[2]
<< ", exact: " << exact
<< ", numerical: " << integral
<< std::endl;
abort();
}
// integrate over the edges of the unit cube from start to end
std::array<
std::pair<
// exact value for below integration parameters
double,
// same as in integration_parameters
std::pair<
std::array<size_t, 3>,
std::array<double, 3>
>
>,
12
> integration_params1d{{
// over x at min y, min z
{3.0 / 2.0, {{0, 999, 999}, {999, 1, 1}}},
// x at min y, max z
{12, {{0, 999, 999}, {999, 1, 2}}},
// x at max y, min z
{6, {{0, 999, 999}, {999, 2, 1}}},
// x at max y, max z
{48, {{0, 999, 999}, {999, 2, 2}}},
// over y at min x, min z
{7.0 / 3.0, {{1, 999, 999}, {1, 999, 1}}},
// y at min x, max z
{56.0 / 3.0, {{1, 999, 999}, {1, 999, 2}}},
// y at max y, min z
{14.0 / 3.0, {{1, 999, 999}, {2, 999, 1}}},
// y at max x, max z
{112.0 / 3.0, {{1, 999, 999}, {2, 999, 2}}},
// over z at min x, min y
{15.0 / 4.0, {{2, 999, 999}, {1, 1, 999}}},
// z at min x, max y
{15, {{2, 999, 999}, {1, 2, 999}}},
// z at max x, min y
{15.0 / 2.0, {{2, 999, 999}, {2, 1, 999}}},
// z at max x, max y
{30, {{2, 999, 999}, {2, 2, 999}}}
}};
for (auto& info: integration_params1d) {
const double exact = info.first;
if (
pcubature(
1,
integrand,
&info.second,
1,
start.data(),
end.data(),
0,
0,
1e-3,
ERROR_LINF,
&integral,
&error
) != 0
) {
std::cerr << __FILE__ << "(" << __LINE__ << ")" << std::endl;
abort();
}
if (not good_solution(integral, exact, max_acceptable_error)) {
std::cerr << __FILE__ << "(" << __LINE__ << "): "
<< "Too large relative error, exact: " << exact
<< ", numerical: " << integral
<< std::endl;
abort();
}
}<|fim▁hole|> double,
std::pair<
std::array<size_t, 3>,
std::array<double, 3>
>
>,
6
> integration_params2d{{
// over x and y at min z
{7.0 / 2.0, {{0, 1, 999}, {999, 999, 1}}},
// x, y at max z
{28, {{0, 1, 999}, {999, 999, 2}}},
// x, z at min y
{45.0 / 8.0, {{0, 2, 999}, {999, 1, 999}}},
// x, z at max y
{45.0 / 2.0, {{0, 2, 999}, {999, 2, 999}}},
// y, z at min x
{35.0 / 4.0, {{1, 2, 999}, {1, 999, 999}}},
// y, z at max x
{35.0 / 2.0, {{1, 2, 999}, {2, 999, 999}}}
}};
for (auto& info: integration_params2d) {
const double exact = info.first;
if (
pcubature(
1,
integrand,
&info.second,
2,
start.data(),
end.data(),
0,
0,
1e-3,
ERROR_LINF,
&integral,
&error
) != 0
) {
std::cerr << __FILE__ << "(" << __LINE__ << ")" << std::endl;
abort();
}
if (not good_solution(integral, exact, max_acceptable_error)) {
std::cerr << __FILE__ << "(" << __LINE__ << "): "
<< "Too large relative error, exact: " << exact
<< ", numerical: " << integral
<< std::endl;
abort();
}
}
return EXIT_SUCCESS;
}<|fim▁end|> |
// integrate over the faces of the unit cube from start to end
std::array<
std::pair< |
<|file_name|>loginForm.py<|end_file_name|><|fim▁begin|>from flask.ext.wtf import Form
from wtforms import StringField, BooleanField, PasswordField
from wtforms.validators import DataRequired<|fim▁hole|> remember_me = BooleanField('remember_me', default=False)<|fim▁end|> |
class LoginForm(Form):
email = StringField('email', validators=[DataRequired()])
password = PasswordField('password', validators=[DataRequired()]) |
<|file_name|>sample.py<|end_file_name|><|fim▁begin|>import json
from typing import NamedTuple
from collections import namedtuple
import kfp
import kfp.dsl as dsl
from kfp import components
from kfp.dsl.types import Integer
def get_current_namespace():
"""Returns current namespace if available, else kubeflow"""
try:
current_namespace = open(
"/var/run/secrets/kubernetes.io/serviceaccount/namespace"
).read()
except:
current_namespace = "kubeflow"
return current_namespace
def create_worker_spec(
worker_num: int = 0
) -> NamedTuple(
"CreatWorkerSpec", [("worker_spec", dict)]
):
"""
Creates pytorch-job worker spec
"""
worker = {}
if worker_num > 0:
worker = {
"replicas": worker_num,
"restartPolicy": "OnFailure",
"template": {
"metadata": {
"annotations": {
"sidecar.istio.io/inject": "false"
}
},
"spec": {
"containers": [
{
"args": [
"--backend",
"gloo",
],
"image": "public.ecr.aws/pytorch-samples/pytorch_dist_mnist:latest",
"name": "pytorch",
"resources": {
"requests": {
"memory": "4Gi",
"cpu": "2000m",
# Uncomment for GPU
# "nvidia.com/gpu": 1,
},
"limits": {
"memory": "4Gi",
"cpu": "2000m",
# Uncomment for GPU
# "nvidia.com/gpu": 1,
},
},
}
]
},
},
}
worker_spec_output = namedtuple(
"MyWorkerOutput", ["worker_spec"]
)
return worker_spec_output(worker)
<|fim▁hole|>worker_spec_op = components.func_to_container_op(
create_worker_spec,
base_image="python:slim",
)
@dsl.pipeline(
name="launch-kubeflow-pytorchjob",
description="An example to launch pytorch.",
)
def mnist_train(
namespace: str = get_current_namespace(),
worker_replicas: int = 1,
ttl_seconds_after_finished: int = -1,
job_timeout_minutes: int = 600,
delete_after_done: bool = False,
):
pytorchjob_launcher_op = components.load_component_from_file(
"./component.yaml"
)
master = {
"replicas": 1,
"restartPolicy": "OnFailure",
"template": {
"metadata": {
"annotations": {
# See https://github.com/kubeflow/website/issues/2011
"sidecar.istio.io/inject": "false"
}
},
"spec": {
"containers": [
{
# To override default command
# "command": [
# "python",
# "/opt/mnist/src/mnist.py"
# ],
"args": [
"--backend",
"gloo",
],
# Or, create your own image from
# https://github.com/kubeflow/pytorch-operator/tree/master/examples/mnist
"image": "public.ecr.aws/pytorch-samples/pytorch_dist_mnist:latest",
"name": "pytorch",
"resources": {
"requests": {
"memory": "4Gi",
"cpu": "2000m",
# Uncomment for GPU
# "nvidia.com/gpu": 1,
},
"limits": {
"memory": "4Gi",
"cpu": "2000m",
# Uncomment for GPU
# "nvidia.com/gpu": 1,
},
},
}
],
# If imagePullSecrets required
# "imagePullSecrets": [
# {"name": "image-pull-secret"},
# ],
},
},
}
worker_spec_create = worker_spec_op(
worker_replicas
)
# Launch and monitor the job with the launcher
pytorchjob_launcher_op(
# Note: name needs to be a unique pytorchjob name in the namespace.
# Using RUN_ID_PLACEHOLDER is one way of getting something unique.
name=f"name-{kfp.dsl.RUN_ID_PLACEHOLDER}",
namespace=namespace,
master_spec=master,
# pass worker_spec as a string because the JSON serializer will convert
# the placeholder for worker_replicas (which it sees as a string) into
# a quoted variable (eg a string) instead of an unquoted variable
# (number). If worker_replicas is quoted in the spec, it will break in
# k8s. See https://github.com/kubeflow/pipelines/issues/4776
worker_spec=worker_spec_create.outputs[
"worker_spec"
],
ttl_seconds_after_finished=ttl_seconds_after_finished,
job_timeout_minutes=job_timeout_minutes,
delete_after_done=delete_after_done,
)
if __name__ == "__main__":
import kfp.compiler as compiler
pipeline_file = "test.tar.gz"
print(
f"Compiling pipeline as {pipeline_file}"
)
compiler.Compiler().compile(
mnist_train, pipeline_file
)
# # To run:
# client = kfp.Client()
# run = client.create_run_from_pipeline_package(
# pipeline_file,
# arguments={},
# run_name="test pytorchjob run"
# )
# print(f"Created run {run}")<|fim▁end|> | |
<|file_name|>services.py<|end_file_name|><|fim▁begin|>import time
import json
import tornado.httpclient
http_client = tornado.httpclient.HTTPClient()
class HTTPServiceProxy(object):
def __init__(self, host='localhost', port=6999, cache_timeout=5.0):
self._host = host
self._port = port
self._cache_timeout = cache_timeout
self._cache = {}
self._cache_time = {}
def get(self, *path):
print 'http://%s:%d/%s' % (self._host, self._port, '/'.join(path))
if path in self._cache and \
self._cache_time[path] + self._cache_timeout > time.time():
return self._cache[path]
try:
response = http_client.fetch('http://%s:%d/%s' % (self._host, self._port, '/'.join(path)))
self._cache[path] = response.body
self._cache_time[path] = time.time()
return response.body
except tornado.httpclient.HTTPError as e:
if path in self._cache:
del self._cache[path]
return None
def post(self, *path, **kwargs):
url = 'http://%s:%d/%s' % (self._host, self._port, '/'.join(path))
print url
try:
request = tornado.httpclient.HTTPRequest(url, method='POST', body=json.dumps(kwargs))
response = http_client.fetch(request)
return response.body
except tornado.httpclient.HTTPError as e:<|fim▁hole|>class MonitorProxy(HTTPServiceProxy):
"""
Proxy object for the challenge monitor service.
"""
def __init__(self):
super(MonitorProxy, self).__init__(host='localhost', port=6999, cache_timeout=0.0)
@property
def challenges(self):
return json.loads(self.get('list'))
@property
def visible_challenges(self):
return json.loads(self.get('list_visible'))
def status(self, challenge):
try:
return json.loads(self.get('status')).get(challenge, None)
except TypeError:
return None
def show(self, challenge):
self.post('show', challenge)
def hide(self, challenge):
self.post('hide', challenge)
def start(self, challenge):
self.post('start', challenge)
def stop(self, challenge):
self.post('stop', challenge)
def metadata(self, challenge):
try:
return json.loads(self.get('metadata', challenge))
except TypeError:
return None
def fetch_file(self, challenge, filename):
return self.get('static_files', challenge, filename)
monitor = MonitorProxy()
class AuthProxy(HTTPServiceProxy):
"""
Proxy object for the user authentication serivce.
"""
def __init__(self, host='127.0.0.1', port=6998, cache_timeout=1.0):
super(AuthProxy, self).__init__(host='localhost', port=6998, cache_timeout=1.0)
@property
def users(self):
return json.loads(self.get('list'))
def create_user(self, user):
self.post('create_user', user)
def is_admin(self, user):
try:
return json.loads(self.post('get_tag', user, key='is_admin', default='false'))
except (ValueError, TypeError):
return False
def is_playing(self, user):
try:
return json.loads(self.post('get_tag', user, key='is_playing', default='true'))
except (ValueError, TypeError):
return False
def set_password(self, user, password):
self.post('set_password', user, password=password)
def check_password(self, user, password):
try:
return json.loads(self.post('check_password', user, password=password))
except TypeError:
return False
def set_tag(self, user, key, value):
self.post('set_tag', user, key=key, value=json.dumps(value))
def get_tag(self, user, key, default=''):
return self.post('get_tag', user, key=key, default=default)
auth = AuthProxy()
class ScoreboardProxy(HTTPServiceProxy):
"""
Proxy object for the scoreboard service.
"""
def __init__(self, host='127.0.0.1', port=6997, cache_timeout=1.0):
super(ScoreboardProxy, self).__init__(host='localhost', port=6997, cache_timeout=1.0)
def capture(self, user, challenge):
self.post('capture', challenge, user=user)
def get_captures_by_user(self, user):
return json.loads(self.get('get_captures_by_user', user))
def get_captures_by_challenge(self, challenge):
return json.loads(self.get('get_captures_by_challenge', challenge))
scoreboard = ScoreboardProxy()<|fim▁end|> | return None
|
<|file_name|>svg_example.py<|end_file_name|><|fim▁begin|># Example made by OssiLehtinen
#
from svgpathtools import svg2paths, wsvg
import numpy as np
import uArmRobot
import time
#Configure Serial Port
#serialport = "com3" # for windows
serialport = "/dev/ttyACM0" # for linux like system
# Connect to uArm
myRobot = uArmRobot.robot(serialport,0) # user 0 for firmware < v4 and use 1 for firmware v4
myRobot.debug = True # Enable / Disable debug output on screen, by default disabled
myRobot.connect()
myRobot.mode(1) # Set mode to Normal
# Read in the svg
paths, attributes = svg2paths('drawing.svg')
scale = .25
steps_per_seg = 3
coords = []
x_offset = 200
height = 90
draw_speed = 1000
# Convert the paths to a list of coordinates
for i in range(len(paths)):
path = paths[i]
attribute = attributes[i]
# A crude check for whether a path should be drawn. Does it have a style defined?
if 'style' in attribute:
for seg in path:
segcoords = []
for p in range(steps_per_seg+1):
cp = seg.point(float(p)/float(steps_per_seg))
segcoords.append([-np.real(cp)*scale+x_offset, np.imag(cp)*scale])
coords.append(segcoords)
# The starting point
myRobot.goto(coords[0][0][0], coords[0][0][1], height, 6000)
for seg in coords:
myRobot.goto(seg[0][0], seg[0][1], height, 6000)
time.sleep(0.15)
for p in seg:
myRobot.goto_laser(p[0], p[1], height, draw_speed)<|fim▁hole|>myRobot.goto(coords[0][0][0], coords[0][0][1], height, 6000)<|fim▁end|> |
# Back to the starting point (and turn the laser off) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>callback_functions = ["collision_enter", "collision_stay", "collision_exit"]
length_area_world = 75
raise_exception = False
# import all required modules
from game import *
from gameobject import *
from contracts import *
from configuration import *
from component import *
from loader import *
from physics import *
from scene import *
from timeutils import *
from builtincomponents import *
from builtincomponents.camera import *
from builtincomponents.collider import *
from builtincomponents.sprite_renderer import *<|fim▁hole|>from builtincomponents.transform import *<|fim▁end|> | |
<|file_name|>pythonrc.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
r"""
PYTHONRC
========
Initialization script for the interactive Python interpreter. Its main purpose
is to enhance the overall user experience when working in such an environment
by adding some niceties to the standard console.
It also works with IPython and BPython, although its utility in that kind of
scenarios can be argued.
Tested in GNU/Linux with Python versions 2.7 and 3.4.
Please read the Installation section below.
Features
--------
- User input completion
+ Introduces a completion mechanism for inputted commands in Python 2.
+ In Python 3, where the standard console is a lot nicer, it just
impersonates the default completion machinery to keep the consistency with
the behavior in Python 2 (and so it's still possible to adapt it to the
user's needs).
- Command History
+ Creates a callable, singleton object called `history`, placing it into
the `__builtins__` object to make it easily available, which enables the
handling of the command history (saving some input lines to a file of your
choice, listing the commands introduced so far, etc.). Try simply
`history()` on the Python prompt to see it in action; inspect its members
(with `dir(history)` or `help(history.write)`) for more information.
- Color prompt
+ Puts a colorful prompt in place, if the terminal supports it.
- Implementation of a bash's "operate-and-get-next" clone
+ Enables a quick re-edition of a code block from the history by
successive keypresses of the `Ctrl-o` hotkey.
Installation
------------
- You must define in your environment (in GNU/Linux and MacOS X that usually
means your `~/.bashrc` file) the variable 'PYTHONSTARTUP' containing the path
to `pythonrc.py`.
- It is also highly recommended to define the variable 'PYTHON_HISTORY_FILE'.
Remember that BPython (unlike the standard interpreter or IPython) ignores that
variable, so you'll have to configure it as well by other means to be able to
use the same history file there (for instance, in Linux, the file
`~/.config/bpython/config` is a good place to start, but please read BPython's
documentation).
### Example configurations
- Extract of `~/.bashrc`
```sh
# python
export PYTHONSTARTUP=~/.python/pythonrc.py
export PYTHON_HISTORY_FILE=~/.python/.python_history
## You may want to also uncomment some of this lines if using an old
## version of virtualenvwrapper
# export VIRTUALENVWRAPPER_PYTHON=/usr/bin/python3.4
# export WORKON_HOME=~/.python/virtualenvs
# source $(which virtualenvwrapper.sh)
```
- Extract of `~/.config/bpython/config`
```
[general]
color_scheme = default
hist_file = ~/.python/.python_history
hist_lenght = 1000
```
Bugs / Caveats / Future enhancements
------------------------------------
- No module/package introspection for the last argument in commands of the form
`from <package> import <not_completing_this>` (this, in fact, could be a not so
bad thing, because it doesn't execute side effects, e.g. modules' init code).
- Depending on the user's system, the compilation of the packages' and modules'
list for completing `import ...` and `from ... import ...` commands can take a
long time, especially the first time it is invoked.
- When completing things like a method's name, the default is to also include
the closing parenthesis along with the opening one, but the cursor is placed
after it no matter what, instead of between them. This is because of the
python module `readline`'s limitations.
You can turn off the inclusion of the closing parenthesis; if you do so, you
might be also interested in modifying the variable called
`dict_keywords_postfix` (especially the strings that act as that dictionary's
indexes).
- IPython has its own `%history` magic. I did my best to not interfere with
it, but I don't know the actual consequences. Also, it's debatable if it
even makes sense to use this file with IPython and/or BPython (though having
a unified history for all the environments is really nice).
You could define some bash aliases like
```sh
alias ipython='PYTHONSTARTUP="" ipython'
alias bpython='PYTHONSTARTUP="" bpython'
```
to be on the safer side.
- Could have used the module `six` for better clarity. Right now it uses my own
made up stubs to work on both Python 2 and 3.
- Needs better comments and documentation, especially the part on history
handling.
- Probably a lot more. Feel free to file bug reports ;-)
"""
def init():
# color prompt
import sys
import os
term_with_colors = ['xterm', 'xterm-color', 'xterm-256color', 'linux',
'screen', 'screen-256color', 'screen-bce']
red = ''
green = ''
reset = ''
if os.environ.get('TERM') in term_with_colors:
escapes_pattern = '\001\033[%sm\002' # \001 and \002 mark non-printing
red = escapes_pattern % '31'
green = escapes_pattern % '32'
reset = escapes_pattern % '0'
sys.ps1 = red + '>>> ' + reset
sys.ps2 = green + '... ' + reset
red = red.strip('\001\002')
green = green.strip('\001\002')
reset = reset.strip('\001\002')
# readline (tab-completion, history)
try:
import readline
except ImportError:
print(red + "Module 'readline' not available. Skipping user customizations." + reset)
return
import rlcompleter
import atexit
from pwd import getpwall
from os.path import isfile, isdir, expanduser, \
join as joinpath, split as splitpath, sep as pathsep
default_history_file = '~/.pythonhist'
majver = sys.version_info[0]
# Both BPython and Django shell change the nature of the __builtins__
# object. This hack workarounds that:
def builtin_setattr(attr, value):
if hasattr(__builtins__, '__dict__'):
setattr(__builtins__, attr, value)
else:
__builtins__[attr] = value
def builtin_getattr(attr):
if hasattr(__builtins__, '__dict__'):
return getattr(__builtins__, attr)
else:
return __builtins__[attr]
# My own "six" library, where I define the following stubs:
# * myrange for xrange() (python2) / range() (python3)
# * exec_stub for exec()
# * iteritems for dict.iteritems() (python2) / list(dict.items()) (python3)
# I could have done "from six import iteritems" and such instead of this
if majver == 2:
myrange = xrange
def exec_stub(textcode, globalz=None, localz=None):
# the parenthesis make it valid python3 syntax, do nothing at all
exec (textcode) in globalz, localz
def iteritems(d):
return d.iteritems()
elif majver == 3:
myrange = range
# def exec_stub(textcode, globalz=None, localz=None):
# # the "in" & "," make it valid python2 syntax, do nothing useful
# exec(textcode, globalz, localz) in globalz #, localz
# the three previous lines work, but this is better
exec_stub = builtin_getattr('exec')
def iteritems(d):
return list(d.items())
# AUXILIARY CLASSES
# History management
class History:
set_length = readline.set_history_length
get_length = readline.get_history_length
get_current_length = readline.get_current_history_length
get_item = readline.get_history_item
write = readline.write_history_file
def __init__(self, path=default_history_file, length=500):
self.path = path
self.reload(path)
self.set_length(length)
def __exit__(self):
print("Saving history (%s)..." % self.path)
self.write(expanduser(self.path))
def __repr__(self):
"""print out current history information"""
# length = self.get_current_length()
# command = self.get_item(length)
# if command == 'history':
# return "\n".join(self.get_item(i)
# for i in myrange(1, length+1))
# else:
# return '<%s instance>' % str(self.__class__)
return '<%s instance>' % str(self.__class__)
def __call__(self, pos=None, end=None):
"""print out current history information with line number"""
if not pos:
pos = 1
elif not end:
end = pos
for i, item in self.iterator(pos, end, enumerate_it=True):
print('%i:\t%s' % (i, item))
def iterator(self, pos, end, enumerate_it=False):
length = self.get_current_length()
if not pos:
pos = 1
if not end:
end = length
pos = min(pos, length)
if pos < 0:
pos = max(1, pos + length + 1)
end = min(end, length)
if end < 0:
end = max(1, end + length + 1)
if enumerate_it:
return ((i, self.get_item(i)) for i in myrange(pos, end + 1))
else:
return (self.get_item(i) for i in myrange(pos, end + 1))
def reload(self, path=""):
"""clear the current history and reload it from saved"""
readline.clear_history()
if isfile(path):
self.path = path
readline.read_history_file(expanduser(self.path))
def save(self, filename, pos=None, end=None):
"""write history number from pos to end into filename file"""
with open(filename, 'w') as f:
for item in self.iterator(pos, end):
f.write(item)
f.write('\n')
def execute(self, pos, end=None):
"""execute history number from pos to end"""
if not end:
end = pos
commands = []
for item in self.iterator(pos, end):
commands.append(item)
readline.add_history(item)
exec_stub("\n".join(commands), globals())
# comment the previous two lines and uncomment those below
# if you prefer to re-add to history just the commands that
# executed without problems
# try:
# exec_stub("\n".join(commands), globals())
# except:
# raise
# else:
# for item in commands:
# readline.add_history(cmdlist)
# Activate completion and make it smarter
class Irlcompleter(rlcompleter.Completer):
"""
This class enables the insertion of "indentation" if there's no text
for completion.
The default "indentation" is four spaces. You can initialize with '\t'
as the tab if you wish to use a genuine tab.
Also, compared to the default rlcompleter, this one performs some
additional useful things, like file completion for string constants
and addition of some decorations to keywords (namely, closing
parenthesis, and whatever you've defined in dict_keywords_postfix --
spaces, colons, etc.)
"""
def __init__(
self,
indent_str=' ',
delims=readline.get_completer_delims(),
binds=('tab: complete', ),
dict_keywords_postfix={" ": ["import", "from"], },
add_closing_parenthesis=True
):
rlcompleter.Completer.__init__(self, namespace=globals())
readline.set_completer_delims(delims)
self.indent_str_list = [indent_str, None]
for bind in binds:
readline.parse_and_bind(bind)
self.dict_keywords_postfix = dict_keywords_postfix
self.add_closing_parenthesis = add_closing_parenthesis
def complete(self, text, state):
line = readline.get_line_buffer()
stripped_line = line.lstrip()
# libraries
if stripped_line.startswith('import '):
value = self.complete_libs(text, state)
elif stripped_line.startswith('from '):
pos = readline.get_begidx()
# end = readline.get_endidx()
if line[:pos].strip() == 'from':
value = self.complete_libs(text, state) + " "
elif state == 0 and line.find(' import ') == -1:
value = 'import '
else:
# Here we could do module introspection (ugh)
value = None
# indentation, files and keywords/identifiers
elif text == '':
value = self.indent_str_list[state]<|fim▁hole|> return value
def complete_keywords(self, text, state):
txt = rlcompleter.Completer.complete(self, text, state)
if txt is None:
return None
if txt.endswith('('):
if self.add_closing_parenthesis:
return txt + ')'
else:
return txt
for postfix, words in iteritems(self.dict_keywords_postfix):
if txt in words:
return txt + postfix
return txt
def complete_files(self, text, state):
str_delim = text[0]
path = text[1:]
if path.startswith("~/"):
path = expanduser("~/") + path[2:]
elif path.startswith("~"):
i = path.find(pathsep)
if i > 0:
path = expanduser(path[:i]) + path[i:]
else:
return [
str_delim + "~" + i[0] + pathsep
for i in getpwall()
if i[0].startswith(path[1:])
][state]
dir, fname = splitpath(path)
if not dir:
dir = os.curdir
return [
str_delim + joinpath(dir, i)
for i in os.listdir(dir)
if i.startswith(fname)
][state]
def complete_libs(self, text, state):
libs = {}
for i in sys.path:
try:
if i == '':
i = os.curdir
files = os.listdir(i)
for j in files:
filename = joinpath(i, j)
if isfile(filename):
for s in [".py", ".pyc", ".so"]:
if j.endswith(s):
j = j[:-len(s)]
pos = j.find(".")
if pos > 0:
j = j[:pos]
libs[j] = None
break
elif isdir(filename):
for s in ["__init__.py", "__init__.pyc"]:
if isfile(joinpath(filename, s)):
libs[j] = None
except OSError:
pass
for j in sys.builtin_module_names:
libs[j] = None
libs = sorted(j for j in libs.keys() if j.startswith(text))
return libs[state]
# DEFINITIONS:
# history file path and length
history_length = 1000
history_path = os.getenv("PYTHON_HISTORY_FILE", default_history_file)
# bindings for readline (assign completion key, etc.)
# readline_binds = (
# 'tab: tab_complete',
# '"\C-o": operate-and-get-next', # exists in bash but not in readline
# )
# completion delimiters
# we erase ", ', ~ and / so file completion works
# readline_delims = ' \t\n`!@#$%^&*()-=+[{]}\\|;:,<>?'
readline_delims = readline.get_completer_delims()\
.replace("~", "", 1)\
.replace("/", "", 1)\
.replace("'", "", 1)\
.replace('"', '', 1)
# dictionary of keywords to be postfixed by a string
dict_keywords_postfix = {
":": ["else", "try", "finally", ],
" ": ["import", "from", "or", "and", "not", "if", "elif", ],
" ():": ["def", ] # "class", ]
}
# DO IT
completer = Irlcompleter(delims=readline_delims, # binds=readline_binds,
dict_keywords_postfix=dict_keywords_postfix)
readline.set_completer(completer.complete)
if not os.access(history_path, os.F_OK):
print(green + 'History file %s does not exist. Creating it...' % history_path + reset)
with open(history_path, 'w') as f:
pass
elif not os.access(history_path, os.R_OK|os.W_OK):
print(red + 'History file %s has wrong permissions!' % history_path + reset)
history = History(history_path, history_length)
#
# Hack: Implementation of bash-like "operate-and-get-next" (Ctrl-o)
#
try:
# We'll hook the C functions that we need from the underlying
# libreadline implementation that aren't exposed by the readline
# python module.
from ctypes import CDLL, CFUNCTYPE, c_int
librl = CDLL(readline.__file__)
rl_callback = CFUNCTYPE(c_int, c_int, c_int)
rl_int_void = CFUNCTYPE(c_int)
readline.add_defun = librl.rl_add_defun # didn't bother to define args
readline.accept_line = rl_callback(librl.rl_newline)
readline.previous_history = rl_callback(librl.rl_get_previous_history)
readline.where_history = rl_int_void(librl.where_history)
def pre_input_hook_factory(offset, char):
def rewind_history_pre_input_hook():
# Uninstall this hook, rewind history and redisplay
readline.set_pre_input_hook(None)
result = readline.previous_history(offset, char)
readline.redisplay()
return result
return rewind_history_pre_input_hook
@rl_callback
def operate_and_get_next(count, char):
current_line = readline.where_history()
offset = readline.get_current_history_length() - current_line
# Accept the current line and set the hook to rewind history
result = readline.accept_line(1, char)
readline.set_pre_input_hook(pre_input_hook_factory(offset, char))
return result
# Hook our function to Ctrl-o, and hold a reference to it to avoid GC
readline.add_defun('operate-and-get-next', operate_and_get_next, ord("O") & 0x1f)
history._readline_functions = [operate_and_get_next]
except (ImportError, OSError, AttributeError) as e:
print(red + """
Couldn't either bridge the needed methods from binary 'readline'
or properly install our implementation of 'operate-and-get-next'.
Skipping the hack. Underlying error:
""" + reset + repr(e))
builtin_setattr('history', history)
atexit.register(history.__exit__)
# run the initialization and clean up the environment afterwards
init()
del init<|fim▁end|> | elif text[0] in ('"', "'"):
value = self.complete_files(text, state)
else:
value = self.complete_keywords(text, state) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|> __version__ = '.'.join(map(str, VERSION))
else: # pragma: no cover
__version__ = '.'.join(map(str, VERSION[:-1]))<|fim▁end|> | # -*- coding: utf-8 -*-
VERSION = (0, 1, 6, 'final')
if VERSION[-1] != "final": # pragma: no cover |
<|file_name|>solve-dialog.component.spec.ts<|end_file_name|><|fim▁begin|>/**
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { async, ComponentFixture, TestBed, inject } from '@angular/core/testing';
import { SolveDialogComponent } from './solve-dialog.component';
import { DialogCloseButtonComponent } from '../dialog-close-button/dialog-close-button.component';
import { RadioListComponent } from '../radio-list/radio-list.component';
import { DialogService } from '../dialog.service';
import { By } from '@angular/platform-browser';
import { IncompleteSolutionDialogComponent } from '../incomplete-solution-dialog/incomplete-solution-dialog.component';
import { IncorrectSolutionDialogComponent } from '../incorrect-solution-dialog/incorrect-solution-dialog.component';
import { WinDialogComponent } from '../win-dialog/win-dialog.component';
describe('SolveDialogComponent', () => {
let component: SolveDialogComponent;
let fixture: ComponentFixture<SolveDialogComponent>;
beforeEach(async(() => {
TestBed.configureTestingModule({
declarations: [
SolveDialogComponent,
DialogCloseButtonComponent,
RadioListComponent,
],
})
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(SolveDialogComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
it('should open incomplete solution dialog', inject(
[DialogService],
(dialogService: DialogService) => {
spyOn(dialogService, 'open');
fixture.debugElement.query(
By.css('input[type="radio"][value="Mrs. Bluejay"]')
).nativeElement.click();
fixture.debugElement.query(By.css('.submit-button')).nativeElement.click();
expect(dialogService.open).toHaveBeenCalledWith(IncompleteSolutionDialogComponent);
}
));
it('should open incorrect solution dialog', inject(
[DialogService],
(dialogService: DialogService) => {
spyOn(dialogService, 'open');
fixture.debugElement.query(
By.css('input[type="radio"][value="Mrs. Bluejay"]')
).nativeElement.click();
fixture.debugElement.query(
By.css('input[type="radio"][value="Living Room"]')
).nativeElement.click();
fixture.debugElement.query(
By.css('input[type="radio"][value="A Hollow Bible"]')
).nativeElement.click();<|fim▁hole|> expect(dialogService.open).toHaveBeenCalledWith(IncorrectSolutionDialogComponent);
}
));
it('should open win dialog', inject(
[DialogService],
(dialogService: DialogService) => {
spyOn(dialogService, 'open');
fixture.debugElement.query(
By.css('input[type="radio"][value="Professor Pluot"]')
).nativeElement.click();
fixture.debugElement.query(
By.css('input[type="radio"][value="Living Room"]')
).nativeElement.click();
fixture.debugElement.query(
By.css('input[type="radio"][value="A Hollow Bible"]')
).nativeElement.click();
fixture.debugElement.query(By.css('.submit-button')).nativeElement.click();
expect(dialogService.open).toHaveBeenCalledWith(WinDialogComponent);
}
));
it('should close the dialog when "No" button is clicked', inject(
[DialogService],
async (dialogService: DialogService) => {
let resetCalled = false;
fixture.debugElement.nativeElement.addEventListener('reset', () => {
resetCalled = true;
});
spyOn(dialogService, 'close');
fixture.debugElement.query(
By.css('input[type="radio"][value="Professor Pluot"]')
).nativeElement.click();
fixture.debugElement.query(
By.css('input[type="radio"][value="Living Room"]')
).nativeElement.click();
fixture.debugElement.query(
By.css('input[type="radio"][value="A Hollow Bible"]')
).nativeElement.click();
fixture.debugElement.query(By.css('.cancel-button')).nativeElement.click();
expect(dialogService.close).toHaveBeenCalled();
expect(resetCalled).toBe(true);
}
));
});<|fim▁end|> | fixture.debugElement.query(By.css('.submit-button')).nativeElement.click(); |
<|file_name|>geoWidgets.py<|end_file_name|><|fim▁begin|># -*- coding: ISO-8859-1 -*-<|fim▁hole|># A class that corresponds to an HTML form widget,
# e.g. <input type="text"> or <textarea>.
# This handles rendering of the widget as HTML.
import json
from django.template.loader import render_to_string
from .conf import settings
from django.utils import six
from django import forms
from django.forms import widgets, MultiWidget, Media
from django.utils.html import conditional_escape, format_html, format_html_join
from django.forms.util import flatatt, to_current_timezone
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.utils.safestring import mark_safe
from django.templatetags.static import static
from . import LatLng
# classe widget utilizzata dal campo forms.geoFields LatLngField
class LatLngTextInputWidget(forms.MultiWidget):
def __init__(self, attrs=None):
widgets = (
forms.TextInput(),
forms.TextInput(),
)
super(LatLngTextInputWidget, self).__init__(widgets, attrs)
def decompress(self, value):
if isinstance(value, six.text_type):
return value.rsplit(',')
if value:
return [value.lat, value.lng]
return [None,None]
def format_output(self, rendered_widgets):
return render_to_string('geopositionmap/widgets/geopositionmap.html', {
'latitude': {
'html': rendered_widgets[0],
'label': _("latitude"),
},
'longitude': {
'html': rendered_widgets[1],
'label': _("longitude"),
},
'config': {
'map_widget_height': settings.GEOPOSITIONMAP_MAP_WIDGET_HEIGHT,
'map_options': json.dumps(settings.GEOPOSITIONMAP_MAP_OPTIONS),
'marker_options': json.dumps(settings.GEOPOSITIONMAP_MARKER_OPTIONS),
'google_view': json.dumps(settings.GEOPOSITIONMAP_GOOGLE_VIEW),
'osm_view': json.dumps(settings.GEOPOSITIONMAP_OSM_VIEW),
}
})
class Media:
#extend = False
css = {
'all': (
'geopositionmap/geopositionmap.css',
'//cdn.leafletjs.com/leaflet-0.7.3/leaflet.css',
)
}
js = (
'//maps.google.com/maps/api/js?sensor=false',
'//cdn.leafletjs.com/leaflet-0.7.3/leaflet.js',
'geopositionmap/geopositionmap.js',
)<|fim▁end|> | """
Form Widget classes specific to the geoSite admin site.
"""
|
<|file_name|>TestOpenStackClientCloudLive.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
SlipStream Client
=====
Copyright (C) 2015 SixSq Sarl (sixsq.com)
=====
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import unittest
from slipstream.NodeDecorator import (NodeDecorator, RUN_CATEGORY_IMAGE)
from slipstream.NodeInstance import NodeInstance
from slipstream.UserInfo import UserInfo
import slipstream.util as util
from slipstream_openstack.OpenStackClientCloud import \
OpenStackClientCloud, FLOATING_IPS_KEY
from slipstream_openstack.OpenStackClientCloud import searchInObjectList
from slipstream_openstack.TestBaseLive import TestBaseLive
CONFIG_FILE = os.path.join(os.path.dirname(__file__),
'pyunit.credentials.properties')
# Example configuration file.
"""
[Test]
openstack.location = LVS
openstack.username = [email protected]
openstack.password = xxx
openstack.imageid = d02ee717-33f7-478b-ba14-02196978fea8<|fim▁hole|>
class TestOpenStackClientCloudLive(TestBaseLive):
cin = 'openstack'
conf_keys = ['endpoint',
'tenant-name',
'username',
'password',
'domain-name',
'identityVersion',
'serviceType',
'serviceName',
'serviceRegion',
'tenant-name',
'network.type',
FLOATING_IPS_KEY,
UserInfo.NETWORK_PUBLIC_KEY,
UserInfo.NETWORK_PRIVATE_KEY]
def _update_user_info(self):
self.user_info[self.construct_key(FLOATING_IPS_KEY)] = \
util.str2bool(self.user_info.get_cloud(FLOATING_IPS_KEY, 'false'))
def setUp(self):
self._setUp(OpenStackClientCloud, CONFIG_FILE, self.conf_keys)
self._update_user_info()
security_groups = self._conf_val('security.groups')
image_id = self._conf_val('imageid')
instance_type = self._conf_val('intance.type', 'm1.tiny')
network_type = self._conf_val('network.type')
node_name = 'test_node'
self.node_instances = {}
for i in range(1, self.multiplicity + 1):
node_instance_name = node_name + '.' + str(i)
self.node_instances[node_instance_name] = NodeInstance({
NodeDecorator.NODE_NAME_KEY: node_name,
NodeDecorator.NODE_INSTANCE_NAME_KEY: node_instance_name,
'cloudservice': self.cin,
'image.platform': 'Ubuntu',
'image.imageId': image_id,
'image.id': image_id,
self.construct_key('instance.type'): instance_type,
self.construct_key('security.groups'): security_groups,
'network': network_type
})
self.node_instance = NodeInstance({
NodeDecorator.NODE_NAME_KEY: NodeDecorator.MACHINE_NAME,
NodeDecorator.NODE_INSTANCE_NAME_KEY: NodeDecorator.MACHINE_NAME,
'cloudservice': self.cin,
'image.platform': 'Ubuntu',
'image.imageId': image_id,
'image.id': image_id,
self.construct_key('instance.type'): instance_type,
self.construct_key('security.groups'): security_groups,
'network': network_type,
'image.prerecipe':
"""#!/bin/sh
set -e
set -x
ls -l /tmp
dpkg -l | egrep "nano|lvm" || true
""",
'image.packages': ['lvm2', 'nano'],
'image.recipe':
"""#!/bin/sh
set -e
set -x
dpkg -l | egrep "nano|lvm" || true
lvs
"""
})
self.node_instance_with_additional_disk = NodeInstance({
NodeDecorator.NODE_NAME_KEY: NodeDecorator.MACHINE_NAME,
NodeDecorator.NODE_INSTANCE_NAME_KEY: NodeDecorator.MACHINE_NAME,
'cloudservice': self.cin,
'image.platform': 'Ubuntu',
'image.imageId': image_id,
'image.id': image_id,
self.construct_key('instance.type'): instance_type,
'network': network_type,
'extra.disk.volatile': '20'
})
def tearDown(self):
os.environ.pop('SLIPSTREAM_CONNECTOR_INSTANCE')
os.environ.pop('SLIPSTREAM_BOOTSTRAP_BIN')
self.client = None
self.ch = None
def xtest_1_start_stop_images(self):
self._test_start_stop_images()
def xtest_2_buildImage(self):
self.client.run_category = RUN_CATEGORY_IMAGE
self.client.start_nodes_and_clients(self.user_info, {NodeDecorator.MACHINE_NAME: self.node_instance})
instances_details = self.client.get_vms_details()
assert instances_details
assert instances_details[0][NodeDecorator.MACHINE_NAME]
new_id = self.client.build_image(self.user_info, self.node_instance)
assert new_id
def xtest_3_list_instances(self):
self.client._initialization(self.user_info)
assert isinstance(self.client.list_instances(), list)
def xtest_4_start_image_with_extra_disk(self):
self.client.run_category = RUN_CATEGORY_IMAGE
self.client.start_nodes_and_clients(self.user_info,
{NodeDecorator.MACHINE_NAME: self.node_instance_with_additional_disk})
vm_id = self.client.get_vms()[NodeDecorator.MACHINE_NAME]['id']
nodes = self.client.list_instances()
assert searchInObjectList(nodes, 'id', vm_id).extra['volumes_attached']
self.client.stop_deployment()
if __name__ == '__main__':
unittest.main()<|fim▁end|> | openstack.ssh.username = ubuntu
openstack.ssh.password = yyy
""" # pylint: disable=pointless-string-statement |
<|file_name|>apps.py<|end_file_name|><|fim▁begin|>from django.apps import AppConfig
<|fim▁hole|>class CirculoConfig(AppConfig):
name = 'circulo'<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![cfg_attr(not(feature = "with-syntex"), feature(rustc_private))]
#![deny(warnings)]
extern crate dotenv;
#[cfg(feature = "with-syntex")]
extern crate syntex;
#[cfg(feature = "with-syntex")]
extern crate syntex_syntax as syntax;
#[cfg(not(feature = "with-syntex"))]
extern crate syntax;
#[cfg(not(feature = "with-syntex"))]
extern crate rustc_plugin;
mod dotenv_macro;
#[cfg(feature = "with-syntex")]
pub fn register(reg: &mut syntex::Registry) {
reg.add_macro("dotenv", dotenv_macro::expand_dotenv);
}
<|fim▁hole|>#[cfg(not(feature = "with-syntex"))]
pub fn register(reg: &mut rustc_plugin::Registry) {
reg.register_macro("dotenv", dotenv_macro::expand_dotenv);
}<|fim▁end|> | |
<|file_name|>regions-bounded-by-send.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test which of the builtin types are considered sendable. The tests
// in this file all test region bound and lifetime violations that are
// detected during type check.
fn assert_send<T:Send>() { }
trait Dummy { }
<|fim▁hole|>fn static_lifime_ok<'a,T,U:Send>(_: &'a int) {
assert_send::<&'static int>();
assert_send::<&'static str>();
assert_send::<&'static [int]>();
// whether or not they are mutable
assert_send::<&'static mut int>();
}
// otherwise lifetime pointers are not ok
fn param_not_ok<'a>(x: &'a int) {
assert_send::<&'a int>(); //~ ERROR does not fulfill
}
fn param_not_ok1<'a>(_: &'a int) {
assert_send::<&'a str>(); //~ ERROR does not fulfill
}
fn param_not_ok2<'a>(_: &'a int) {
assert_send::<&'a [int]>(); //~ ERROR does not fulfill
}
// boxes are ok
fn box_ok() {
assert_send::<Box<int>>();
assert_send::<String>();
assert_send::<Vec<int>>();
}
// but not if they own a bad thing
fn box_with_region_not_ok<'a>() {
assert_send::<Box<&'a int>>(); //~ ERROR does not fulfill
}
// objects with insufficient bounds no ok
fn object_with_random_bound_not_ok<'a>() {
assert_send::<&'a (Dummy+'a)>();
//~^ ERROR not implemented
}
fn object_with_send_bound_not_ok<'a>() {
assert_send::<&'a (Dummy+Send)>();
//~^ ERROR does not fulfill
}
fn proc_with_lifetime_not_ok<'a>() {
assert_send::<proc():'a>();
//~^ ERROR not implemented
}
fn closure_with_lifetime_not_ok<'a>() {
assert_send::<||:'a>();
//~^ ERROR not implemented
}
// unsafe pointers are ok unless they point at unsendable things
fn unsafe_ok1<'a>(_: &'a int) {
assert_send::<*const int>();
assert_send::<*mut int>();
}
fn unsafe_ok2<'a>(_: &'a int) {
assert_send::<*const &'a int>(); //~ ERROR does not fulfill
}
fn unsafe_ok3<'a>(_: &'a int) {
assert_send::<*mut &'a int>(); //~ ERROR does not fulfill
}
fn main() {
}<|fim▁end|> | // lifetime pointers with 'static lifetime are ok
|
<|file_name|>test_pattern_matching.py<|end_file_name|><|fim▁begin|>from collections import namedtuple
from cytomine.models._utilities.pattern_matching import resolve_pattern
class TestPatternMatching:
def get_fake_type(self):
return namedtuple("fakeobj", ["lst", "atomstr", "atomfloat"])
def test_no_iterable_pattern(self):
fake = self.get_fake_type()(lst=1, atomstr="aa", atomfloat=1.5)
resolved = sorted(resolve_pattern("{lst}/{atomstr}_{atomfloat}.png", fake))
assert(len(resolved) == 1)
assert(resolved[0] == "1/aa_1.5.png")
def test_single_iterable_pattern(self):
fake = self.get_fake_type()(lst=[1, 2, 3], atomstr="aa", atomfloat=1.5)
resolved = sorted(resolve_pattern("{lst}/{atomstr}_{atomfloat}.png", fake))
assert(len(resolved) == 3)
assert(resolved[0] == "1/aa_1.5.png")
assert(resolved[1] == "2/aa_1.5.png")
assert(resolved[2] == "3/aa_1.5.png")
<|fim▁hole|> def test_no_placeholder(self):
fake = self.get_fake_type()(lst=[1, 2, 3], atomstr="aa", atomfloat=1.5)
resolved = resolve_pattern("no_placeholder", fake)
assert(len(resolved) == 1)<|fim▁end|> | |
<|file_name|>comment.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2017 Ernest Micklei
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
package proto
import (
"strings"
"text/scanner"
)
// Comment one or more comment text lines, either in c- or c++ style.
type Comment struct {
Position scanner.Position
// Lines are comment text lines without prefixes //, ///, /* or suffix */
Lines []string
Cstyle bool // refers to /* ... */, C++ style is using //
ExtraSlash bool // is true if the comment starts with 3 slashes
}
// newComment returns a comment.
func newComment(pos scanner.Position, lit string) *Comment {
extraSlash := strings.HasPrefix(lit, "///")
isCstyle := strings.HasPrefix(lit, "/*") && strings.HasSuffix(lit, "*/")
var lines []string
if isCstyle {
withoutMarkers := strings.TrimRight(strings.TrimLeft(lit, "/*"), "*/")
lines = strings.Split(withoutMarkers, "\n")
} else {
lines = strings.Split(strings.TrimLeft(lit, "/"), "\n")
}
return &Comment{Position: pos, Lines: lines, Cstyle: isCstyle, ExtraSlash: extraSlash}
}
//type inlineComment struct {
// line string
// extraSlash bool
//}
// Accept dispatches the call to the visitor.
func (c *Comment) Accept(v Visitor) {
v.VisitComment(c)
}
// Merge appends all lines from the argument comment.
func (c *Comment) Merge(other *Comment) {
c.Lines = append(c.Lines, other.Lines...)
c.Cstyle = c.Cstyle || other.Cstyle
}
func (c Comment) hasTextOnLine(line int) bool {
if len(c.Lines) == 0 {
return false
}
return c.Position.Line <= line && line <= c.Position.Line+len(c.Lines)-1
}
// Message returns the first line or empty if no lines.
func (c Comment) Message() string {
if len(c.Lines) == 0 {
return ""
}
return c.Lines[0]
}
// commentInliner is for types that can have an inline comment.
type commentInliner interface {
inlineComment(c *Comment)
}
// maybeScanInlineComment tries to scan comment on the current line ; if present then set it for the last element added.
func maybeScanInlineComment(p *Parser, c elementContainer) {
currentPos := p.scanner.Position
// see if there is an inline Comment
pos, tok, lit := p.next()
esize := len(c.elements())
// seen comment and on same line and elements have been added
if tCOMMENT == tok && pos.Line == currentPos.Line && esize > 0 {
// if the last added element can have an inline comment then set it
last := c.elements()[esize-1]
if inliner, ok := last.(commentInliner); ok {
// TODO skip multiline?
inliner.inlineComment(newComment(pos, lit))
}
} else {<|fim▁hole|>}
// takeLastCommentIfEndsOnLine removes and returns the last element of the list if it is a Comment
func takeLastCommentIfEndsOnLine(list []Visitee, line int) (*Comment, []Visitee) {
if len(list) == 0 {
return nil, list
}
if last, ok := list[len(list)-1].(*Comment); ok && last.hasTextOnLine(line) {
return last, list[:len(list)-1]
}
return nil, list
}
// mergeOrReturnComment creates a new comment and tries to merge it with the last element (if is a comment and is on the next line).
func mergeOrReturnComment(elements []Visitee, lit string, pos scanner.Position) *Comment {
com := newComment(pos, lit)
esize := len(elements)
if esize == 0 {
return com
}
// last element must be a comment to merge
last, ok := elements[esize-1].(*Comment)
if !ok {
return com
}
// do not merge c-style comments
if last.Cstyle {
return com
}
// last comment has text on previous line
// TODO handle last line of file could be inline comment
if !last.hasTextOnLine(pos.Line - 1) {
return com
}
last.Merge(com)
return nil
}
// parent is part of elementContainer
func (c *Comment) parent(Visitee) {}<|fim▁end|> | p.nextPut(pos, tok, lit)
} |
<|file_name|>action-types.js<|end_file_name|><|fim▁begin|>export const GET_RESOURCE_TO_VERIFY =
'verificationPortal/GET_RESOURCE_TO_VERIFY'<|fim▁hole|>export const CLEAR_RESOURCE = 'verificationPortal/CLEAR_RESOURCE'<|fim▁end|> | export const FORM_SUCCESSFULLY_SUBMITTED = 'FORM_SUCCESSFULLY_SUBMITTED' |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>var express = require('express');
var path = require('path');
var favicon = require('serve-favicon');
var logger = require('morgan');
var cookieParser = require('cookie-parser');
var bodyParser = require('body-parser');
// Database
var mongo = require('mongodb');
var monk = require('monk');<|fim▁hole|>var seller = require('./routes/seller');
var products = require('./routes/products');
var app = express();
const ObjectID = mongo.ObjectID;
// view engine setup
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'ejs');
app.engine('html', require('ejs').renderFile);
// uncomment after placing your favicon in /public
//app.use(favicon(path.join(__dirname, 'public', 'favicon.ico')));
app.use(logger('dev'));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, 'public')));
// Make our db accessible to our router
app.use(function(req,res,next){
req.db = db;
req.ObjectID = ObjectID;
next();
});
app.use('/', index);
app.use('/buyer', buyer);
app.use('/seller', seller);
app.use('/products', products);
// catch 404 and forward to error handler
app.use(function(req, res, next) {
var err = new Error('Not Found');
err.status = 404;
next(err);
});
// error handler
app.use(function(err, req, res, next) {
// set locals, only providing error in development
res.locals.message = err.message;
res.locals.error = req.app.get('env') === 'development' ? err : {};
// render the error page
res.status(err.status || 500);
res.render('error.html', err);
});
module.exports = app;<|fim▁end|> | var db = monk('localhost:27017/mydb');
var index = require('./routes/index');
var buyer = require('./routes/buyer'); |
<|file_name|>stepA_mal.rs<|end_file_name|><|fim▁begin|>#![allow(non_snake_case)]
extern crate mal;
use std::collections::HashMap;
use std::env as stdenv;
use std::process as process;
use mal::types::{MalVal, MalRet, MalError, err_str};
use mal::types::{symbol, _nil, string, list, vector, hash_map, malfunc, malfuncd};
use mal::types::MalError::{ErrString, ErrMalVal};
use mal::types::MalType::{Nil, False, Sym, List, Vector, Hash_Map, Func, MalFunc};
use mal::{readline, reader, core};
use mal::env::{env_set, env_get, env_new, env_bind, env_find, env_root, Env};
// read
fn read(str: String) -> MalRet {
reader::read_str(str)
}
// eval
fn is_pair(x: MalVal) -> bool {
match *x {
List(ref lst,_) | Vector(ref lst,_) => lst.len() > 0,
_ => false,
}
}
fn quasiquote(ast: MalVal) -> MalVal {
if !is_pair(ast.clone()) {
return list(vec![symbol("quote"), ast])
}
match *ast.clone() {
List(ref args,_) | Vector(ref args,_) => {
let ref a0 = args[0];
match **a0 {
Sym(ref s) if *s == "unquote" => return args[1].clone(),
_ => (),
}
if is_pair(a0.clone()) {
match **a0 {
List(ref a0args,_) | Vector(ref a0args,_) => {
match *a0args[0] {
Sym(ref s) if *s == "splice-unquote" => {
return list(vec![symbol("concat"),
a0args[1].clone(),
quasiquote(list(args[1..].to_vec()))])
},
_ => (),
}
},
_ => (),
}
}
let rest = list(args[1..].to_vec());
return list(vec![symbol("cons"),
quasiquote(a0.clone()),
quasiquote(rest)])
},
_ => _nil(), // should never reach
}
}
fn is_macro_call(ast: MalVal, env: Env) -> bool {
let lst = match *ast {
List(ref lst,_) => &lst[0],
_ => return false
};
match **lst {
Sym(_) => {},
_ => return false
}
if env_find(&env, lst).is_none() {
return false
}
let f = match env_get(&env, lst) {
Ok(f) => f,
_ => return false
};
match *f {
MalFunc(ref mfd,_) => mfd.is_macro,
_ => false,
}
}
fn macroexpand(mut ast: MalVal, env: Env) -> MalRet {
while is_macro_call(ast.clone(), env.clone()) {
let ast2 = ast.clone();
let args = match *ast2 {
List(ref args,_) => args,
_ => break,
};
let ref a0 = args[0];
let mf = match **a0 {
Sym(_) => try!(env_get(&env, &a0)),
_ => break,
};
match *mf {
MalFunc(_,_) => ast = try!(mf.apply(args[1..].to_vec())),
_ => break,
}
}
Ok(ast)
}
fn eval_ast(ast: MalVal, env: Env) -> MalRet {
match *ast {
Sym(_) => env_get(&env, &ast),
List(ref a,_) | Vector(ref a,_) => {
let mut ast_vec : Vec<MalVal> = vec![];
for mv in a.iter() {
let mv2 = mv.clone();
ast_vec.push(try!(eval(mv2, env.clone())));
}
Ok(match *ast { List(_,_) => list(ast_vec),
_ => vector(ast_vec) })
}
Hash_Map(ref hm,_) => {
let mut new_hm: HashMap<String,MalVal> = HashMap::new();
for (key, value) in hm.iter() {
new_hm.insert(key.to_string(),
try!(eval(value.clone(), env.clone())));
}
Ok(hash_map(new_hm))
}
_ => Ok(ast.clone()),
}
}
fn eval(mut ast: MalVal, mut env: Env) -> MalRet {
'tco: loop {
//println!("eval: {}, {}", ast, env.borrow());
//println!("eval: {}", ast);
match *ast {
List(_,_) => (), // continue
_ => return eval_ast(ast, env),
}
// apply list
ast = try!(macroexpand(ast, env.clone()));
match *ast {
List(_,_) => (), // continue
_ => return Ok(ast),
}
let tmp = ast;
let (args, a0sym) = match *tmp {
List(ref args,_) => {
if args.len() == 0 {
return Ok(tmp.clone());
}
let ref a0 = *args[0];
match *a0 {
Sym(ref a0sym) => (args, &a0sym[..]),
_ => (args, "__<fn*>__"),
}
},
_ => return err_str("Expected list"),
};
match a0sym {
"def!" => {
let a1 = (*args)[1].clone();
let a2 = (*args)[2].clone();
let r = try!(eval(a2, env.clone()));
match *a1 {
Sym(_) => {
env_set(&env.clone(), a1, r.clone());
return Ok(r);
},
_ => return err_str("def! of non-symbol"),
}
},
"let*" => {
let let_env = env_new(Some(env.clone()));
let a1 = (*args)[1].clone();
let a2 = (*args)[2].clone();
match *a1 {
List(ref binds,_) | Vector(ref binds,_) => {
let mut it = binds.iter();
while it.len() >= 2 {
let b = it.next().unwrap();
let exp = it.next().unwrap();
match **b {<|fim▁hole|> _ => return err_str("let* with non-symbol binding"),
}
}
},
_ => return err_str("let* with non-list bindings"),
}
ast = a2;
env = let_env.clone();
continue 'tco;
},
"quote" => return Ok((*args)[1].clone()),
"quasiquote" => {
let a1 = (*args)[1].clone();
ast = quasiquote(a1);
continue 'tco;
},
"defmacro!" => {
let a1 = (*args)[1].clone();
let a2 = (*args)[2].clone();
let r = try!(eval(a2, env.clone()));
match *r {
MalFunc(ref mfd,_) => {
match *a1 {
Sym(_) => {
let mut new_mfd = mfd.clone();
new_mfd.is_macro = true;
let mf = malfuncd(new_mfd,_nil());
env_set(&env.clone(), a1.clone(), mf.clone());
return Ok(mf);
},
_ => return err_str("def! of non-symbol"),
}
},
_ => return err_str("def! of non-symbol"),
}
},
"macroexpand" => {
let a1 = (*args)[1].clone();
return macroexpand(a1, env.clone())
},
"try*" => {
let a1 = (*args)[1].clone();
match eval(a1, env.clone()) {
Ok(res) => return Ok(res),
Err(err) => {
if args.len() < 3 { return Err(err); }
let a2 = (*args)[2].clone();
let cat = match *a2 {
List(ref cat,_) => cat,
_ => return err_str("invalid catch* clause"),
};
if cat.len() != 3 {
return err_str("wrong arity to catch* clause");
}
let c1 = (*cat)[1].clone();
match *c1 {
Sym(_) => {},
_ => return err_str("invalid catch* binding"),
};
let exc = match err {
ErrMalVal(mv) => mv,
ErrString(s) => string(s),
};
let bind_env = env_new(Some(env.clone()));
env_set(&bind_env, c1.clone(), exc);
let c2 = (*cat)[2].clone();
return eval(c2, bind_env);
},
};
}
"do" => {
let el = list(args[1..args.len()-1].to_vec());
try!(eval_ast(el, env.clone()));
ast = args[args.len() - 1].clone();
continue 'tco;
},
"if" => {
let a1 = (*args)[1].clone();
let c = try!(eval(a1, env.clone()));
match *c {
False | Nil => {
if args.len() >= 4 {
ast = args[3].clone();
continue 'tco;
} else {
return Ok(_nil());
}
},
_ => {
ast = args[2].clone();
continue 'tco;
},
}
},
"fn*" => {
let a1 = args[1].clone();
let a2 = args[2].clone();
return Ok(malfunc(eval, a2, env, a1, _nil()));
},
"eval" => {
let a1 = (*args)[1].clone();
ast = try!(eval(a1, env.clone()));
env = env_root(&env);
continue 'tco;
},
_ => { // function call
let el = try!(eval_ast(tmp.clone(), env.clone()));
let args = match *el {
List(ref args,_) => args,
_ => return err_str("Invalid apply"),
};
return match *args.clone()[0] {
Func(f,_) => f(args[1..].to_vec()),
MalFunc(ref mf,_) => {
let mfc = mf.clone();
let alst = list(args[1..].to_vec());
let new_env = env_new(Some(mfc.env.clone()));
match env_bind(&new_env, mfc.params, alst) {
Ok(_) => {
ast = mfc.exp;
env = new_env;
continue 'tco;
},
Err(e) => err_str(&e),
}
},
_ => err_str("attempt to call non-function"),
}
},
}
}
}
// print
fn print(exp: MalVal) -> String {
exp.pr_str(true)
}
fn rep(str: &str, env: Env) -> Result<String,MalError> {
let ast = try!(read(str.to_string()));
//println!("read: {}", ast);
let exp = try!(eval(ast, env));
Ok(print(exp))
}
fn main() {
// core.rs: defined using rust
let repl_env = env_new(None);
for (k, v) in core::ns().into_iter() {
env_set(&repl_env, symbol(&k), v);
}
// see eval() for definition of "eval"
env_set(&repl_env, symbol("*ARGV*"), list(vec![]));
// core.mal: defined using the language itself
let _ = rep("(def! *host-language* \"rust\")", repl_env.clone());
let _ = rep("(def! not (fn* (a) (if a false true)))", repl_env.clone());
let _ = rep("(def! load-file (fn* (f) (eval (read-string (str \"(do \" (slurp f) \")\")))))", repl_env.clone());
let _ = rep("(defmacro! cond (fn* (& xs) (if (> (count xs) 0) (list 'if (first xs) (if (> (count xs) 1) (nth xs 1) (throw \"odd number of forms to cond\")) (cons 'cond (rest (rest xs)))))))", repl_env.clone());
let _ = rep("(defmacro! or (fn* (& xs) (if (empty? xs) nil (if (= 1 (count xs)) (first xs) `(let* (or_FIXME ~(first xs)) (if or_FIXME or_FIXME (or ~@(rest xs))))))))", repl_env.clone());
// Invoked with command line arguments
let args = stdenv::args();
if args.len() > 1 {
let mv_args = args.skip(2)
.map(|a| string(a))
.collect::<Vec<MalVal>>();
env_set(&repl_env, symbol("*ARGV*"), list(mv_args));
let lf = format!("(load-file \"{}\")",
stdenv::args().skip(1).next().unwrap());
return match rep(&lf, repl_env.clone()) {
Ok(_) => process::exit(0),
Err(str) => {
println!("Error: {:?}", str);
process::exit(1);
}
};
}
// repl loop
let _ = rep("(println (str \"Mal [\" *host-language* \"]\"))", repl_env.clone());
loop {
let line = readline::mal_readline("user> ");
match line { None => break, _ => () }
match rep(&line.unwrap(), repl_env.clone()) {
Ok(str) => println!("{}", str),
Err(ErrMalVal(_)) => (), // Blank line
Err(ErrString(s)) => println!("Error: {}", s),
}
}
}<|fim▁end|> | Sym(_) => {
let r = try!(eval(exp.clone(), let_env.clone()));
env_set(&let_env, b.clone(), r);
}, |
<|file_name|>Qualifiers.hpp<|end_file_name|><|fim▁begin|>/*******************************************************************************
* refn - All content 2016 Trent Reed, all rights reserved.
*------------------------------------------------------------------------------
* A type containing information for type qualifications and declarators.
******************************************************************************/
#ifndef REFN_QUALIFIERS_HPP
#define REFN_QUALIFIERS_HPP<|fim▁hole|>
#include <vector>
namespace RefN {
/*******************************************************************************
* Public Types
******************************************************************************/
class Qualifier {
public:
// Types
enum Type {
LValueReference,
RValueReference,
ConstVolatileQualifier,
ConstQualifier,
VolatileQualifier,
IncompleteArrayExtent,
ArrayExtent,
Pointer
};
// Constructors / Destructors
Qualifier() = default;
inline Qualifier(Type type, size_t extent = 0) :
m_type(type), m_extent(extent)
{
// Intentionally Empty
}
// Public Accessors
inline size_t getExtent() const {
return m_extent;
}
inline Type getType() const {
return m_type;
}
private:
Type m_type;
size_t m_extent;
};
using Qualifiers = std::vector<Qualifier>;
}
#endif // REFN_QUALIFIERS_HPP<|fim▁end|> | |
<|file_name|>test.js<|end_file_name|><|fim▁begin|>'use strict';
const AWS = require('aws-sdk');
let x = "/delegationset/NHKXBB6SHGKLN";
console.log(x.replace('/delegationset/', ''));
return;
const route53 = new AWS.Route53();
route53.listHostedZones({}).promise()
.then(response => {
console.log(response);
return response.HostedZones.find(hostedZone => {
return hostedZone.Name === 'manapaho.com.';<|fim▁hole|> console.log(err);
})
.then(hostedZone => {
console.log(hostedZone);
});
return;
/*
route53.listReusableDelegationSets({}).promise()
.then(response => {
return response.DelegationSets.find(delegationSet => {
return delegationSet.CallerReference === 'arn:aws:lambda:us-east-1:238541850529:function:Prod-Wessels-us-east-1-Route53ReusableDelegationSet';
});
})
.catch(err => {
console.log(err);
})
.then(reusableDelegationSet => {
console.log(reusableDelegationSet);
});
return;
AWS.config.update({region: 'us-east-1'});
let stackName = 'Prod-Manapaho-us-east-1-NameServerSet';
let responseStatus = "FAILED";
let responseData = {};
let cfn = new AWS.CloudFormation();
cfn.describeStacks({StackName: stackName}).promise()
.then(data => {
console.log('333333333333333333333', JSON.stringify(data, null, 2));
data.Stacks[0].Outputs.forEach(function (output) {
responseData[output.OutputKey] = output.OutputValue;
});
responseStatus = "SUCCESS";
console.log(JSON.stringify(responseData, null, 2));
})
.catch(err => {
console.log('4444444444444444444444444');
console.log('FAILED TO DESCRIBE STACK:', err);
});
return;
const route53 = new AWS.Route53();
route53.listReusableDelegationSets({}).promise()
.then(response => {
console.log(response.DelegationSets.find(delegationSet => {
return delegationSet.CallerReference === 'arn:aws:lambda:us-east-1:238541850529:function:Prod-Manapaho-us-east-1-Route53ReusableDelegationSet';
}));
})
.catch(err => {
console.log(err);
});
return;
route53.createReusableDelegationSet({CallerReference: 'createReusableDelegationSet'}).promise()
.then(response => {
console.log('XXXXXXXX', JSON.stringify(response, null, 2));
})
.catch(err => {
console.log(err, err.stack);
});
*/<|fim▁end|> | });
})
.catch(err => { |
<|file_name|>cef_request_handler.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2014 Marshall A. Greenblatt. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the name Chromium Embedded
// Framework nor the names of its contributors may be used to endorse
// or promote products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// ---------------------------------------------------------------------------
//
// This file was generated by the CEF translator tool and should not be edited
// by hand. See the translator.README.txt file in the tools directory for
// more information.
//
#![allow(non_snake_case, unused_imports)]
use eutil;
use interfaces;
use types;
use wrappers::CefWrap;
use libc;
use std::collections::HashMap;
use std::ptr;
//
// Callback structure used for asynchronous continuation of quota requests.
//
#[repr(C)]
pub struct _cef_quota_callback_t {
//
// Base structure.
//
pub base: types::cef_base_t,
//
// Continue the quota request. If |allow| is true (1) the request will be
// allowed. Otherwise, the request will be denied.
//
pub cont: Option<extern "C" fn(this: *mut cef_quota_callback_t,
allow: libc::c_int) -> ()>,
//
// Cancel the quota request.
//
pub cancel: Option<extern "C" fn(this: *mut cef_quota_callback_t) -> ()>,
//
// The reference count. This will only be present for Rust instances!
//
pub ref_count: uint,
//
// Extra data. This will only be present for Rust instances!
//
pub extra: u8,
}
pub type cef_quota_callback_t = _cef_quota_callback_t;
//
// Callback structure used for asynchronous continuation of quota requests.
//
pub struct CefQuotaCallback {
c_object: *mut cef_quota_callback_t,
}
impl Clone for CefQuotaCallback {
fn clone(&self) -> CefQuotaCallback{
unsafe {
if !self.c_object.is_null() {
((*self.c_object).base.add_ref.unwrap())(&mut (*self.c_object).base);
}
CefQuotaCallback {
c_object: self.c_object,
}
}
}
}
impl Drop for CefQuotaCallback {
fn drop(&mut self) {
unsafe {
if !self.c_object.is_null() {
((*self.c_object).base.release.unwrap())(&mut (*self.c_object).base);
}
}
}
}
impl CefQuotaCallback {
pub unsafe fn from_c_object(c_object: *mut cef_quota_callback_t) -> CefQuotaCallback {
CefQuotaCallback {
c_object: c_object,
}
}
pub unsafe fn from_c_object_addref(c_object: *mut cef_quota_callback_t) -> CefQuotaCallback {
if !c_object.is_null() {
((*c_object).base.add_ref.unwrap())(&mut (*c_object).base);
}
CefQuotaCallback {
c_object: c_object,
}
}
pub fn c_object(&self) -> *mut cef_quota_callback_t {
self.c_object
}
pub fn c_object_addrefed(&self) -> *mut cef_quota_callback_t {
unsafe {
if !self.c_object.is_null() {
eutil::add_ref(self.c_object as *mut types::cef_base_t);
}
self.c_object
}
}
pub fn is_null_cef_object(&self) -> bool {
self.c_object.is_null()
}
pub fn is_not_null_cef_object(&self) -> bool {
!self.c_object.is_null()
}
//
// Continue the quota request. If |allow| is true (1) the request will be
// allowed. Otherwise, the request will be denied.
//
pub fn cont(&self, allow: libc::c_int) -> () {
if self.c_object.is_null() {
panic!("called a CEF method on a null object")
}
unsafe {
CefWrap::to_rust(
((*self.c_object).cont.unwrap())(
self.c_object,
CefWrap::to_c(allow)))
}
}
//
// Cancel the quota request.
//
pub fn cancel(&self) -> () {
if self.c_object.is_null() {
panic!("called a CEF method on a null object")
}
unsafe {
CefWrap::to_rust(
((*self.c_object).cancel.unwrap())(
self.c_object))
}
}
}
impl CefWrap<*mut cef_quota_callback_t> for CefQuotaCallback {
fn to_c(rust_object: CefQuotaCallback) -> *mut cef_quota_callback_t {
rust_object.c_object_addrefed()
}
unsafe fn to_rust(c_object: *mut cef_quota_callback_t) -> CefQuotaCallback {
CefQuotaCallback::from_c_object_addref(c_object)
}
}
impl CefWrap<*mut cef_quota_callback_t> for Option<CefQuotaCallback> {
fn to_c(rust_object: Option<CefQuotaCallback>) -> *mut cef_quota_callback_t {
match rust_object {
None => ptr::null_mut(),
Some(rust_object) => rust_object.c_object_addrefed(),
}
}
unsafe fn to_rust(c_object: *mut cef_quota_callback_t) -> Option<CefQuotaCallback> {
if c_object.is_null() {
None
} else {
Some(CefQuotaCallback::from_c_object_addref(c_object))
}
}
}
//
// Callback structure used for asynchronous continuation of url requests when
// invalid SSL certificates are encountered.
//
#[repr(C)]
pub struct _cef_allow_certificate_error_callback_t {
//
// Base structure.
//
pub base: types::cef_base_t,
//
// Continue the url request. If |allow| is true (1) the request will be
// continued. Otherwise, the request will be canceled.
//
pub cont: Option<extern "C" fn(
this: *mut cef_allow_certificate_error_callback_t,
allow: libc::c_int) -> ()>,
//
// The reference count. This will only be present for Rust instances!
//
pub ref_count: uint,
//
// Extra data. This will only be present for Rust instances!
//
pub extra: u8,
}
pub type cef_allow_certificate_error_callback_t = _cef_allow_certificate_error_callback_t;
//
// Callback structure used for asynchronous continuation of url requests when
// invalid SSL certificates are encountered.
//
pub struct CefAllowCertificateErrorCallback {
c_object: *mut cef_allow_certificate_error_callback_t,
}
impl Clone for CefAllowCertificateErrorCallback {
fn clone(&self) -> CefAllowCertificateErrorCallback{
unsafe {
if !self.c_object.is_null() {
((*self.c_object).base.add_ref.unwrap())(&mut (*self.c_object).base);
}
CefAllowCertificateErrorCallback {
c_object: self.c_object,
}
}
}
}
impl Drop for CefAllowCertificateErrorCallback {
fn drop(&mut self) {
unsafe {
if !self.c_object.is_null() {
((*self.c_object).base.release.unwrap())(&mut (*self.c_object).base);
}
}
}
}
impl CefAllowCertificateErrorCallback {
pub unsafe fn from_c_object(c_object: *mut cef_allow_certificate_error_callback_t) -> CefAllowCertificateErrorCallback {
CefAllowCertificateErrorCallback {
c_object: c_object,
}
}
pub unsafe fn from_c_object_addref(c_object: *mut cef_allow_certificate_error_callback_t) -> CefAllowCertificateErrorCallback {
if !c_object.is_null() {
((*c_object).base.add_ref.unwrap())(&mut (*c_object).base);
}
CefAllowCertificateErrorCallback {
c_object: c_object,
}
}
pub fn c_object(&self) -> *mut cef_allow_certificate_error_callback_t {
self.c_object
}
pub fn c_object_addrefed(&self) -> *mut cef_allow_certificate_error_callback_t {
unsafe {
if !self.c_object.is_null() {
eutil::add_ref(self.c_object as *mut types::cef_base_t);
}
self.c_object
}
}
pub fn is_null_cef_object(&self) -> bool {
self.c_object.is_null()
}
pub fn is_not_null_cef_object(&self) -> bool {
!self.c_object.is_null()
}
//
// Continue the url request. If |allow| is true (1) the request will be
// continued. Otherwise, the request will be canceled.
//
pub fn cont(&self, allow: libc::c_int) -> () {
if self.c_object.is_null() {
panic!("called a CEF method on a null object")
}
unsafe {
CefWrap::to_rust(
((*self.c_object).cont.unwrap())(
self.c_object,
CefWrap::to_c(allow)))
}
}
}
impl CefWrap<*mut cef_allow_certificate_error_callback_t> for CefAllowCertificateErrorCallback {
fn to_c(rust_object: CefAllowCertificateErrorCallback) -> *mut cef_allow_certificate_error_callback_t {
rust_object.c_object_addrefed()
}
unsafe fn to_rust(c_object: *mut cef_allow_certificate_error_callback_t) -> CefAllowCertificateErrorCallback {
CefAllowCertificateErrorCallback::from_c_object_addref(c_object)
}
}
impl CefWrap<*mut cef_allow_certificate_error_callback_t> for Option<CefAllowCertificateErrorCallback> {
fn to_c(rust_object: Option<CefAllowCertificateErrorCallback>) -> *mut cef_allow_certificate_error_callback_t {
match rust_object {
None => ptr::null_mut(),
Some(rust_object) => rust_object.c_object_addrefed(),
}
}
unsafe fn to_rust(c_object: *mut cef_allow_certificate_error_callback_t) -> Option<CefAllowCertificateErrorCallback> {
if c_object.is_null() {
None
} else {
Some(CefAllowCertificateErrorCallback::from_c_object_addref(c_object))
}
}
}
//
// Implement this structure to handle events related to browser requests. The
// functions of this structure will be called on the thread indicated.
//
#[repr(C)]
pub struct _cef_request_handler_t {
//
// Base structure.
//
pub base: types::cef_base_t,
//
// Called on the UI thread before browser navigation. Return true (1) to
// cancel the navigation or false (0) to allow the navigation to proceed. The
// |request| object cannot be modified in this callback.
// cef_load_handler_t::OnLoadingStateChange will be called twice in all cases.
// If the navigation is allowed cef_load_handler_t::OnLoadStart and
// cef_load_handler_t::OnLoadEnd will be called. If the navigation is canceled
// cef_load_handler_t::OnLoadError will be called with an |errorCode| value of
// ERR_ABORTED.
//
pub on_before_browse: Option<extern "C" fn(this: *mut cef_request_handler_t,
browser: *mut interfaces::cef_browser_t,
frame: *mut interfaces::cef_frame_t,
request: *mut interfaces::cef_request_t,
is_redirect: libc::c_int) -> libc::c_int>,
//
// Called on the IO thread before a resource request is loaded. The |request|
// object may be modified. To cancel the request return true (1) otherwise
// return false (0).
//
pub on_before_resource_load: Option<extern "C" fn(
this: *mut cef_request_handler_t, browser: *mut interfaces::cef_browser_t,
frame: *mut interfaces::cef_frame_t,
request: *mut interfaces::cef_request_t) -> libc::c_int>,
//
// Called on the IO thread before a resource is loaded. To allow the resource
// to load normally return NULL. To specify a handler for the resource return
// a cef_resource_handler_t object. The |request| object should not be
// modified in this callback.
//
pub get_resource_handler: Option<extern "C" fn(
this: *mut cef_request_handler_t, browser: *mut interfaces::cef_browser_t,
frame: *mut interfaces::cef_frame_t,
request: *mut interfaces::cef_request_t) -> *mut interfaces::cef_resource_handler_t>,
//
// Called on the IO thread when a resource load is redirected. The |old_url|
// parameter will contain the old URL. The |new_url| parameter will contain
// the new URL and can be changed if desired.
//
pub on_resource_redirect: Option<extern "C" fn(
this: *mut cef_request_handler_t, browser: *mut interfaces::cef_browser_t,
frame: *mut interfaces::cef_frame_t, old_url: *const types::cef_string_t,
new_url: *mut types::cef_string_t) -> ()>,
//
// Called on the IO thread when the browser needs credentials from the user.
// |isProxy| indicates whether the host is a proxy server. |host| contains the
// hostname and |port| contains the port number. Return true (1) to continue
// the request and call cef_auth_callback_t::cont() when the authentication
// information is available. Return false (0) to cancel the request.
//
pub get_auth_credentials: Option<extern "C" fn(
this: *mut cef_request_handler_t, browser: *mut interfaces::cef_browser_t,
frame: *mut interfaces::cef_frame_t, isProxy: libc::c_int,
host: *const types::cef_string_t, port: libc::c_int,
realm: *const types::cef_string_t, scheme: *const types::cef_string_t,
callback: *mut interfaces::cef_auth_callback_t) -> libc::c_int>,
//
// Called on the IO thread when JavaScript requests a specific storage quota
// size via the webkitStorageInfo.requestQuota function. |origin_url| is the
// origin of the page making the request. |new_size| is the requested quota
// size in bytes. Return true (1) and call cef_quota_callback_t::cont() either
// in this function or at a later time to grant or deny the request. Return
// false (0) to cancel the request.
//
pub on_quota_request: Option<extern "C" fn(this: *mut cef_request_handler_t,
browser: *mut interfaces::cef_browser_t,
origin_url: *const types::cef_string_t, new_size: i64,
callback: *mut interfaces::cef_quota_callback_t) -> libc::c_int>,
//
// Called on the UI thread to handle requests for URLs with an unknown
// protocol component. Set |allow_os_execution| to true (1) to attempt
// execution via the registered OS protocol handler, if any. SECURITY WARNING:
// YOU SHOULD USE THIS METHOD TO ENFORCE RESTRICTIONS BASED ON SCHEME, HOST OR
// OTHER URL ANALYSIS BEFORE ALLOWING OS EXECUTION.
//
pub on_protocol_execution: Option<extern "C" fn(
this: *mut cef_request_handler_t, browser: *mut interfaces::cef_browser_t,
url: *const types::cef_string_t,
allow_os_execution: *mut libc::c_int) -> ()>,
//
// Called on the UI thread to handle requests for URLs with an invalid SSL
// certificate. Return true (1) and call
// cef_allow_certificate_error_callback_t:: cont() either in this function or
// at a later time to continue or cancel the request. Return false (0) to
// cancel the request immediately. If |callback| is NULL the error cannot be
// recovered from and the request will be canceled automatically. If
// CefSettings.ignore_certificate_errors is set all invalid certificates will
// be accepted without calling this function.
//
pub on_certificate_error: Option<extern "C" fn(
this: *mut cef_request_handler_t, cert_error: types::cef_errorcode_t,
request_url: *const types::cef_string_t,
callback: *mut interfaces::cef_allow_certificate_error_callback_t) -> libc::c_int>,
//
// Called on the browser process IO thread before a plugin is loaded. Return
// true (1) to block loading of the plugin.
//
pub on_before_plugin_load: Option<extern "C" fn(
this: *mut cef_request_handler_t, browser: *mut interfaces::cef_browser_t,
url: *const types::cef_string_t, policy_url: *const types::cef_string_t,
info: *mut interfaces::cef_web_plugin_info_t) -> libc::c_int>,
//
// Called on the browser process UI thread when a plugin has crashed.
// |plugin_path| is the path of the plugin that crashed.
//
pub on_plugin_crashed: Option<extern "C" fn(this: *mut cef_request_handler_t,
browser: *mut interfaces::cef_browser_t,
plugin_path: *const types::cef_string_t) -> ()>,
//
// Called on the browser process UI thread when the render process terminates
// unexpectedly. |status| indicates how the process terminated.
//
pub on_render_process_terminated: Option<extern "C" fn(
this: *mut cef_request_handler_t, browser: *mut interfaces::cef_browser_t,
status: types::cef_termination_status_t) -> ()>,
//
// The reference count. This will only be present for Rust instances!
//
pub ref_count: uint,
//
// Extra data. This will only be present for Rust instances!
//
pub extra: u8,
}
pub type cef_request_handler_t = _cef_request_handler_t;
//
// Implement this structure to handle events related to browser requests. The
// functions of this structure will be called on the thread indicated.
//
pub struct CefRequestHandler {
c_object: *mut cef_request_handler_t,
}
impl Clone for CefRequestHandler {
fn clone(&self) -> CefRequestHandler{
unsafe {
if !self.c_object.is_null() {
((*self.c_object).base.add_ref.unwrap())(&mut (*self.c_object).base);
}
CefRequestHandler {
c_object: self.c_object,
}
}
}
}
impl Drop for CefRequestHandler {
fn drop(&mut self) {
unsafe {
if !self.c_object.is_null() {
((*self.c_object).base.release.unwrap())(&mut (*self.c_object).base);
}
}
}
}
impl CefRequestHandler {
pub unsafe fn from_c_object(c_object: *mut cef_request_handler_t) -> CefRequestHandler {
CefRequestHandler {
c_object: c_object,
}
}
pub unsafe fn from_c_object_addref(c_object: *mut cef_request_handler_t) -> CefRequestHandler {
if !c_object.is_null() {
((*c_object).base.add_ref.unwrap())(&mut (*c_object).base);
}
CefRequestHandler {
c_object: c_object,
}
}
pub fn c_object(&self) -> *mut cef_request_handler_t {
self.c_object
}
pub fn c_object_addrefed(&self) -> *mut cef_request_handler_t {
unsafe {
if !self.c_object.is_null() {
eutil::add_ref(self.c_object as *mut types::cef_base_t);
}
self.c_object
}
}
pub fn is_null_cef_object(&self) -> bool {
self.c_object.is_null()
}
pub fn is_not_null_cef_object(&self) -> bool {
!self.c_object.is_null()
}
//
// Called on the UI thread before browser navigation. Return true (1) to
// cancel the navigation or false (0) to allow the navigation to proceed. The
// |request| object cannot be modified in this callback.
// cef_load_handler_t::OnLoadingStateChange will be called twice in all cases.
// If the navigation is allowed cef_load_handler_t::OnLoadStart and
// cef_load_handler_t::OnLoadEnd will be called. If the navigation is canceled
// cef_load_handler_t::OnLoadError will be called with an |errorCode| value of
// ERR_ABORTED.
//
pub fn on_before_browse(&self, browser: interfaces::CefBrowser,
frame: interfaces::CefFrame, request: interfaces::CefRequest,
is_redirect: libc::c_int) -> libc::c_int {
if self.c_object.is_null() {
panic!("called a CEF method on a null object")
}
unsafe {
CefWrap::to_rust(
((*self.c_object).on_before_browse.unwrap())(
self.c_object,
CefWrap::to_c(browser),
CefWrap::to_c(frame),
CefWrap::to_c(request),
CefWrap::to_c(is_redirect)))
}
}
//
// Called on the IO thread before a resource request is loaded. The |request|
// object may be modified. To cancel the request return true (1) otherwise
// return false (0).
//
pub fn on_before_resource_load(&self, browser: interfaces::CefBrowser,
frame: interfaces::CefFrame,
request: interfaces::CefRequest) -> libc::c_int {
if self.c_object.is_null() {
panic!("called a CEF method on a null object")
}
unsafe {
CefWrap::to_rust(
((*self.c_object).on_before_resource_load.unwrap())(
self.c_object,
CefWrap::to_c(browser),
CefWrap::to_c(frame),
CefWrap::to_c(request)))
}
}
//
// Called on the IO thread before a resource is loaded. To allow the resource
// to load normally return NULL. To specify a handler for the resource return
// a cef_resource_handler_t object. The |request| object should not be
// modified in this callback.
//<|fim▁hole|> panic!("called a CEF method on a null object")
}
unsafe {
CefWrap::to_rust(
((*self.c_object).get_resource_handler.unwrap())(
self.c_object,
CefWrap::to_c(browser),
CefWrap::to_c(frame),
CefWrap::to_c(request)))
}
}
//
// Called on the IO thread when a resource load is redirected. The |old_url|
// parameter will contain the old URL. The |new_url| parameter will contain
// the new URL and can be changed if desired.
//
pub fn on_resource_redirect(&self, browser: interfaces::CefBrowser,
frame: interfaces::CefFrame, old_url: &[u16],
new_url: *mut types::cef_string_t) -> () {
if self.c_object.is_null() {
panic!("called a CEF method on a null object")
}
unsafe {
CefWrap::to_rust(
((*self.c_object).on_resource_redirect.unwrap())(
self.c_object,
CefWrap::to_c(browser),
CefWrap::to_c(frame),
CefWrap::to_c(old_url),
CefWrap::to_c(new_url)))
}
}
//
// Called on the IO thread when the browser needs credentials from the user.
// |isProxy| indicates whether the host is a proxy server. |host| contains the
// hostname and |port| contains the port number. Return true (1) to continue
// the request and call cef_auth_callback_t::cont() when the authentication
// information is available. Return false (0) to cancel the request.
//
pub fn get_auth_credentials(&self, browser: interfaces::CefBrowser,
frame: interfaces::CefFrame, isProxy: libc::c_int, host: &[u16],
port: libc::c_int, realm: &[u16], scheme: &[u16],
callback: interfaces::CefAuthCallback) -> libc::c_int {
if self.c_object.is_null() {
panic!("called a CEF method on a null object")
}
unsafe {
CefWrap::to_rust(
((*self.c_object).get_auth_credentials.unwrap())(
self.c_object,
CefWrap::to_c(browser),
CefWrap::to_c(frame),
CefWrap::to_c(isProxy),
CefWrap::to_c(host),
CefWrap::to_c(port),
CefWrap::to_c(realm),
CefWrap::to_c(scheme),
CefWrap::to_c(callback)))
}
}
//
// Called on the IO thread when JavaScript requests a specific storage quota
// size via the webkitStorageInfo.requestQuota function. |origin_url| is the
// origin of the page making the request. |new_size| is the requested quota
// size in bytes. Return true (1) and call cef_quota_callback_t::cont() either
// in this function or at a later time to grant or deny the request. Return
// false (0) to cancel the request.
//
pub fn on_quota_request(&self, browser: interfaces::CefBrowser,
origin_url: &[u16], new_size: i64,
callback: interfaces::CefQuotaCallback) -> libc::c_int {
if self.c_object.is_null() {
panic!("called a CEF method on a null object")
}
unsafe {
CefWrap::to_rust(
((*self.c_object).on_quota_request.unwrap())(
self.c_object,
CefWrap::to_c(browser),
CefWrap::to_c(origin_url),
CefWrap::to_c(new_size),
CefWrap::to_c(callback)))
}
}
//
// Called on the UI thread to handle requests for URLs with an unknown
// protocol component. Set |allow_os_execution| to true (1) to attempt
// execution via the registered OS protocol handler, if any. SECURITY WARNING:
// YOU SHOULD USE THIS METHOD TO ENFORCE RESTRICTIONS BASED ON SCHEME, HOST OR
// OTHER URL ANALYSIS BEFORE ALLOWING OS EXECUTION.
//
pub fn on_protocol_execution(&self, browser: interfaces::CefBrowser,
url: &[u16], allow_os_execution: &mut libc::c_int) -> () {
if self.c_object.is_null() {
panic!("called a CEF method on a null object")
}
unsafe {
CefWrap::to_rust(
((*self.c_object).on_protocol_execution.unwrap())(
self.c_object,
CefWrap::to_c(browser),
CefWrap::to_c(url),
CefWrap::to_c(allow_os_execution)))
}
}
//
// Called on the UI thread to handle requests for URLs with an invalid SSL
// certificate. Return true (1) and call
// cef_allow_certificate_error_callback_t:: cont() either in this function or
// at a later time to continue or cancel the request. Return false (0) to
// cancel the request immediately. If |callback| is NULL the error cannot be
// recovered from and the request will be canceled automatically. If
// CefSettings.ignore_certificate_errors is set all invalid certificates will
// be accepted without calling this function.
//
pub fn on_certificate_error(&self, cert_error: types::cef_errorcode_t,
request_url: &[u16],
callback: interfaces::CefAllowCertificateErrorCallback) -> libc::c_int {
if self.c_object.is_null() {
panic!("called a CEF method on a null object")
}
unsafe {
CefWrap::to_rust(
((*self.c_object).on_certificate_error.unwrap())(
self.c_object,
CefWrap::to_c(cert_error),
CefWrap::to_c(request_url),
CefWrap::to_c(callback)))
}
}
//
// Called on the browser process IO thread before a plugin is loaded. Return
// true (1) to block loading of the plugin.
//
pub fn on_before_plugin_load(&self, browser: interfaces::CefBrowser,
url: &[u16], policy_url: &[u16],
info: interfaces::CefWebPluginInfo) -> libc::c_int {
if self.c_object.is_null() {
panic!("called a CEF method on a null object")
}
unsafe {
CefWrap::to_rust(
((*self.c_object).on_before_plugin_load.unwrap())(
self.c_object,
CefWrap::to_c(browser),
CefWrap::to_c(url),
CefWrap::to_c(policy_url),
CefWrap::to_c(info)))
}
}
//
// Called on the browser process UI thread when a plugin has crashed.
// |plugin_path| is the path of the plugin that crashed.
//
pub fn on_plugin_crashed(&self, browser: interfaces::CefBrowser,
plugin_path: &[u16]) -> () {
if self.c_object.is_null() {
panic!("called a CEF method on a null object")
}
unsafe {
CefWrap::to_rust(
((*self.c_object).on_plugin_crashed.unwrap())(
self.c_object,
CefWrap::to_c(browser),
CefWrap::to_c(plugin_path)))
}
}
//
// Called on the browser process UI thread when the render process terminates
// unexpectedly. |status| indicates how the process terminated.
//
pub fn on_render_process_terminated(&self, browser: interfaces::CefBrowser,
status: types::cef_termination_status_t) -> () {
if self.c_object.is_null() {
panic!("called a CEF method on a null object")
}
unsafe {
CefWrap::to_rust(
((*self.c_object).on_render_process_terminated.unwrap())(
self.c_object,
CefWrap::to_c(browser),
CefWrap::to_c(status)))
}
}
}
impl CefWrap<*mut cef_request_handler_t> for CefRequestHandler {
fn to_c(rust_object: CefRequestHandler) -> *mut cef_request_handler_t {
rust_object.c_object_addrefed()
}
unsafe fn to_rust(c_object: *mut cef_request_handler_t) -> CefRequestHandler {
CefRequestHandler::from_c_object_addref(c_object)
}
}
impl CefWrap<*mut cef_request_handler_t> for Option<CefRequestHandler> {
fn to_c(rust_object: Option<CefRequestHandler>) -> *mut cef_request_handler_t {
match rust_object {
None => ptr::null_mut(),
Some(rust_object) => rust_object.c_object_addrefed(),
}
}
unsafe fn to_rust(c_object: *mut cef_request_handler_t) -> Option<CefRequestHandler> {
if c_object.is_null() {
None
} else {
Some(CefRequestHandler::from_c_object_addref(c_object))
}
}
}<|fim▁end|> | pub fn get_resource_handler(&self, browser: interfaces::CefBrowser,
frame: interfaces::CefFrame,
request: interfaces::CefRequest) -> interfaces::CefResourceHandler {
if self.c_object.is_null() { |
<|file_name|>ActionGoto.java<|end_file_name|><|fim▁begin|>package me.mrdaniel.npcs.actions;
import javax.annotation.Nonnull;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.text.Text;
import org.spongepowered.api.text.action.TextActions;
import org.spongepowered.api.text.format.TextColors;
import me.mrdaniel.npcs.catalogtypes.actiontype.ActionTypes;
import me.mrdaniel.npcs.io.NPCFile;
import me.mrdaniel.npcs.managers.ActionResult;
import ninja.leaping.configurate.ConfigurationNode;
public class ActionGoto extends Action {
private int next;
public ActionGoto(@Nonnull final ConfigurationNode node) { this(node.getNode("Next").getInt(0)); }
public ActionGoto(final int next) {
super(ActionTypes.GOTO);
this.next = next;
}
public void setNext(final int next) { this.next = next; }
@Override
public void execute(final Player p, final NPCFile file, final ActionResult result) {
result.setNextAction(this.next);
}
@Override
public void serializeValue(final ConfigurationNode node) {
node.getNode("Next").setValue(this.next);
}
@Override
public Text getLine(final int index) {<|fim▁hole|> Text.builder().append(Text.of(TextColors.AQUA, this.next))
.onHover(TextActions.showText(Text.of(TextColors.YELLOW, "Change")))
.onClick(TextActions.suggestCommand("/npc action edit " + index + " goto <goto>")).build()).build();
}
}<|fim▁end|> | return Text.builder().append(Text.of(TextColors.GOLD, "Goto: "), |
<|file_name|>test_content_type.py<|end_file_name|><|fim▁begin|># Copyright (c) 2008, 2012 testtools developers. See LICENSE for details.
from testtools import TestCase
from testtools.matchers import Equals, MatchesException, Raises
from testtools.content_type import (
ContentType,
JSON,
UTF8_TEXT,
)
class TestContentType(TestCase):
def test___init___None_errors(self):
raises_value_error = Raises(MatchesException(ValueError))
self.assertThat(lambda:ContentType(None, None), raises_value_error)
self.assertThat(lambda:ContentType(None, "traceback"),
raises_value_error)
self.assertThat(lambda:ContentType("text", None), raises_value_error)
def test___init___sets_ivars(self):
content_type = ContentType("foo", "bar")
self.assertEqual("foo", content_type.type)
self.assertEqual("bar", content_type.subtype)
self.assertEqual({}, content_type.parameters)
def test___init___with_parameters(self):
content_type = ContentType("foo", "bar", {"quux": "thing"})
self.assertEqual({"quux": "thing"}, content_type.parameters)
def test___eq__(self):
content_type1 = ContentType("foo", "bar", {"quux": "thing"})
content_type2 = ContentType("foo", "bar", {"quux": "thing"})
content_type3 = ContentType("foo", "bar", {"quux": "thing2"})
self.assertTrue(content_type1.__eq__(content_type2))
self.assertFalse(content_type1.__eq__(content_type3))
def test_basic_repr(self):
content_type = ContentType('text', 'plain')
self.assertThat(repr(content_type), Equals('text/plain'))
def test_extended_repr(self):
content_type = ContentType(
'text', 'plain', {'foo': 'bar', 'baz': 'qux'})
self.assertThat(
repr(content_type), Equals('text/plain; baz="qux"; foo="bar"'))
class TestBuiltinContentTypes(TestCase):
def test_plain_text(self):
# The UTF8_TEXT content type represents UTF-8 encoded text/plain.
self.assertThat(UTF8_TEXT.type, Equals('text'))
self.assertThat(UTF8_TEXT.subtype, Equals('plain'))
self.assertThat(UTF8_TEXT.parameters, Equals({'charset': 'utf8'}))
def test_json_content(self):
# The JSON content type represents implictly UTF-8 application/json.
self.assertThat(JSON.type, Equals('application'))
self.assertThat(JSON.subtype, Equals('json'))
self.assertThat(JSON.parameters, Equals({}))<|fim▁hole|>
def test_suite():
from unittest import TestLoader
return TestLoader().loadTestsFromName(__name__)<|fim▁end|> | |
<|file_name|>Gate AND.py<|end_file_name|><|fim▁begin|>#-------------------------------------------------------------------------------
# PROJECT: VHDL Code Generator
# NAME: Dynamic AND Gate
#
# LICENSE: GNU-GPL V3
#-------------------------------------------------------------------------------
__isBlock__ = True
__className__ = "ANDGate"
__win__ = "ANDGateWindow"
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4 import uic
from lib.Block import *
class ANDGate(Block):
""" AND Gate
PORTS SPECIFICATIONS
"""
# TODO: Specifications of AND Gate (Documentation)
def __init__(self,system,numInput,sizeInput):
"""
:param name:
:param numInput: Number of input
:param size: Size of each input
:param system:
"""
self.numInput = numInput
self.name = "AND_GATE"
self.sizeInput = sizeInput
input_vector = [sizeInput]*self.numInput
output_vector = [sizeInput]
super().__init__(input_vector,output_vector,system,self.name)
def generate(self):
filetext = ""
if self.getOutputSignalSize(0) == 1:
filetext += "%s <= %s"%(self.getOutputSignalName(0),self.getInputSignalName(0))
for i in range(1,self.numInput):
filetext += " and %s"%(self.getInputSignalName(i))
else:
filetext += "%s <= "%self.getOutputSignalName(0)
for i in range (self.sizeInput):
filetext += "%s[%d]"%(self.getInputSignalName(0),self.sizeInput-i-1)
for j in range(1,self.numInput):
filetext += " and %s[%d]"%(self.getInputSignalName(j),self.sizeInput-i-1)
if i != self.sizeInput - 1:
filetext += " & "
filetext += ";\n"
return filetext
class ANDGateWindow(QWidget):
accept = pyqtSignal(list)
def __init__(self,parent = None):<|fim▁hole|> self.ui.acceptButton.clicked.connect(self.accepted)
self.ui.setWindowTitle("AND GATE")
def accepted(self):
numInput = self.ui.numInput.value()
sizeInput = self.ui.sizeInput.value()
self.accept.emit([numInput,sizeInput])
self.close()<|fim▁end|> | super().__init__()
self.ui = uic.loadUi("blocks\\Standard Library\\Gate.ui",self) |
<|file_name|>movies.publications.js<|end_file_name|><|fim▁begin|><|fim▁hole|> this.autorun(autorun);
return;
function validate() {
new SimpleSchema({
movieId: ML.fields.id
}).validate({ movieId });
}
function autorun(computation) {
if (!this.userId) {
return this.ready();
} else {
return Movies.find({ _id: movieId }, { fields: Movies.publicFields });
}
}
}<|fim▁end|> | Meteor.publish('movieDetails.user', publishUserMovieDetails);
function publishUserMovieDetails({ movieId }) {
validate(); |
<|file_name|>tango_fe.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# This file is part of the bliss project
#
# Copyright (c) 2016 Beamline Control Unit, ESRF<|fim▁hole|>
class tango_fe(TangoAttrCounter):
def __init__(self, name, config):
TangoAttrCounter.__init__(self, name, config)<|fim▁end|> | # Distributed under the GNU LGPLv3. See LICENSE for more info.
from bliss.controllers.tango_attr_as_counter import TangoAttrCounter |
<|file_name|>jsb_cocos2d_extension.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2013-2014 Chukong Technologies Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/**
* @type {Object}
* @name jsb.AssetsManager
* jsb.AssetsManager is the native AssetsManager for your game resources or scripts.
* please refer to this document to know how to use it: http://www.cocos2d-x.org/docs/manual/framework/html5/v3/assets-manager/en
* Only available in JSB
*/
jsb.AssetsManager = cc.AssetsManager;
delete cc.AssetsManager;
/**
* @type {Object}
* @name jsb.EventListenerAssetsManager
* jsb.EventListenerAssetsManager is the native event listener for AssetsManager.
* please refer to this document to know how to use it: http://www.cocos2d-x.org/docs/manual/framework/html5/v3/assets-manager/en
* Only available in JSB
*/
jsb.EventListenerAssetsManager = cc.EventListenerAssetsManager;
delete cc.EventListenerAssetsManager;
/**
* @type {Object}
* @name jsb.EventAssetsManager
* jsb.EventAssetsManager is the native event for AssetsManager.
* please refer to this document to know how to use it: http://www.cocos2d-x.org/docs/manual/framework/html5/v3/assets-manager/en
* Only available in JSB
*/
jsb.EventAssetsManager = cc.EventAssetsManager;
delete cc.EventAssetsManager;
// move from jsb_cocos2d
//start------------------------------
cc.ControlButton.extend = cc.Class.extend;
cc.ControlColourPicker.extend = cc.Class.extend;
cc.ControlPotentiometer.extend = cc.Class.extend;
cc.ControlSlider.extend = cc.Class.extend;
cc.ControlStepper.extend = cc.Class.extend;
cc.ControlSwitch.extend = cc.Class.extend;
//end------------------------------
//
// cocos2d constants
//
// This helper file should be required after jsb_cocos2d.js<|fim▁hole|>//
var cc = cc || {};
cc.SCROLLVIEW_DIRECTION_NONE = -1;
cc.SCROLLVIEW_DIRECTION_HORIZONTAL = 0;
cc.SCROLLVIEW_DIRECTION_VERTICAL = 1;
cc.SCROLLVIEW_DIRECTION_BOTH = 2;
cc.TABLEVIEW_FILL_TOPDOWN = 0;
cc.TABLEVIEW_FILL_BOTTOMUP = 1;
/**
* @constant
* @type Number
*/
cc.KEYBOARD_RETURNTYPE_DEFAULT = 0;
/**
* @constant
* @type Number
*/
cc.KEYBOARD_RETURNTYPE_DONE = 1;
/**
* @constant
* @type Number
*/
cc.KEYBOARD_RETURNTYPE_SEND = 2;
/**
* @constant
* @type Number
*/
cc.KEYBOARD_RETURNTYPE_SEARCH = 3;
/**
* @constant
* @type Number
*/
cc.KEYBOARD_RETURNTYPE_GO = 4;
/**
* The EditBox::InputMode defines the type of text that the user is allowed * to enter.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_MODE_ANY = 0;
/**
* The user is allowed to enter an e-mail address.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_MODE_EMAILADDR = 1;
/**
* The user is allowed to enter an integer value.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_MODE_NUMERIC = 2;
/**
* The user is allowed to enter a phone number.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_MODE_PHONENUMBER = 3;
/**
* The user is allowed to enter a URL.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_MODE_URL = 4;
/**
* The user is allowed to enter a real number value.
* This extends kEditBoxInputModeNumeric by allowing a decimal point.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_MODE_DECIMAL = 5;
/**
* The user is allowed to enter any text, except for line breaks.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_MODE_SINGLELINE = 6;
/**
* Indicates that the text entered is confidential data that should be
* obscured whenever possible. This implies EDIT_BOX_INPUT_FLAG_SENSITIVE.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_FLAG_PASSWORD = 0;
/**
* Indicates that the text entered is sensitive data that the
* implementation must never store into a dictionary or table for use
* in predictive, auto-completing, or other accelerated input schemes.
* A credit card number is an example of sensitive data.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_FLAG_SENSITIVE = 1;
/**
* This flag is a hint to the implementation that during text editing,
* the initial letter of each word should be capitalized.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_FLAG_INITIAL_CAPS_WORD = 2;
/**
* This flag is a hint to the implementation that during text editing,
* the initial letter of each sentence should be capitalized.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_FLAG_INITIAL_CAPS_SENTENCE = 3;
/**
* Capitalize all characters automatically.
* @constant
* @type Number
*/
cc.EDITBOX_INPUT_FLAG_INITIAL_CAPS_ALL_CHARACTERS = 4;
cc.CONTROL_EVENT_TOTAL_NUMBER = 9;
cc.CONTROL_EVENT_TOUCH_DOWN = 1 << 0; // A touch-down event in the control.
cc.CONTROL_EVENT_TOUCH_DRAG_INSIDE = 1 << 1; // An event where a finger is dragged inside the bounds of the control.
cc.CONTROL_EVENT_TOUCH_DRAG_OUTSIDE = 1 << 2; // An event where a finger is dragged just outside the bounds of the control.
cc.CONTROL_EVENT_TOUCH_DRAG_ENTER = 1 << 3; // An event where a finger is dragged into the bounds of the control.
cc.CONTROL_EVENT_TOUCH_DRAG_EXIT = 1 << 4; // An event where a finger is dragged from within a control to outside its bounds.
cc.CONTROL_EVENT_TOUCH_UP_INSIDE = 1 << 5; // A touch-up event in the control where the finger is inside the bounds of the control.
cc.CONTROL_EVENT_TOUCH_UP_OUTSIDE = 1 << 6; // A touch-up event in the control where the finger is outside the bounds of the control.
cc.CONTROL_EVENT_TOUCH_CANCEL = 1 << 7; // A system event canceling the current touches for the control.
cc.CONTROL_EVENT_VALUECHANGED = 1 << 8; // A touch dragging or otherwise manipulating a control; causing it to emit a series of different values.
cc.CONTROL_STATE_NORMAL = 1 << 0; // The normal; or default state of a control梩hat is; enabled but neither selected nor highlighted.
cc.CONTROL_STATE_HIGHLIGHTED = 1 << 1; // Highlighted state of a control. A control enters this state when a touch down; drag inside or drag enter is performed. You can retrieve and set this value through the highlighted property.
cc.CONTROL_STATE_DISABLED = 1 << 2; // Disabled state of a control. This state indicates that the control is currently disabled. You can retrieve and set this value through the enabled property.
cc.CONTROL_STATE_SELECTED = 1 << 3; // Selected state of a control. This state indicates that the control is currently selected. You can retrieve and set this value through the selected property.
cc.CONTROL_STATE_INITIAL = 1 << 3;
cc.CONTROL_ZOOM_ACTION_TAG = 0xCCCB0001; //CCControlButton.js
cc.CONTROL_STEPPER_PARTMINUS = 0; //CCControlStepper.js
cc.CONTROL_STEPPER_PARTPLUS = 1;
cc.CONTROL_STEPPER_PARTNONE = 2;
cc.CONTROL_STEPPER_LABELCOLOR_ENABLED = cc.color(55, 55, 55);
cc.CONTROL_STEPPER_LABELCOLOR_DISABLED = cc.color(147, 147, 147);
cc.CONTROL_STEPPER_LABELFONT = "CourierNewPSMT";
cc.AUTOREPEAT_DELTATIME = 0.15;
cc.AUTOREPEAT_INCREASETIME_INCREMENT = 12;
jsb.EventAssetsManager.ERROR_NO_LOCAL_MANIFEST = 0;
jsb.EventAssetsManager.ERROR_DOWNLOAD_MANIFEST = 1;
jsb.EventAssetsManager.ERROR_PARSE_MANIFEST = 2;
jsb.EventAssetsManager.NEW_VERSION_FOUND = 3;
jsb.EventAssetsManager.ALREADY_UP_TO_DATE = 4;
jsb.EventAssetsManager.UPDATE_PROGRESSION = 5;
jsb.EventAssetsManager.ASSET_UPDATED = 6;
jsb.EventAssetsManager.ERROR_UPDATING = 7;
jsb.EventAssetsManager.UPDATE_FINISHED = 8;
jsb.EventAssetsManager.UPDATE_FAILED = 9;
jsb.EventAssetsManager.ERROR_DECOMPRESS = 10;
cc.ScrollView.extend = cc.Class.extend;
cc.TableView.extend = cc.Class.extend;
cc.TableViewCell.extend = cc.Class.extend;<|fim▁end|> | |
<|file_name|>_hoverlabel.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators
class HoverlabelValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(self, plotly_name="hoverlabel", parent_name="sankey", **kwargs):
super(HoverlabelValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Hoverlabel"),
data_docs=kwargs.pop(
"data_docs",
"""
align<|fim▁hole|> only if the hover label text spans more two or
more lines
alignsrc
Sets the source reference on Chart Studio Cloud
for align .
bgcolor
Sets the background color of the hover labels
for this trace
bgcolorsrc
Sets the source reference on Chart Studio Cloud
for bgcolor .
bordercolor
Sets the border color of the hover labels for
this trace.
bordercolorsrc
Sets the source reference on Chart Studio Cloud
for bordercolor .
font
Sets the font used in hover labels.
namelength
Sets the default length (in number of
characters) of the trace name in the hover
labels for all traces. -1 shows the whole name
regardless of length. 0-3 shows the first 0-3
characters, and an integer >3 will show the
whole name if it is less than that many
characters, but if it is longer, will truncate
to `namelength - 3` characters and add an
ellipsis.
namelengthsrc
Sets the source reference on Chart Studio Cloud
for namelength .
""",
),
**kwargs
)<|fim▁end|> | Sets the horizontal alignment of the text
content within hover label box. Has an effect |
<|file_name|>force_https.py<|end_file_name|><|fim▁begin|>"""An example of using a middleware to require HTTPS connections.
requires https://github.com/falconry/falcon-require-https to be installed via
pip install falcon-require-https
"""
import hug
from falcon_require_https import RequireHTTPS
hug.API(__name__).http.add_middleware(RequireHTTPS())<|fim▁hole|>@hug.get()
def my_endpoint():
return "Success!"<|fim▁end|> | |
<|file_name|>walknote.js<|end_file_name|><|fim▁begin|>/**
* React Static Boilerplate
* https://github.com/koistya/react-static-boilerplate
* Copyright (c) Konstantin Tarkus (@koistya) | MIT license
*/
import React, { Component } from 'react'
import { Col, Row } from 'react-bootstrap'
import WorkPageLayout from '../../components/work-page-layout'
import './index.scss'
export default class WorkPage extends Component {
static title = 'walknote'
static image = '/works/walknote_eyecatch.png'
static description = 'Free music discovery player for iOS'
render() {
return (
<WorkPageLayout
title={WorkPage.title}
eyecatch="/works/walknote_eyecatch.png"
>
<Row>
<Col sm={6}>
<div>
<img
src="/works/walknote_01.png"
className="image-screenshot"
alt="image screenshot"
/>
</div>
</Col>
<Col sm={6}>
<div>
<img
src="/works/walknote_02.png"
className="image-screenshot"
alt="image screenshot"
/>
</div>
</Col>
</Row>
<div>
<img
src="/works/walknote_05.png"
className="image-screenshot"
alt="image screenshot"
/>
</div>
<div>
<img
src="/works/walknote_03.png"
className="image-screenshot"
alt="image screenshot"
/>
</div>
<div>
<img
src="/works/walknote_04.png"
className="image-screenshot"
alt="image screenshot"
/>
</div>
<h3>walknote (2011-2016)</h3>
<div className="work-description">
<div>好みを理解して推薦する無料で聴き放題な音楽プレーヤー</div>
<div>Free music discovery player for iOS</div>
</div>
<div className="work-long-description">
<p>
13万人超が使う音楽アプリ。
あなたのiPhoneに入っている曲から好みを理解して、新しい曲を提示。
まるでラジオのように推薦曲を聴いて楽しめる!
</p>
<p>
※本サービスは終了しました。 詳細は
<a href="http://blog.odoruinu.net/2016/09/06/farewell-from-walknote/">
こちら
</a>
。
</p>
</div>
<div className="work-long-description">
<p>
walknote recommends new music you may like based on your music
preferences by recognizing your favorite songs stored in your
device. You can listen to recommended music just like a radio!
</p>
<p>
This service has been closed. Thank you for over 130,000 registered
users!
</p>
</div>
<h3>掲載実績</h3>
<div className="work-description">
<ul>
<li>
<a
href="http://renewal49.hateblo.jp/entry/20120710/1341925681"
target="_blank"
rel="noopener noreferrer"
>
強力すぎて紹介しそびれていた音楽好きのための神アプリ『walknote』
- リニューアル式様
</a>
</li>
<li>
<a
href="http://www.appbank.net/2011/10/15/iphone-application/309349.php"
target="_blank"<|fim▁hole|> - appbank様
</a>
</li>
<li>
<a
href="http://www.danshihack.com/2012/07/18/junp/iphoneapp-walknote.html"
target="_blank"
rel="noopener noreferrer"
>
おすすめの音楽をレコメンド!ストリーミング再生してくれるiPhoneアプリ「walknote」が素敵。
- 男子ハック様
</a>
</li>
<li>
<a
href="http://www.tabroid.jp/app/multimedia/2013/05/app.walknote.html"
target="_blank"
rel="noopener noreferrer"
>
「YOU、これ聴いちゃいなよ」自分好みの曲が勝手に集まる音楽プレーヤー『walknote』
- タブロイド様
</a>
</li>
<li>その他、多数</li>
</ul>
</div>
</WorkPageLayout>
)
}
}<|fim▁end|> | rel="noopener noreferrer"
>
walknote:
CD屋の試聴機が、自分向けになって手元に到着。そんな曲探しアプリ。無料。 |
<|file_name|>base.py<|end_file_name|><|fim▁begin|>"""
Django settings for tiny_hands_pac project.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
import os
from os.path import abspath, dirname, join, normpath
from sys import path
from django.core.exceptions import ImproperlyConfigured
def get_env_variable(var_name):
""" Get the environment variable or return exception """
try:
return os.environ[var_name]
except KeyError:
error_msg = "Set the %s environment variable" % var_name
raise ImproperlyConfigured(error_msg)
# Absolute filesystem path to the Django project directory:
DJANGO_ROOT = dirname(dirname(abspath(__file__)))
# Absolute filesystem path to the top-level project folder:
PROJECT_ROOT = dirname(DJANGO_ROOT)
# Add our project to our pythonpath, this way we don't need to type our project
# name in our dotted import paths:
path.append(DJANGO_ROOT)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# Do not set SECRET_KEY or LDAP password or any other sensitive data here.
# Instead, create a local.py file on the server.
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'compressor',
'taggit',
'modelcluster',
'wagtail.contrib.wagtailsitemaps',
'wagtail.contrib.wagtailsearchpromotions',
'wagtail.wagtailforms',
'wagtail.wagtailredirects',
'wagtail.wagtailembeds',
'wagtail.wagtailsites',
'wagtail.wagtailusers',
'wagtail.wagtailsnippets',
'wagtail.wagtaildocs',
'wagtail.wagtailimages',
'wagtail.wagtailsearch',
'wagtail.wagtailadmin',
'wagtail.wagtailcore',
'wagtail.contrib.settings',
'wagtailfontawesome',
'utils',
'pages',
'blog',
'events',
'contact',
'people',
'photo_gallery',
'products',
'documents_gallery',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'wagtail.wagtailcore.middleware.SiteMiddleware',
'wagtail.wagtailredirects.middleware.RedirectMiddleware',
)
ROOT_URLCONF = 'tiny_hands_pac.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'debug' : DEBUG,
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'pages.context_processors.site_url',
],
},
},
]
WSGI_APPLICATION = 'tiny_hands_pac.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'tiny_hands_pac',
'USER': '',
'HOST': '', # Set to empty string for localhost.
'PORT': '', # Set to empty string for default.
'CONN_MAX_AGE': 600,
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-gb'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_ROOT = join(PROJECT_ROOT, 'static')
STATIC_URL = '/static/'
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder',
)
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
MEDIA_ROOT = join(PROJECT_ROOT, 'media')
MEDIA_URL = '/files/'
# Django compressor settings
# http://django-compressor.readthedocs.org/en/latest/settings/
COMPRESS_PRECOMPILERS = (
('text/x-scss', 'django_libsass.SassCompiler'),
)
COMPRESS_OFFLINE = True
# Feeds app for Wagtail CMS
FEED_APP_LABEL = 'blog'
FEED_MODEL_NAME = 'BlogPage'
FEED_ITEM_DESCRIPTION_FIELD = 'intro'
FEED_ITEM_CONTENT_FIELD = 'body'
FEED_TITLE = 'Tiny Hands Big News'
FEED_LINK = '/news/'
FEED_DESCRIPTION = ""
FEED_AUTHOR_EMAIL = '[email protected]'
FEED_AUTHOR_LINK = 'https://www.donaldtrumphastinyhands.com'
# Settings for wagalytics
GA_KEY_FILEPATH = ''
GA_VIEW_ID = ''
# Google Maps Key
GOOGLE_MAPS_KEY = ''
DYNAMIC_MAP_URL = ''
STATIC_MAP_URL = ''
# Facebook Open Tags
FB_SITE_NAME = ''
FB_URL = ''
FB_DESCRIPTION = ''
FB_APP_ID = ''
# Twitter Cards
TWITTER_URL = ''
TWITTER_CREATOR = ''
TWITTER_DESCRIPTION = ''
# Use Redis as the cache backend for extra performance
# CACHES = {
# 'default': {
# 'BACKEND': 'redis_cache.cache.RedisCache',
# 'LOCATION': '127.0.0.1:6379',
# 'KEY_PREFIX': 'tiny_hands_pac',
# 'OPTIONS': {
# 'CLIENT_CLASS': 'redis_cache.client.DefaultClient',
# }
# }
# }
# Wagtail settings
LOGIN_URL = 'wagtailadmin_login'
LOGIN_REDIRECT_URL = 'wagtailadmin_home'
WAGTAIL_SITE_NAME = "Tiny Hands PAC"
<|fim▁hole|># Use Elasticsearch as the search backend for extra performance and better search results
# WAGTAILSEARCH_BACKENDS = {
# 'default': {
# 'BACKEND': 'wagtail.wagtailsearch.backends.elasticsearch.ElasticSearch',
# 'INDEX': 'tiny_hands_pac',
# },
# }
# Celery settings
# When you have multiple sites using the same Redis server,
# specify a different Redis DB. e.g. redis://localhost/5
BROKER_URL = 'redis://'
CELERY_SEND_TASK_ERROR_EMAILS = True
CELERYD_LOG_COLOR = False<|fim▁end|> | WAGTAILSEARCH_RESULTS_TEMPLATE = 'utils/tags/search/search_results.html'
|
<|file_name|>manticore_protocol_cerberus_DeviceUptime__req_from_wire.rs<|end_file_name|><|fim▁begin|>// Copyright lowRISC contributors.
// Licensed under the Apache License, Version 2.0, see LICENSE for details.
// SPDX-License-Identifier: Apache-2.0
// !! DO NOT EDIT !!
// To regenerate this file, run `fuzz/generate_proto_tests.py`.
#![no_main]
#![allow(non_snake_case)]
use libfuzzer_sys::fuzz_target;
use manticore::mem::BumpArena;
use manticore::protocol::Command;
use manticore::protocol::wire::FromWire;<|fim▁hole|>fuzz_target!(|data: &[u8]| {
let mut arena = vec![0; data.len()];
let arena = BumpArena::new(&mut arena);
let mut data = data;
let _ = <C as Command<'_>>::Req::from_wire(&mut data, &arena);
});<|fim▁end|> |
use manticore::protocol::cerberus::DeviceUptime as C;
|
<|file_name|>route_network_test.go<|end_file_name|><|fim▁begin|>// Copyright 2017 flannel authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// +build !windows
package backend
import (
"net"
"testing"
"github.com/coreos/flannel/pkg/ip"
"github.com/coreos/flannel/pkg/ns"
"github.com/coreos/flannel/subnet"
"github.com/vishvananda/netlink"
)
func TestRouteCache(t *testing.T) {
teardown := ns.SetUpNetlinkTest(t)
defer teardown()
lo, err := netlink.LinkByName("lo")
if err != nil {
t.Fatal(err)
}
if err := netlink.AddrAdd(lo, &netlink.Addr{IPNet: &net.IPNet{IP: net.ParseIP("127.0.0.1"), Mask: net.CIDRMask(32, 32)}}); err != nil {
t.Fatal(err)
}
if err := netlink.LinkSetUp(lo); err != nil {
t.Fatal(err)
}
nw := RouteNetwork{
SimpleNetwork: SimpleNetwork{
ExtIface: &ExternalInterface{Iface: &net.Interface{Index: lo.Attrs().Index}},
},
BackendType: "host-gw",
LinkIndex: lo.Attrs().Index,
}
nw.GetRoute = func(lease *subnet.Lease) *netlink.Route {
return &netlink.Route{
Dst: lease.Subnet.ToIPNet(),
Gw: lease.Attrs.PublicIP.ToIP(),
LinkIndex: nw.LinkIndex,
}
}
gw1, gw2 := ip.FromIP(net.ParseIP("127.0.0.1")), ip.FromIP(net.ParseIP("127.0.0.2"))
subnet1 := ip.IP4Net{IP: ip.FromIP(net.ParseIP("192.168.0.0")), PrefixLen: 24}
nw.handleSubnetEvents([]subnet.Event{
{Type: subnet.EventAdded, Lease: subnet.Lease{
Subnet: subnet1, Attrs: subnet.LeaseAttrs{PublicIP: gw1, BackendType: "host-gw"}}},
})
if len(nw.routes) != 1 {
t.Fatal(nw.routes)
}
if !routeEqual(nw.routes[0], netlink.Route{Dst: subnet1.ToIPNet(), Gw: gw1.ToIP(), LinkIndex: lo.Attrs().Index}) {<|fim▁hole|> {Type: subnet.EventAdded, Lease: subnet.Lease{
Subnet: subnet1, Attrs: subnet.LeaseAttrs{PublicIP: gw2, BackendType: "host-gw"}}}})
if len(nw.routes) != 1 {
t.Fatal(nw.routes)
}
if !routeEqual(nw.routes[0], netlink.Route{Dst: subnet1.ToIPNet(), Gw: gw2.ToIP(), LinkIndex: lo.Attrs().Index}) {
t.Fatal(nw.routes[0])
}
}<|fim▁end|> | t.Fatal(nw.routes[0])
}
// change gateway of previous route
nw.handleSubnetEvents([]subnet.Event{ |
<|file_name|>Main.java<|end_file_name|><|fim▁begin|>package cz.devaty.projects.gio;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.stream.Collectors;
public class Main {
/**
* předmět a -> 3., 7.O
* předmět b -> 4., 8.O
* předmět c -> 3., 7.O, 4., 8.O
*/
private static String FILE = "a.csv";
private static char[] groups = {'A', 'B',}; //'C', 'D', 'E'};
private static ArrayList<Student> students;
private static ArrayList<Seminar> seminars;
public static void main(String[] args) {
try {
loadData();
seminars = seminars.stream().distinct().collect(Collectors.toCollection(ArrayList::new));
computeGroups();
} catch (FileNotFoundException e) {
System.out.println("Error loading data");
} catch (CloneNotSupportedException e) {
e.printStackTrace();
}
}
private static void computeGroups() throws CloneNotSupportedException {
//variace s opakováním
for (int i = 0; i < 12; i++) {
seminars.remove(0);
}
ArrayList<ArrayList<Seminar>> variations = new ArrayList<>();
brute(variations, 0);
}
private static void brute(ArrayList<ArrayList<Seminar>> variations, int count) throws CloneNotSupportedException {
if (count < seminars.size()) {
for (int i = 0; i < groups.length; i++) {
seminars.get(count).setGroup(groups[i]);
brute(variations, count + 1);
}<|fim▁hole|> //defensive copy
ArrayList<Seminar> sem = new ArrayList<>();
for (int i = 0; i < seminars.size(); i++) {
sem.add(seminars.get(i).clone());
}
variations.add(sem);
}
}
private static double countConflicts(int lastIndex) {
double result = 0;
for (int i = 0; i < lastIndex; i++) {
result += Student.conflictsPerStudent(students.get(i));
}
return result;
}
private static void loadData() throws FileNotFoundException {
seminars = new ArrayList<>();
students = new ArrayList<>();
BufferedReader in = new BufferedReader(new FileReader(FILE));
while (true) {
try {
String s = in.readLine();
if (s == null) return;
String[] line = s.split(";");
ArrayList<Seminar> sem = new ArrayList<>();
for (int i = 2; i < line.length; i++) {
sem.add(new Seminar(line[i]));
seminars.add(new Seminar(line[i]));
}
students.add(new Student(line[1], line[2], sem));
} catch (IOException e) {
return;
}
}
}
}<|fim▁end|> | } else { |
<|file_name|>cross-global-for-in.js<|end_file_name|><|fim▁begin|><|fim▁hole|>var ArrayIteratorPrototype = Object.getPrototypeOf(arrayIter);
var arrayIterProtoBase = Object.getPrototypeOf(ArrayIteratorPrototype);
var IteratorPrototype = arrayIterProtoBase;
delete IteratorPrototype.next;
var obj = global.eval('({a: 1})')
for (var x in obj) {}
assertEq(x, "a");<|fim▁end|> | var global = newGlobal();
var arrayIter = (new global.Array())[global.Symbol.iterator](); |
<|file_name|>image.go<|end_file_name|><|fim▁begin|>package astar
import (
"os"
"image"
//"fmt"
)
import _ "image/png"
func openImage(filename string) (image.Image) {
f, err := os.Open(filename)
if err != nil {
return nil
}
defer f.Close()
img, _, _ := image.Decode(f)
return img
}
func parseImage(img image.Image) MapData {
max := uint32(65536-1) // 2^16-1
bounds := img.Bounds()
map_data := NewMapData(bounds.Max.X, bounds.Max.Y)
for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
for x := bounds.Min.X; x < bounds.Max.X; x++ {
r, g, b, a := img.At(x, y).RGBA()
if(r == max && g == max && b == max && a == max) {
map_data[x][bounds.Max.Y-1-y] = LAND
//fmt.Printf(".")
} else {
map_data[x][bounds.Max.Y-1-y] = WALL
//fmt.Printf("#")<|fim▁hole|> //fmt.Println("")
}
return map_data
}
func GetMapFromImage(filename string) MapData {
img := openImage(filename)
if(img == nil) {
return nil
}
return parseImage(img)
}<|fim▁end|> | }
} |
<|file_name|>theodolite.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core';
import { TheodoliteComponent } from './theodolite/theodolite.component';
import { MarkdownSlideComponent } from './slide/markdown/markdownSlide.component';
import { CodeSlideComponent } from './slide/code/codeSlide.component';
import { PresentationComponent } from './presentation/presentation.component';
import { MarkdownService } from './markdown/markdown.service';
import { ParseService } from './parse/parse.service';
import { BrowserModule } from '@angular/platform-browser';<|fim▁hole|>import { AboutComponent } from './about/about.component';
import { ModalComponent } from './modal/modal.component';
import { SlideComponent } from './slide/slide.component';
import { DefaultValuePipe } from './common/default.pipe';
import { HighlightService } from './highlight/highlight.service';
import { EventsService } from './events/events.service';
import { ProgressComponent } from './progress/progress.component';
import { HmsPipe } from "./common/hms.pipe";
import {TimerComponent} from "./timer/timer.component";
import {PugSlideComponent} from "./slide/pug/pugSlide.component";
@NgModule({
imports: [ BrowserModule ],
declarations: [ TheodoliteComponent, PresentationComponent, SlideComponent, MarkdownSlideComponent,
CodeSlideComponent, ControlsComponent, AboutComponent, ModalComponent, DefaultValuePipe,
ProgressComponent, HmsPipe, TimerComponent, PugSlideComponent ],
bootstrap: [ TheodoliteComponent ],
providers: [ EventsService, HighlightService, MarkdownService, ParseService ]
})
export class TheodoliteModule {
constructor() {
}
}<|fim▁end|> | import { ControlsComponent } from './controls/controls.component'; |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate gl_generator;
extern crate khronos_api;
use std::env;
use std::fs::File;
use std::io::BufReader;
use std::io::Write;
use std::path::Path;
use gl_generator::generators::Generator;
mod textures;
fn main() {
let dest = env::var("OUT_DIR").unwrap();
let dest = Path::new(&dest);
textures::build_texture_file(&mut File::create(&dest.join("textures.rs")).unwrap());
// There is a `#[derive(Clone)]` line in the bindings that triggers a stack overflow
// in rustc (https://github.com/rust-lang/rust/issues/26467).
// Therefore we write the bindings to memory first, then remove this line, and then copy
// to the file.
let mut gl_bindings = Vec::new();
generate_gl_bindings(&mut gl_bindings);
let gl_bindings = String::from_utf8(gl_bindings).unwrap();
let gl_bindings = gl_bindings.replace("#[derive(Clone)]", "");
let mut file_output = File::create(&dest.join("gl_bindings.rs")).unwrap();
file_output.write_all(&gl_bindings.into_bytes()).unwrap();
}
fn generate_gl_bindings<W>(dest: &mut W) where W: Write {
let gl_registry = {
let reader = BufReader::new(khronos_api::GL_XML);
let ns = gl_generator::registry::Ns::Gl;
let filter = gl_generator::registry::Filter {
fallbacks: gl_generator::Fallbacks::None,
api: gl_generator::registry::Ns::Gl.to_string(),
extensions: vec![
"GL_AMD_depth_clamp_separate".to_string(),
"GL_APPLE_vertex_array_object".to_string(),
"GL_ARB_bindless_texture".to_string(),
"GL_ARB_buffer_storage".to_string(),
"GL_ARB_compute_shader".to_string(),
"GL_ARB_copy_buffer".to_string(),
"GL_ARB_debug_output".to_string(),
"GL_ARB_depth_texture".to_string(),
"GL_ARB_direct_state_access".to_string(),
"GL_ARB_draw_buffers".to_string(),
"GL_ARB_ES2_compatibility".to_string(),
"GL_ARB_ES3_compatibility".to_string(),
"GL_ARB_ES3_1_compatibility".to_string(),
"GL_ARB_ES3_2_compatibility".to_string(),
"GL_ARB_framebuffer_sRGB".to_string(),
"GL_ARB_geometry_shader4".to_string(),
"GL_ARB_gpu_shader_fp64".to_string(),
"GL_ARB_gpu_shader_int64".to_string(),
"GL_ARB_invalidate_subdata".to_string(),
"GL_ARB_multi_draw_indirect".to_string(),
"GL_ARB_occlusion_query".to_string(),
"GL_ARB_pixel_buffer_object".to_string(),
"GL_ARB_robustness".to_string(),
"GL_ARB_shader_image_load_store".to_string(),
"GL_ARB_shader_objects".to_string(),
"GL_ARB_texture_buffer_object".to_string(),
"GL_ARB_texture_float".to_string(),
"GL_ARB_texture_multisample".to_string(),
"GL_ARB_texture_rg".to_string(),
"GL_ARB_texture_rgb10_a2ui".to_string(),
"GL_ARB_transform_feedback3".to_string(),
"GL_ARB_vertex_buffer_object".to_string(),
"GL_ARB_vertex_shader".to_string(),
"GL_ATI_draw_buffers".to_string(),
"GL_ATI_meminfo".to_string(),
"GL_EXT_debug_marker".to_string(),
"GL_EXT_direct_state_access".to_string(),
"GL_EXT_framebuffer_blit".to_string(),
"GL_EXT_framebuffer_multisample".to_string(),
"GL_EXT_framebuffer_object".to_string(),
"GL_EXT_framebuffer_sRGB".to_string(),
"GL_EXT_gpu_shader4".to_string(),
"GL_EXT_packed_depth_stencil".to_string(),
"GL_EXT_provoking_vertex".to_string(),
"GL_EXT_texture_array".to_string(),
"GL_EXT_texture_buffer_object".to_string(),
"GL_EXT_texture_compression_s3tc".to_string(),
"GL_EXT_texture_filter_anisotropic".to_string(),
"GL_EXT_texture_integer".to_string(),
"GL_EXT_texture_sRGB".to_string(),
"GL_EXT_transform_feedback".to_string(),
"GL_GREMEDY_string_marker".to_string(),
"GL_KHR_robustness".to_string(),
"GL_NVX_gpu_memory_info".to_string(),
"GL_NV_conditional_render".to_string(),
"GL_NV_vertex_attrib_integer_64bit".to_string(),
],
version: "4.5".to_string(),
profile: "compatibility".to_string(),
};
gl_generator::registry::Registry::from_xml(reader, ns, Some(filter))
};
let gles_registry = {
let reader = BufReader::new(khronos_api::GL_XML);
let ns = gl_generator::registry::Ns::Gles2;
let filter = gl_generator::registry::Filter {
fallbacks: gl_generator::Fallbacks::None,
api: gl_generator::registry::Ns::Gles2.to_string(),
extensions: vec![
"GL_ANGLE_framebuffer_multisample".to_string(),
"GL_APPLE_framebuffer_multisample".to_string(),
"GL_APPLE_sync".to_string(),
"GL_ARM_rgba8".to_string(),
"GL_EXT_buffer_storage".to_string(),
"GL_EXT_disjoint_timer_query".to_string(),
"GL_EXT_multi_draw_indirect".to_string(),
"GL_EXT_multisampled_render_to_texture".to_string(),
"GL_EXT_occlusion_query_boolean".to_string(),
"GL_EXT_primitive_bounding_box".to_string(),
"GL_EXT_robustness".to_string(),
"GL_KHR_debug".to_string(),
"GL_NV_copy_buffer".to_string(),
"GL_NV_framebuffer_multisample".to_string(),
"GL_NV_pixel_buffer_object".to_string(),
"GL_OES_depth_texture".to_string(),
"GL_OES_draw_elements_base_vertex".to_string(),
"GL_OES_packed_depth_stencil".to_string(),
"GL_OES_primitive_bounding_box".to_string(),
"GL_OES_rgb8_rgba8".to_string(),
"GL_OES_texture_buffer".to_string(),
"GL_OES_texture_npot".to_string(),
"GL_OES_vertex_array_object".to_string(),
"GL_OES_vertex_type_10_10_10_2".to_string(),
],
version: "3.2".to_string(),
profile: "compatibility".to_string(),
};
<|fim▁hole|> gl_generator::StructGenerator.write(&(gl_registry + gles_registry),
gl_generator::registry::Ns::Gl, dest).unwrap();
}<|fim▁end|> | gl_generator::registry::Registry::from_xml(reader, ns, Some(filter))
};
|
<|file_name|>dnssec_keyscan.go<|end_file_name|><|fim▁begin|>package dns
import (
"crypto"
"crypto/dsa"
"crypto/ecdsa"
"crypto/rsa"
"io"
"math/big"
"strconv"
"strings"
)
// NewPrivateKey returns a PrivateKey by parsing the string s.
// s should be in the same form of the BIND private key files.
func (k *DNSKEY) NewPrivateKey(s string) (crypto.PrivateKey, error) {
if s == "" || s[len(s)-1] != '\n' { // We need a closing newline
return k.ReadPrivateKey(strings.NewReader(s+"\n"), "")
}
return k.ReadPrivateKey(strings.NewReader(s), "")
}
// ReadPrivateKey reads a private key from the io.Reader q. The string file is
// only used in error reporting.
// The public key must be known, because some cryptographic algorithms embed
// the public inside the privatekey.
func (k *DNSKEY) ReadPrivateKey(q io.Reader, file string) (crypto.PrivateKey, error) {
m, err := parseKey(q, file)
if m == nil {
return nil, err
}
if _, ok := m["private-key-format"]; !ok {
return nil, ErrPrivKey
}
if m["private-key-format"] != "v1.2" && m["private-key-format"] != "v1.3" {
return nil, ErrPrivKey
}
// TODO(mg): check if the pubkey matches the private key
algo, err := strconv.Atoi(strings.SplitN(m["algorithm"], " ", 2)[0])
if err != nil {
return nil, ErrPrivKey
}
switch uint8(algo) {
case DSA:
priv, err := readPrivateKeyDSA(m)
if err != nil {
return nil, err
}
pub := k.publicKeyDSA()
if pub == nil {
return nil, ErrKey
}
priv.PublicKey = *pub
return priv, nil
case RSAMD5:
fallthrough
case RSASHA1:
fallthrough
case RSASHA1NSEC3SHA1:
fallthrough
case RSASHA256:
fallthrough
case RSASHA512:
priv, err := readPrivateKeyRSA(m)
if err != nil {
return nil, err
}
pub := k.publicKeyRSA()
if pub == nil {
return nil, ErrKey
}
priv.PublicKey = *pub
return priv, nil
case ECCGOST:
return nil, ErrPrivKey
case ECDSAP256SHA256:
fallthrough
case ECDSAP384SHA384:
priv, err := readPrivateKeyECDSA(m)
if err != nil {
return nil, err
}
pub := k.publicKeyECDSA()
if pub == nil {
return nil, ErrKey
}
priv.PublicKey = *pub
return priv, nil
default:
return nil, ErrPrivKey
}
}
// Read a private key (file) string and create a public key. Return the private key.
func readPrivateKeyRSA(m map[string]string) (*rsa.PrivateKey, error) {
p := new(rsa.PrivateKey)
p.Primes = []*big.Int{nil, nil}
for k, v := range m {
switch k {
case "modulus", "publicexponent", "privateexponent", "prime1", "prime2":
v1, err := fromBase64([]byte(v))
if err != nil {
return nil, err
}
switch k {
case "modulus":
p.PublicKey.N = big.NewInt(0)
p.PublicKey.N.SetBytes(v1)
case "publicexponent":
i := big.NewInt(0)
i.SetBytes(v1)
p.PublicKey.E = int(i.Int64()) // int64 should be large enough
case "privateexponent":
p.D = big.NewInt(0)
p.D.SetBytes(v1)
case "prime1":
p.Primes[0] = big.NewInt(0)
p.Primes[0].SetBytes(v1)
case "prime2":
p.Primes[1] = big.NewInt(0)
p.Primes[1].SetBytes(v1)
}
case "exponent1", "exponent2", "coefficient":
// not used in Go (yet)
case "created", "publish", "activate":
// not used in Go (yet)
}
}
return p, nil
}
func readPrivateKeyDSA(m map[string]string) (*dsa.PrivateKey, error) {
p := new(dsa.PrivateKey)
p.X = big.NewInt(0)
for k, v := range m {
switch k {
case "private_value(x)":
v1, err := fromBase64([]byte(v))
if err != nil {
return nil, err
}
p.X.SetBytes(v1)
case "created", "publish", "activate":
/* not used in Go (yet) */
}
}
return p, nil
}
func readPrivateKeyECDSA(m map[string]string) (*ecdsa.PrivateKey, error) {
p := new(ecdsa.PrivateKey)
p.D = big.NewInt(0)
// TODO: validate that the required flags are present
for k, v := range m {
switch k {
case "privatekey":
v1, err := fromBase64([]byte(v))
if err != nil {
return nil, err
}
p.D.SetBytes(v1)
case "created", "publish", "activate":
/* not used in Go (yet) */
}
}
return p, nil
}
// parseKey reads a private key from r. It returns a map[string]string,
// with the key-value pairs, or an error when the file is not correct.
func parseKey(r io.Reader, file string) (map[string]string, error) {
s := scanInit(r)
m := make(map[string]string)
c := make(chan lex)
k := ""
// Start the lexer
go klexer(s, c)
for l := range c {
// It should alternate
switch l.value {
case zKey:
k = l.token
case zValue:
if k == "" {
return nil, &ParseError{file, "no private key seen", l}
}
//println("Setting", strings.ToLower(k), "to", l.token, "b")
m[strings.ToLower(k)] = l.token
k = ""
}
}
return m, nil
}
// klexer scans the sourcefile and returns tokens on the channel c.
func klexer(s *scan, c chan lex) {
var l lex
str := "" // Hold the current read text
commt := false
key := true
x, err := s.tokenText()
defer close(c)
for err == nil {
l.column = s.position.Column
l.line = s.position.Line
switch x {
case ':':
if commt {
break
}
l.token = str
if key {
l.value = zKey
c <- l
// Next token is a space, eat it
s.tokenText()
key = false
str = ""
} else {
l.value = zValue
}
case ';':
commt = true
case '\n':
if commt {
// Reset a comment
commt = false<|fim▁hole|> c <- l
str = ""
commt = false
key = true
default:
if commt {
break
}
str += string(x)
}
x, err = s.tokenText()
}
if len(str) > 0 {
// Send remainder
l.token = str
l.value = zValue
c <- l
}
}<|fim▁end|> | }
l.value = zValue
l.token = str |
<|file_name|>struct-destructuring-cross-crate.rs<|end_file_name|><|fim▁begin|>// run-pass
// aux-build:struct_destructuring_cross_crate.rs
extern crate struct_destructuring_cross_crate;
pub fn main() {
let x = struct_destructuring_cross_crate::S { x: 1, y: 2 };
let struct_destructuring_cross_crate::S { x: a, y: b } = x;
assert_eq!(a, 1);
assert_eq!(b, 2);<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>constants.py<|end_file_name|><|fim▁begin|>"""Store various constants here"""
from enum import Enum
# Maximum file upload size (in bytes).
MAX_CONTENT_LENGTH = 1 * 1024 * 1024 * 1024
# Authentication/account creation constants
PWD_HASH_ALGORITHM = 'pbkdf2_sha256'
SALT_SIZE = 24
MIN_USERNAME_LENGTH = 2
MAX_USERNAME_LENGTH = 32
MIN_PASSWORD_LENGTH = 8
MAX_PASSWORD_LENGTH = 1024
HASH_ROUNDS = 100000
PWD_RESET_KEY_LENGTH = 32
# Length of time before recovery key expires, in minutes.
PWD_RESET_KEY_EXPIRATION = 1 * 24 * 60
CREATE_ACCOUNT_KEY_LENGTH = 32
class Gender(Enum):
"""Value of members.gender if member's gender is unknown"""
NO_GENDER = None
"""Value of members.gender if member is female"""
FEMALE = 0<|fim▁hole|> MALE = 1
CONTACTS = {
'Administration': [{
'name': 'Kevin Gilmartin',
'role': 'Dean of Undergraduate Students',
'email': '[email protected]'
}, {
'name': 'Lesley Nye',
'role': 'Dean of Undergraduate Students',
'email': '[email protected]'
}, {
'name': 'Kristin Weyman',
'role': 'Associate Dean of Undergraduate Students',
'email': '[email protected]'
}, {
'name': 'Beth Larranaga',
'role': 'Office Manager',
'email': '[email protected]'
}, {
'name': 'Sara Loredo',
'role': 'Office Assistant',
'email': '[email protected]'
}],
'Student Life': [{
'name':
'Tom Mannion',
'role':
'Senior Director, Student Activities and Programs',
'email':
'[email protected]'
}, {
'name': 'Joe Shepherd',
'role': 'Vice President for Student Affairs',
'email': '[email protected]'
}, {
'name':
'Felicia Hunt',
'role':
'Assistant Vice President for Student Affairs and Residential Experience',
'email':
'[email protected]'
}, {
'name': 'Maria Katsas',
'role': 'Director of Housing',
'email': '[email protected]'
}, {
'name':
'Allie McIntosh',
'role':
'Community Educator and Deputy Title IX Coordinator',
'email':
'[email protected]'
}, {
'name': 'Jaime Reyes',
'role': 'Acting Director of Dining Services',
'email': '[email protected]'
}]
}<|fim▁end|> | """Value of members.gender if member is male""" |
<|file_name|>test_packing.py<|end_file_name|><|fim▁begin|>import pytest
import pwny
target_little_endian = pwny.Target(arch=pwny.Target.Arch.unknown, endian=pwny.Target.Endian.little)
target_big_endian = pwny.Target(arch=pwny.Target.Arch.unknown, endian=pwny.Target.Endian.big)
def test_pack():
assert pwny.pack('I', 0x41424344) == b'DCBA'
def test_pack_format_with_endian():
assert pwny.pack('>I', 0x41424344) == b'ABCD'
def test_pack_explicit_endian():
assert pwny.pack('I', 0x41424344, endian=pwny.Target.Endian.big) == b'ABCD'
def test_pack_explicit_target():
assert pwny.pack('I', 0x41424344, target=target_big_endian) == b'ABCD'
@pytest.mark.xfail(raises=NotImplementedError)
def test_pack_invalid_endian():
pwny.pack('I', 1, endian='invalid')
def test_unpack():
assert pwny.unpack('I', b'DCBA') == (0x41424344,)
def test_unpack_format_with_endian():
assert pwny.unpack('>I', b'ABCD') == (0x41424344,)
def test_unpack_explicit_endian():
assert pwny.unpack('I', b'ABCD', endian=pwny.Target.Endian.big) == (0x41424344,)
def test_unpack_explicit_target():
assert pwny.unpack('I', b'ABCD', target=target_big_endian) == (0x41424344,)
@pytest.mark.xfail(raises=NotImplementedError)
def test_unpack_invalid_endian():
pwny.unpack('I', 'AAAA', endian='invalid')
def test_pack_size():
# This tests both pack_size in general as well as not padding the byte.
assert pwny.pack_size('bq') == 9
short_signed_data = [
[8, -0x7f, b'\x81'],
[16, -0x7fff, b'\x80\x01'],
[32, -0x7fffffff, b'\x80\x00\x00\x01'],
[64, -0x7fffffffffffffff, b'\x80\x00\x00\x00\x00\x00\x00\x01'],
]
short_unsigned_data = [
[8, 0x61, b'a'],
[16, 0x6162, b'ab'],
[32, 0x61626364, b'abcd'],
[64, 0x6162636465666768, b'abcdefgh'],
]
def test_short_form_pack():
for width, num, bytestr in short_signed_data:
f = 'p%d' % width
yield check_short_form_pack, f, num, bytestr[::-1]
yield check_short_form_pack_endian, f, num, bytestr[::-1], pwny.Target.Endian.little
yield check_short_form_pack_endian, f, num, bytestr, pwny.Target.Endian.big
for width, num, bytestr in short_unsigned_data:
f = 'P%d' % width
yield check_short_form_pack, f, num, bytestr[::-1]
yield check_short_form_pack_endian, f, num, bytestr[::-1], pwny.Target.Endian.little
yield check_short_form_pack_endian, f, num, bytestr, pwny.Target.Endian.big
def test_short_form_unpack():
for width, num, bytestr in short_signed_data:
f = 'u%d' % width
yield check_short_form_unpack, f, num, bytestr[::-1]
yield check_short_form_unpack_endian, f, num, bytestr[::-1], pwny.Target.Endian.little
yield check_short_form_unpack_endian, f, num, bytestr, pwny.Target.Endian.big
for width, num, bytestr in short_unsigned_data:
f = 'U%d' % width
yield check_short_form_unpack, f, num, bytestr[::-1]
yield check_short_form_unpack_endian, f, num, bytestr[::-1], pwny.Target.Endian.little
yield check_short_form_unpack_endian, f, num, bytestr, pwny.Target.Endian.big
def test_pointer_pack():
yield check_short_form_pack, 'p', -66052, b'\xfc\xfd\xfe\xff'<|fim▁hole|> yield check_short_form_pack_endian, 'P', 4294901244, b'\xfc\xfd\xfe\xff', pwny.Target.Endian.little
yield check_short_form_pack_endian, 'P', 4294901244, b'\xff\xfe\xfd\xfc', pwny.Target.Endian.big
def test_pointer_unpack():
yield check_short_form_unpack, 'u', -66052, b'\xfc\xfd\xfe\xff'
yield check_short_form_unpack_endian, 'u', -66052, b'\xfc\xfd\xfe\xff', pwny.Target.Endian.little
yield check_short_form_unpack_endian, 'u', -66052, b'\xff\xfe\xfd\xfc', pwny.Target.Endian.big
yield check_short_form_unpack, 'U', 4294901244, b'\xfc\xfd\xfe\xff'
yield check_short_form_unpack_endian, 'U', 4294901244, b'\xfc\xfd\xfe\xff', pwny.Target.Endian.little
yield check_short_form_unpack_endian, 'U', 4294901244, b'\xff\xfe\xfd\xfc', pwny.Target.Endian.big
def check_short_form_pack(f, num, bytestr):
assert getattr(pwny, f)(num) == bytestr
def check_short_form_pack_endian(f, num, bytestr, endian):
assert getattr(pwny, f)(num, endian=endian) == bytestr
def check_short_form_unpack(f, num, bytestr):
assert getattr(pwny, f)(bytestr) == num
def check_short_form_unpack_endian(f, num, bytestr, endian):
assert getattr(pwny, f)(bytestr, endian=endian) == num<|fim▁end|> | yield check_short_form_pack_endian, 'p', -66052, b'\xfc\xfd\xfe\xff', pwny.Target.Endian.little
yield check_short_form_pack_endian, 'p', -66052, b'\xff\xfe\xfd\xfc', pwny.Target.Endian.big
yield check_short_form_pack, 'P', 4294901244, b'\xfc\xfd\xfe\xff' |
<|file_name|>delete_user_form.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django import forms
from datetime import date
from dateutil.relativedelta import *
from django.contrib.auth.models import User
from custom_form_app.forms.base_form_class import *
from account_app.models import *
from website.exceptions import *
import logging, sys
# force utf8 read data
reload(sys);
sys.setdefaultencoding("utf8")
# Get an instance of a logger
logger = logging.getLogger(__name__)
class DeleteUserForm(forms.Form, FormCommonUtils):
user_id = forms.IntegerField(label='User id', required=True)
# list of validator for this form
custom_validation_list = (
'check_all_fields_valid',
)
def __init__(self, *args, **kwargs):
# parent forms.Form init
super(DeleteUserForm, self).__init__(*args, **kwargs)
FormCommonUtils.__init__(self)
# current form instance
self.validation_form = super(DeleteUserForm, self)
def clean(self):
super(DeleteUserForm, self).clean_form_custom()
return True<|fim▁hole|> logged_user_id = self.request_data.user.id
user_id = self.form_validated_data.get("user_id")
account_obj = Account()
try:
account_obj.delete_user(user_id=user_id, logged_user_id=logged_user_id)
except UserDeleteDoesNotExistsError:
logger.error("Errore nell'eliminazione dell'account, l'id utente non esiste: " + str(self.form_validated_data) + " | error code: " + str(UserDeleteDoesNotExistsError.get_error_code))
self._errors = {"__all__": ["Errore nell'eliminazione dell'account. Sii gentile, segnala il problema (Codice " + str(UserDeleteDoesNotExistsError.get_error_code) + ")"]}
except UserDeleteIdDoesNotMatchError:
logger.error("Errore nell'eliminazione dell'account, l'id utente non matcha con l'id utente in sessione: " + str(self.form_validated_data) + " | error code: " + str(UserDeleteIdDoesNotMatchError.get_error_code))
self._errors = {"__all__": ["Errore nell'eliminazione dell'account. Sii gentile, segnala il problema (Codice " + str(UserDeleteIdDoesNotMatchError.get_error_code) + ")"]}
else:
return_var = True
return return_var
def form_actions(self):
"""Function to perform form actions"""
return_var = False
# delete user and related data
if self.delete_user():
return_var = True
return return_var<|fim▁end|> |
def delete_user(self):
"""Function to delete user"""
return_var = False |
<|file_name|>Header.js<|end_file_name|><|fim▁begin|>import React, {PropTypes} from 'react';
import { Link, IndexLink } from 'react-router';
// import {Navbar, Nav, NavItem, NavDropdown, MenuItem} from 'react-bootstrap';
class Header extends React.Component {
constructor(props) {
super(props);
}
render() {
return (
<div id="container" className="link_box">
<div id="itemA"></div>
<div id="itemB">
<Link to="/">
<img src=" http://cdn.dscount.com/images_2016/top/ntop_all02.jpg" alt="dscount"/>
</Link>
</div>
<div id="itemC">
<Link to="/">
<img src=" http://cdn.dscount.com/images_2016/top/btn_dscoupon02.jpg" alt="dscount"/>
</Link>
</div>
<div id="itemD"></div>
<div id="itemE">
<Link to="" >ログイン</Link>
</div>
<div id="itemF">
<Link to="/signup">会員登録</Link>
</div>
<div id="itemG">
<a href="#" >マイページ{" "}▼</a>
</div>
<div id="itemH">
<a href="#" >お客様センター{" "}▼</a>
</div>
<div id="itemI">
<a href="#" >
<span className="glyphicon glyphicon-shopping-cart">(0)</span>
</a>
</div>
<div id="itemJ">
<a href="#" >
<span className="glyphicon glyphicon-heart">(1)</span>
</a>
</div>
<form id="itemK">
<div className="field">
<button className="drop-down-btn" type="button" id="search">総合検索{" "}▼</button>
<input className="header-search-input" type="text" id="searchterm" placeholder=" what do you want ?" />
<button className="search-submit" type="button" id="search">
<span className="glyphicon glyphicon-search"></span>
</button><|fim▁hole|> </div>
</form>
<div id="itemL"></div>
</div>
);
}
}
export default Header;<|fim▁end|> | |
<|file_name|>protocol.py<|end_file_name|><|fim▁begin|># Copyright (C) 2005-2010 MISG/ICTI/EIA-FR
# See LICENSE for details.
"""
Factories for AMQ clients, Thrift clients and SMAC Clients and servers.
@author: Jonathan Stoppani <[email protected]>
"""
import weakref
from twisted.internet.protocol import ReconnectingClientFactory
from twisted.internet import defer, error
from txamqp.protocol import AMQClient
from txamqp.contrib.thrift.client import ThriftTwistedDelegate
from txamqp.queue import TimeoutDeferredQueue, Closed
from txamqp.contrib.thrift.transport import TwistedAMQPTransport
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from smac.python import log
from smac.amqp.models import Exchange, Queue, IAddress
from smac.conf import topology
from smac.modules import utils
class SMACServerFactory(object):
iprot_factory = TBinaryProtocol.TBinaryProtocolFactory()
oprot_factory = TBinaryProtocol.TBinaryProtocolFactory()
def __init__(self, client, channel=None):
self.client = client
self.channel = channel or 1
if client.check_0_8():
self.reply_to = "reply to"
else:
self.reply_to = "reply-to"
@defer.inlineCallbacks
def build_server(self, delegate, processor, handler, address, queues=None, standalone=True):
processor_name = processor.__name__
log.debug("Creating new server for {0} with ID {1}".format(
processor_name, address.instance))
address = IAddress(address)
if not queues:
queues = topology.queues
if isinstance(self.channel, int):
channel = yield self.client.channel(self.channel)
yield channel.channel_open()
else:
# Assume it's already open!
channel = self.channel
deferreds = []
# Declare all exchanges
exchanges = {}
for k, e in topology.exchanges.iteritems():
e = Exchange(channel, **e)
e.format_name(**dict(address))
e.declare()
exchanges[k] = e
self.responses = Exchange(channel, **topology.exchanges['responses'])
# Declare all queues
qs = []
for q in queues:
q = q.copy()
bindings = q.pop('bindings')
q = Queue(channel, **q)
q.format_name(**dict(address))
q.declare()
deferreds += [q.bind(exchanges[e], k.format(**dict(address))) for e, k in bindings]
qs.append(q)
# Wait for declarations and bindings
yield defer.DeferredList(deferreds)
log.debug("All queues and needed exchanges declared and bound, start listening")
tags = []
for queue in qs:
tag = yield queue.consume()
tags.append(tag)
@defer.inlineCallbacks
def destroy(ref):
log.debug("Server for {0} garbage collected, removing " \
"subscriptions".format(processor_name))
try:
yield defer.DeferredList([channel.basic_cancel(t) for t in tags])
except Exception as e:
pass
if not standalone:
handler = weakref.proxy(handler, destroy)
processor = processor.Processor(handler)
for tag in tags:
queue = yield self.client.queue(tag)
self.get_next_message(channel, queue, processor, delegate)
def parse_message(self, msg, channel, queue, processor, delegate):
tag = msg.delivery_tag
try:
sender = msg.content[self.reply_to]
except KeyError:
sender = None
transport_in = TTransport.TMemoryBuffer(msg.content.body)
transport_out = TwistedAMQPTransport(channel, str(self.responses), sender)
iprot = self.iprot_factory.getProtocol(transport_in)
oprot = self.oprot_factory.getProtocol(transport_out)
d = processor.process(iprot, oprot)
d.addErrback(delegate.processing_error)
channel.basic_ack(tag, True)
self.get_next_message(channel, queue, processor, delegate)
def get_next_message(self, channel, queue, processor, delegate):
d = queue.get()
d.addCallback(self.parse_message, channel, queue, processor, delegate)
d.addErrback(self.catch_closed_queue, delegate)
d.addErrback(delegate.queue_error)
def catch_closed_queue(self, failure, delegate):
failure.trap(Closed)
delegate.queue_closed(failure)
class SMACClientFactory(object):
iprot_factory = TBinaryProtocol.TBinaryProtocolFactory()
oprot_factory = TBinaryProtocol.TBinaryProtocolFactory()
def __init__(self, client, channel=None):
self.client = client
self.client_lock = defer.DeferredLock()
self.clients = {}
if client.check_0_8():
self.reply_to = "reply to"
else:
self.reply_to = "reply-to"
self.channel = channel or 1
@defer.inlineCallbacks
def build_client(self, address, service=None, distribution=None, cache=True):
yield self.client_lock.acquire()
try:
address = IAddress(address)
if not service:
service = utils.get_module_from_address(address)
service_name = service.__name__ + address.routing_key
distribution = distribution or address.distribution
if not distribution:
raise ValueError("The distribution mode was not defined and " \
"could not be inferred from the address.")
key = (service, address.routing_key, distribution)
try:
client = self.clients[key]
except KeyError:
log.debug("Creating new client for {0} with routing key {1} and distribution {2}".format(
service.__name__, address.routing_key, distribution))
if isinstance(self.channel, int):
channel = yield self.client.channel(self.channel)
yield channel.channel_open()
else:
# Assume it's already open!
channel = self.channel
response_exchange = Exchange(channel, **topology.exchanges['responses'])
response_queue = Queue(channel, exclusive=True, auto_delete=True)
yield response_queue.declare()
yield response_queue.bind(response_exchange)
consumer_tag = yield response_queue.consume()<|fim▁hole|>
service_exchange = Exchange(channel, **topology.exchanges[distribution])
service_exchange.format_name(**dict(address))
yield service_exchange.declare()
amqp_transport = TwistedAMQPTransport(channel, str(service_exchange),
address.routing_key, service_name,
str(response_queue), self.reply_to)
client = service.Client(amqp_transport, self.oprot_factory)
client.address = address
client.factory = self
if cache:
weak_client = client
self.clients[key] = client
else:
@defer.inlineCallbacks
def destroy(ref):
log.debug("Client for {0} garbage collected, removing " \
"subscriptions".format(service_name))
try:
yield channel.basic_cancel(consumer_tag)
except Exception as e:
pass
weak_client = weakref.proxy(client, destroy)
queue = yield self.client.queue(consumer_tag)
self.get_next_message(channel, queue, weak_client)
queue = yield self.client.get_return_queue(service_name)
self.get_next_unroutable_message(channel, queue, weak_client)
else:
log.debug("Using cached client for {0} with routing key {1} and distribution {2}".format(
service.__name__, address.routing_key, distribution))
finally:
self.client_lock.release()
defer.returnValue(client)
def parse_message(self, msg, channel, queue, client):
tag = msg.delivery_tag
transport = TTransport.TMemoryBuffer(msg.content.body)
iprot = self.iprot_factory.getProtocol(transport)
(fname, mtype, rseqid) = iprot.readMessageBegin()
if rseqid not in client._reqs:
log.warn('Missing rseqid! fname = %r, rseqid = %s, mtype = %r, routing key = %r, client = %r, msg.content.body = %r' % (fname, rseqid, mtype, msg.routing_key, client, msg.content.body))
method = getattr(client, 'recv_' + fname)
method(iprot, mtype, rseqid)
channel.basic_ack(tag, True)
self.get_next_message(channel, queue, client)
def unrouteable_message(self, msg, channel, queue, client):
transport = TTransport.TMemoryBuffer(msg.content.body)
iprot = self.iprot_factory.getProtocol(transport)
(fname, mtype, rseqid) = iprot.readMessageBegin()
try:
d = client._reqs.pop(rseqid)
except KeyError:
# KeyError will occur if the remote Thrift method is oneway,
# since there is no outstanding local request deferred for
# oneway calls.
pass
else:
type = TTransport.TTransportException.NOT_OPEN,
msg = 'Unrouteable message, routing key = %r calling function %r' % (msg.routing_key, fname)
d.errback(TTransport.TTransportException(type, msg))
self.get_next_unroutable_message(channel, queue, client)
def get_next_unroutable_message(self, channel, queue, client):
d = queue.get()
d.addCallback(self.unrouteable_message, channel, queue, client)
d.addErrback(self.catch_closed_queue)
d.addErrback(self.handle_queue_error)
def get_next_message(self, channel, queue, client):
d = queue.get()
d.addCallback(self.parse_message, channel, queue, client)
d.addErrback(self.catch_closed_queue)
d.addErrback(self.handle_queue_error)
def catch_closed_queue(self, failure):
failure.trap(Closed)
self.handle_closed_queue(failure)
def handle_queue_error(self, failure):
log.err("Error in queue")
log.err(failure)
pass
def handle_closed_queue(self, failure):
log.debug("Queue closed")
class ThriftAMQClient(AMQClient, object):
def __init__(self, *args, **kwargs):
super(ThriftAMQClient, self).__init__(*args, **kwargs)
self.return_queues_lock = defer.DeferredLock()
self.return_queues = {}
@defer.inlineCallbacks
def get_return_queue(self, key):
yield self.return_queues_lock.acquire()
try:
try:
q = self.return_queues[key]
except KeyError:
q = TimeoutDeferredQueue()
self.return_queues[key] = q
finally:
self.return_queues_lock.release()
defer.returnValue(q)
thriftBasicReturnQueue = get_return_queue # compatibility with
# ThriftTwistedDelegate
class AMQClientFactory(ReconnectingClientFactory, object):
"""
Factory for AMQP connections intended to be used by thrift clients.
Overriding the C{protocol} property with a more general C{AMQClient} class
should allow a more generic use of the factory.
"""
protocol = ThriftAMQClient
def __init__(self, spec, vhost):
self.spec = spec
self.vhost = vhost
self.closed = False
def buildProtocol(self, _):
client = self.protocol(ThriftTwistedDelegate(), self.vhost, self.spec)
client.factory = self
return client
def clientConnectionLost(self, connector, reason):
if self.closed:
log.info("Connection to the AMQP broker closed.")
return
log.error('Connection to AMQP broker lost. Reason {0}'.format(reason))
super(AMQClientFactory, self).clientConnectionLost(connector, reason)
def clientConnectionFailed(self, connector, reason):
log.error('Connection to AMQP broker failed. Reason {0}'.format(reason))
super(AMQClientFactory, self).clientConnectionFailed(connector, reason)<|fim▁end|> | |
<|file_name|>square_free_string.py<|end_file_name|><|fim▁begin|>class SquareFreeString:
def isSquareFree(self, s):
for i in range(0, len(s)):
for length in range(1, len(s)):<|fim▁hole|> if second == first:
return "not square-free"
return "square-free"<|fim▁end|> | first = s[i:length + i]
second = s[i+length:i+length+length] |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2013 University of Maryland. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE.TXT file.
import os
from framework.Targets import ApacheTarget
class Target(ApacheTarget):
def get_path(filename):
return os.path.dirname(os.path.realpath(__file__)) + '/' + filename<|fim▁hole|> application_dir_mapping = [get_path("application"), "/var/www"]
chroot_environment = "Debian5"<|fim▁end|> |
name = "eXtplorer 2.1" |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.