prompt (large_string, lengths 70 to 991k) | completion (large_string, lengths 0 to 1.02k) |
---|---|
<|file_name|>UserAgent.java<|end_file_name|><|fim▁begin|>package com.yh.admin.bo;
import java.util.Date;
import com.yh.platform.core.bo.BaseBo;
public class UserAgent extends BaseBo {
private static final long serialVersionUID = 2715416587055228708L;
private Long userAgentOid;
private Long systemPositionOid;
private String userId; // user being represented (the delegator)
private String agentUserId; // designated agent (delegate)
private Date effectiveDate; // effective start date
private Date expiredDt;
private String isActive;
public Long getSystemPositionOid() {
return systemPositionOid;
}
public void setSystemPositionOid(Long systemPositionOid) {
this.systemPositionOid = systemPositionOid;
}
public Long getUserAgentOid() {
return userAgentOid;
}
public void setUserAgentOid(Long userAgentOid) {
this.userAgentOid = userAgentOid;
}
public String getUserId() {
return userId;
}
<|fim▁hole|> public void setUserId(String userId) {
this.userId = userId;
}
public String getAgentUserId() {
return agentUserId;
}
public void setAgentUserId(String agentUserId) {
this.agentUserId = agentUserId;
}
public Date getExpiredDt() {
return expiredDt;
}
public void setExpiredDt(Date expiredDt) {
this.expiredDt = expiredDt;
}
public String getIsActive() {
return isActive;
}
public void setIsActive(String isActive) {
this.isActive = isActive;
}
public Date getEffectiveDate() {
return effectiveDate;
}
public void setEffectiveDate(Date effectiveDate) {
this.effectiveDate = effectiveDate;
}
}<|fim▁end|> | |
<|file_name|>GAPhysicsBaseTemp.cpp<|end_file_name|><|fim▁begin|>#include "GAPhysicsBaseTemp.h"
//------------------------------GAPhysicsBase------------------------------------------
GAPhysicsBase::GAPhysicsBase()
{
}
int GAPhysicsBase::testCollideWith(GAPhysicsBase* object2,GAVector3& collidePoint)
{
return 0;
}
//--------------------------------GALine-----------------------------------------------
GALine::GALine()
{
p1=GAVector3(0,0,0);
p2=GAVector3(1,1,1);
}
GALine::GALine(GAVector3 p1_,GAVector3 p2_)
{
p1=p1_;
p2=p2_;
}
//-------------------------------GASegment---------------------------------------------
GASegment::GASegment()
{
pstart=GAVector3(0,0,0);
pend=GAVector3(1,1,1);
}
GASegment::GASegment(GAVector3 pstart_,GAVector3 pend_)
{
pstart=pstart_;
pend=pend_;
}
//-------------------------------GAPlane-----------------------------------------------
GAPlane::GAPlane()
{
}
int GAPlane::testCollideWith(GAPhysicsBase* object2,GAVector3& collidePoint)<|fim▁hole|>}
//-----------------------------------GACube--------------------------------------------
GACube::GACube()
{
}
int GACube::testCollideWith(GAPhysicsBase* object2,GAVector3& collidePoint)
{
return 0;
}
int GACube::testCollideWithCube(GACube* object2,GAVector3& collidePoint)
{
return 0;
}
//--------------------------------GACylinder-------------------------------------------
//--------------------------------GASphere---------------------------------------------
//--------------------------------GACapsule--------------------------------------------<|fim▁end|> | {
return 0; |
<|file_name|>KubernetesModelUtil.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2017 Google, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and<|fim▁hole|> *
*/
package com.netflix.spinnaker.clouddriver.kubernetes.provider;
import com.netflix.spinnaker.clouddriver.model.HealthState;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
@Slf4j
public class KubernetesModelUtil {
public static long translateTime(String time) {
return KubernetesModelUtil.translateTime(time, "yyyy-MM-dd'T'HH:mm:ssX");
}
public static long translateTime(String time, String format) {
try {
return StringUtils.isNotEmpty(time)
? (new SimpleDateFormat(format).parse(time)).getTime()
: 0;
} catch (ParseException e) {
log.error("Failed to parse kubernetes timestamp", e);
return 0;
}
}
public static HealthState getHealthState(List<Map<String, Object>> health) {
return someUpRemainingUnknown(health)
? HealthState.Up
: someSucceededRemainingUnknown(health)
? HealthState.Succeeded
: anyStarting(health)
? HealthState.Starting
: anyDown(health)
? HealthState.Down
: anyFailed(health)
? HealthState.Failed
: anyOutOfService(health) ? HealthState.OutOfService : HealthState.Unknown;
}
private static boolean stateEquals(Map<String, Object> health, HealthState state) {
Object healthState = health.get("state");
return healthState != null && healthState.equals(state.name());
}
private static boolean someUpRemainingUnknown(List<Map<String, Object>> healthsList) {
List<Map<String, Object>> knownHealthList =
healthsList.stream()
.filter(h -> !stateEquals(h, HealthState.Unknown))
.collect(Collectors.toList());
return !knownHealthList.isEmpty()
&& knownHealthList.stream().allMatch(h -> stateEquals(h, HealthState.Up));
}
private static boolean someSucceededRemainingUnknown(List<Map<String, Object>> healthsList) {
List<Map<String, Object>> knownHealthList =
healthsList.stream()
.filter(h -> !stateEquals(h, HealthState.Unknown))
.collect(Collectors.toList());
return !knownHealthList.isEmpty()
&& knownHealthList.stream().allMatch(h -> stateEquals(h, HealthState.Succeeded));
}
private static boolean anyDown(List<Map<String, Object>> healthsList) {
return healthsList.stream().anyMatch(h -> stateEquals(h, HealthState.Down));
}
private static boolean anyStarting(List<Map<String, Object>> healthsList) {
return healthsList.stream().anyMatch(h -> stateEquals(h, HealthState.Starting));
}
private static boolean anyFailed(List<Map<String, Object>> healthsList) {
return healthsList.stream().anyMatch(h -> stateEquals(h, HealthState.Failed));
}
private static boolean anyOutOfService(List<Map<String, Object>> healthsList) {
return healthsList.stream().anyMatch(h -> stateEquals(h, HealthState.OutOfService));
}
}<|fim▁end|> | * limitations under the License. |
<|file_name|>codes.js<|end_file_name|><|fim▁begin|>// This module is compiled away!
//
// micromark works based on character codes.
// This module contains constants for the ASCII block and the replacement
// character.
// A couple of them are handled in a special way, such as the line endings
// (CR, LF, and CR+LF, commonly known as end-of-line: EOLs), the tab (horizontal
// tab) and its expansion based on what column it’s at (virtual space),
// and the end-of-file (eof) character.
// As values are preprocessed before handling them, the actual characters LF,
// CR, HT, and NUL (which is present as the replacement character), are
// guaranteed to not exist.
//
// Unicode basic latin block.
exports.carriageReturn = -5
exports.lineFeed = -4
exports.carriageReturnLineFeed = -3
exports.horizontalTab = -2
exports.virtualSpace = -1
exports.eof = null
exports.nul = 0
exports.soh = 1
exports.stx = 2
exports.etx = 3
exports.eot = 4
exports.enq = 5
exports.ack = 6
exports.bel = 7
exports.bs = 8
exports.ht = 9 // `\t`
exports.lf = 10 // `\n`
exports.vt = 11 // `\v`
exports.ff = 12 // `\f`
exports.cr = 13 // `\r`
exports.so = 14
exports.si = 15
exports.dle = 16
exports.dc1 = 17
exports.dc2 = 18
exports.dc3 = 19
exports.dc4 = 20
exports.nak = 21
exports.syn = 22
exports.etb = 23
exports.can = 24
exports.em = 25
exports.sub = 26
exports.esc = 27
exports.fs = 28
exports.gs = 29
exports.rs = 30
exports.us = 31
exports.space = 32
exports.exclamationMark = 33 // `!`
exports.quotationMark = 34 // `"`
exports.numberSign = 35 // `#`
exports.dollarSign = 36 // `$`
exports.percentSign = 37 // `%`
exports.ampersand = 38 // `&`
exports.apostrophe = 39 // `'`
exports.leftParenthesis = 40 // `(`
exports.rightParenthesis = 41 // `)`
exports.asterisk = 42 // `*`
exports.plusSign = 43 // `+`
exports.comma = 44 // `,`
exports.dash = 45 // `-`
exports.dot = 46 // `.`
exports.slash = 47 // `/`
exports.digit0 = 48 // `0`<|fim▁hole|>exports.digit5 = 53 // `5`
exports.digit6 = 54 // `6`
exports.digit7 = 55 // `7`
exports.digit8 = 56 // `8`
exports.digit9 = 57 // `9`
exports.colon = 58 // `:`
exports.semicolon = 59 // `;`
exports.lessThan = 60 // `<`
exports.equalsTo = 61 // `=`
exports.greaterThan = 62 // `>`
exports.questionMark = 63 // `?`
exports.atSign = 64 // `@`
exports.uppercaseA = 65 // `A`
exports.uppercaseB = 66 // `B`
exports.uppercaseC = 67 // `C`
exports.uppercaseD = 68 // `D`
exports.uppercaseE = 69 // `E`
exports.uppercaseF = 70 // `F`
exports.uppercaseG = 71 // `G`
exports.uppercaseH = 72 // `H`
exports.uppercaseI = 73 // `I`
exports.uppercaseJ = 74 // `J`
exports.uppercaseK = 75 // `K`
exports.uppercaseL = 76 // `L`
exports.uppercaseM = 77 // `M`
exports.uppercaseN = 78 // `N`
exports.uppercaseO = 79 // `O`
exports.uppercaseP = 80 // `P`
exports.uppercaseQ = 81 // `Q`
exports.uppercaseR = 82 // `R`
exports.uppercaseS = 83 // `S`
exports.uppercaseT = 84 // `T`
exports.uppercaseU = 85 // `U`
exports.uppercaseV = 86 // `V`
exports.uppercaseW = 87 // `W`
exports.uppercaseX = 88 // `X`
exports.uppercaseY = 89 // `Y`
exports.uppercaseZ = 90 // `Z`
exports.leftSquareBracket = 91 // `[`
exports.backslash = 92 // `\`
exports.rightSquareBracket = 93 // `]`
exports.caret = 94 // `^`
exports.underscore = 95 // `_`
exports.graveAccent = 96 // `` ` ``
exports.lowercaseA = 97 // `a`
exports.lowercaseB = 98 // `b`
exports.lowercaseC = 99 // `c`
exports.lowercaseD = 100 // `d`
exports.lowercaseE = 101 // `e`
exports.lowercaseF = 102 // `f`
exports.lowercaseG = 103 // `g`
exports.lowercaseH = 104 // `h`
exports.lowercaseI = 105 // `i`
exports.lowercaseJ = 106 // `j`
exports.lowercaseK = 107 // `k`
exports.lowercaseL = 108 // `l`
exports.lowercaseM = 109 // `m`
exports.lowercaseN = 110 // `n`
exports.lowercaseO = 111 // `o`
exports.lowercaseP = 112 // `p`
exports.lowercaseQ = 113 // `q`
exports.lowercaseR = 114 // `r`
exports.lowercaseS = 115 // `s`
exports.lowercaseT = 116 // `t`
exports.lowercaseU = 117 // `u`
exports.lowercaseV = 118 // `v`
exports.lowercaseW = 119 // `w`
exports.lowercaseX = 120 // `x`
exports.lowercaseY = 121 // `y`
exports.lowercaseZ = 122 // `z`
exports.leftCurlyBrace = 123 // `{`
exports.verticalBar = 124 // `|`
exports.rightCurlyBrace = 125 // `}`
exports.tilde = 126 // `~`
exports.del = 127
// Unicode Specials block.
exports.byteOrderMarker = 65279
// Unicode Specials block.
exports.replacementCharacter = 65533 // `�`<|fim▁end|> | exports.digit1 = 49 // `1`
exports.digit2 = 50 // `2`
exports.digit3 = 51 // `3`
exports.digit4 = 52 // `4` |
<|file_name|>goldsaxenginestart.py<|end_file_name|><|fim▁begin|><|fim▁hole|>/*email to provide support at [email protected], [email protected], For donations please write to [email protected]*/<|fim▁end|> | /*Owner & Copyrights: Vance King Saxbe. A.*/from GoldSaxEngineChinaMarkets import goldsaxenginechinamarkets
goldsaxenginechinamarkets.start()
|
<|file_name|>analytics.py<|end_file_name|><|fim▁begin|>import asyncio
import collections
import logging
import aiohttp
import typing
from lbry import utils
from lbry.conf import Config<|fim▁hole|>from lbry.extras import system_info
ANALYTICS_ENDPOINT = 'https://api.segment.io/v1'
ANALYTICS_TOKEN = 'Ax5LZzR1o3q3Z3WjATASDwR5rKyHH0qOIRIbLmMXn2H='
# Things We Track
SERVER_STARTUP = 'Server Startup'
SERVER_STARTUP_SUCCESS = 'Server Startup Success'
SERVER_STARTUP_ERROR = 'Server Startup Error'
DOWNLOAD_STARTED = 'Download Started'
DOWNLOAD_ERRORED = 'Download Errored'
DOWNLOAD_FINISHED = 'Download Finished'
HEARTBEAT = 'Heartbeat'
CLAIM_ACTION = 'Claim Action' # publish/create/update/abandon
NEW_CHANNEL = 'New Channel'
CREDITS_SENT = 'Credits Sent'
UPNP_SETUP = "UPnP Setup"
BLOB_BYTES_UPLOADED = 'Blob Bytes Uploaded'
TIME_TO_FIRST_BYTES = "Time To First Bytes"
log = logging.getLogger(__name__)
def _event_properties(installation_id: str, session_id: str,
event_properties: typing.Optional[typing.Dict]) -> typing.Dict:
properties = {
'lbry_id': installation_id,
'session_id': session_id,
}
properties.update(event_properties or {})
return properties
def _download_properties(conf: Config, external_ip: str, resolve_duration: float,
total_duration: typing.Optional[float], download_id: str, name: str,
outpoint: str, active_peer_count: int, tried_peers_count: int,
added_fixed_peers: bool, fixed_peer_delay: float, sd_hash: str,
sd_download_duration: typing.Optional[float] = None,
head_blob_hash: typing.Optional[str] = None,
head_blob_length: typing.Optional[int] = None,
head_blob_download_duration: typing.Optional[float] = None,
error: typing.Optional[str] = None) -> typing.Dict:
return {
"external_ip": external_ip,
"download_id": download_id,
"total_duration": round(total_duration, 4),
"resolve_duration": None if not resolve_duration else round(resolve_duration, 4),
"error": error,
'name': name,
"outpoint": outpoint,
"node_rpc_timeout": conf.node_rpc_timeout,
"peer_connect_timeout": conf.peer_connect_timeout,
"blob_download_timeout": conf.blob_download_timeout,
"use_fixed_peers": len(conf.reflector_servers) > 0,
"fixed_peer_delay": fixed_peer_delay,
"added_fixed_peers": added_fixed_peers,
"active_peer_count": active_peer_count,
"tried_peers_count": tried_peers_count,
"sd_blob_hash": sd_hash,
"sd_blob_duration": None if not sd_download_duration else round(sd_download_duration, 4),
"head_blob_hash": head_blob_hash,
"head_blob_length": head_blob_length,
"head_blob_duration": None if not head_blob_download_duration else round(head_blob_download_duration, 4)
}
def _make_context(platform):
# see https://segment.com/docs/spec/common/#context
# they say they'll ignore fields outside the spec, but evidently they don't
context = {
'app': {
'version': platform['lbrynet_version'],
'build': platform['build'],
},
# TODO: expand os info to give linux/osx specific info
'os': {
'name': platform['os_system'],
'version': platform['os_release']
},
}
if 'desktop' in platform and 'distro' in platform:
context['os']['desktop'] = platform['desktop']
context['os']['distro'] = platform['distro']
return context
class AnalyticsManager:
def __init__(self, conf: Config, installation_id: str, session_id: str):
self.conf = conf
self.cookies = {}
self.url = ANALYTICS_ENDPOINT
self._write_key = utils.deobfuscate(ANALYTICS_TOKEN)
self._enabled = conf.share_usage_data
self._tracked_data = collections.defaultdict(list)
self.context = _make_context(system_info.get_platform())
self.installation_id = installation_id
self.session_id = session_id
self.task: asyncio.Task = None
self.external_ip: typing.Optional[str] = None
@property
def is_started(self):
return self.task is not None
async def start(self):
if self._enabled and self.task is None:
self.external_ip = await utils.get_external_ip()
self.task = asyncio.create_task(self.run())
async def run(self):
while True:
await self._send_heartbeat()
await asyncio.sleep(1800)
self.external_ip = await utils.get_external_ip()
def stop(self):
if self.task is not None and not self.task.done():
self.task.cancel()
async def _post(self, data: typing.Dict):
request_kwargs = {
'method': 'POST',
'url': self.url + '/track',
'headers': {'Connection': 'Close'},
'auth': aiohttp.BasicAuth(self._write_key, ''),
'json': data,
'cookies': self.cookies
}
try:
async with utils.aiohttp_request(**request_kwargs) as response:
self.cookies.update(response.cookies)
except Exception as e:
log.debug('Encountered an exception while POSTing to %s: ', self.url + '/track', exc_info=e)
async def track(self, event: typing.Dict):
"""Send a single tracking event"""
if self._enabled:
log.debug('Sending track event: %s', event)
await self._post(event)
async def send_upnp_setup_success_fail(self, success, status):
await self.track(
self._event(UPNP_SETUP, {
'success': success,
'status': status,
})
)
async def send_server_startup(self):
await self.track(self._event(SERVER_STARTUP))
async def send_server_startup_success(self):
await self.track(self._event(SERVER_STARTUP_SUCCESS))
async def send_server_startup_error(self, message):
await self.track(self._event(SERVER_STARTUP_ERROR, {'message': message}))
async def send_time_to_first_bytes(self, resolve_duration: typing.Optional[float],
total_duration: typing.Optional[float], download_id: str,
name: str, outpoint: str, found_peers_count: int,
tried_peers_count: int, added_fixed_peers: bool,
fixed_peers_delay: float, sd_hash: str,
sd_download_duration: typing.Optional[float] = None,
head_blob_hash: typing.Optional[str] = None,
head_blob_length: typing.Optional[int] = None,
head_blob_duration: typing.Optional[int] = None,
error: typing.Optional[str] = None):
await self.track(self._event(TIME_TO_FIRST_BYTES, _download_properties(
self.conf, self.external_ip, resolve_duration, total_duration, download_id, name, outpoint,
found_peers_count, tried_peers_count, added_fixed_peers, fixed_peers_delay, sd_hash,
sd_download_duration, head_blob_hash, head_blob_length, head_blob_duration, error
)))
async def send_download_finished(self, download_id, name, sd_hash):
await self.track(
self._event(
DOWNLOAD_FINISHED, {
'download_id': download_id,
'name': name,
'stream_info': sd_hash
}
)
)
async def send_claim_action(self, action):
await self.track(self._event(CLAIM_ACTION, {'action': action}))
async def send_new_channel(self):
await self.track(self._event(NEW_CHANNEL))
async def send_credits_sent(self):
await self.track(self._event(CREDITS_SENT))
async def _send_heartbeat(self):
await self.track(self._event(HEARTBEAT))
def _event(self, event, properties: typing.Optional[typing.Dict] = None):
return {
'userId': 'lbry',
'event': event,
'properties': _event_properties(self.installation_id, self.session_id, properties),
'context': self.context,
'timestamp': utils.isonow()
}<|fim▁end|> | |
<|file_name|>resetpassword.js<|end_file_name|><|fim▁begin|>var mysql = require('mysql');
var bcrypt = require('bcryptjs');
var connection = mysql.createConnection({
host: process.env.FOLIO_HOST,
user: process.env.FOLIO_USER,
database: process.env.FOLIO_DATABASE,
password: process.env.FOLIO_PASSWORD
});
console.log("[*] Database connection open")
console.log("[*] Resetting username and password");
connection.query("TRUNCATE users;");
console.log("[*] Creating default user, admin (username), password (password)");
var passwordhash = bcrypt.hashSync("password", 10);<|fim▁hole|>
connection.end();
console.log("[*] Finished!");<|fim▁end|> | console.log("\t[>] Password hash: " + passwordhash);
connection.query("INSERT INTO users(username, passwordhash) values('admin', '" + passwordhash + "');"); |
<|file_name|>MediaTypeCache.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tomcat.util.http.parser;
import java.io.IOException;
import java.io.StringReader;
import org.apache.tomcat.util.collections.ConcurrentCache;
/**
* Caches the results of parsing content-type headers.
*/
public class MediaTypeCache {
private final ConcurrentCache<String,String[]> cache;
public MediaTypeCache(int size) {
cache = new ConcurrentCache<>(size);
<|fim▁hole|> /**
* Looks in the cache and returns the cached value if one is present. If no
* match exists in the cache, a new parser is created, the input parsed and
* the results placed in the cache and returned to the user.
*
* @param input The content-type header value to parse
* @return The results are provided as a two element String array. The
* first element is the media type less the charset and
* the second element is the charset
*/
public String[] parse(String input) {
String[] result = cache.get(input);
if (result != null) {
return result;
}
MediaType m = null;
try {
m = MediaType.parseMediaType(new StringReader(input));
} catch (IOException e) {
// Ignore - return null
}
if (m != null) {
result = new String[] {m.toStringNoCharset(), m.getCharset()};
cache.put(input, result);
}
return result;
}
}<|fim▁end|> | }
|
<|file_name|>group-forms-csv.component.ts<|end_file_name|><|fim▁begin|>import { Breadcrumb } from './../../shared/_components/breadcrumb/breadcrumb.component';
import { Component, OnInit, AfterViewInit, ElementRef, ViewChild } from '@angular/core';
import { ActivatedRoute, Router } from '@angular/router';
import { GroupsService } from '../services/groups.service';
import { UserService } from '../../core/auth/_services/user.service';
import { HttpClient } from '@angular/common/http';
import { WindowRef } from 'src/app/core/window-ref.service';
import { TangerineFormsService } from '../services/tangerine-forms.service';
import { _TRANSLATE } from 'src/app/shared/_services/translation-marker';
import { TangyErrorHandler } from 'src/app/shared/_services/tangy-error-handler.service';
import { ServerConfigService } from 'src/app/shared/_services/server-config.service';
@Component({
selector: 'app-group-forms-csv',
templateUrl: './group-forms-csv.component.html',
styleUrls: ['./group-forms-csv.component.css']
})
export class GroupFormsCsvComponent implements OnInit, AfterViewInit {
title = _TRANSLATE('Download CSVs')
breadcrumbs:Array<Breadcrumb> = []
forms;
groupId;
group;
groupLabel;
responses;
selectedTabIndex;
enabledModules;
copyFormId;
archivedForms;
activeForms;
groupUrl;
formsJsonURL;
@ViewChild('copyFormOverlay', {static: true}) copyFormOverlay: ElementRef;
constructor(
private route: ActivatedRoute,
private windowRef: WindowRef,
private groupsService: GroupsService,
private userService: UserService,
private tangerineForms: TangerineFormsService,
private errorHandler: TangyErrorHandler,
private serverConfig: ServerConfigService,
private router: Router,
private http: HttpClient
) { }
async ngOnInit() {
this.breadcrumbs = [
<Breadcrumb>{
label: _TRANSLATE('Download CSVs'),
url: 'download-csv'
}
]
this.route.params.subscribe(async params => {<|fim▁hole|> this.groupId = params.groupId;
this.group = await this.groupsService.getGroupInfo(this.groupId);
this.groupLabel = this.group.label;
this.formsJsonURL = `./forms.json`;
});
try {
await this.getForms();
this.groupUrl = `${this.windowRef.nativeWindow.location.origin}${this.windowRef.nativeWindow.location.pathname}`;
} catch (error) {
this.errorHandler.handleError(_TRANSLATE('Could Not Contact Server.'));
}
}
async ngAfterViewInit() {
// This is needed to ensure angular binds to selected Tab. The settimeout does the trick
const config = await this.serverConfig.getServerConfig()
this.enabledModules = config.enabledModules;
}
async getForms() {
const config = await this.serverConfig.getServerConfig()
const appendedForms = [
{id: 'participant',title:_TRANSLATE('Participant')},
{id: 'event-form',title:_TRANSLATE('Event Form')},
{id: 'case-event',title: _TRANSLATE('Case Event')}];
this.forms = (await this.tangerineForms.getFormsInfo(this.groupId)).map(formInfo => ({
...formInfo,
printUrl: `${this.windowRef.nativeWindow.location.origin}${this.windowRef.nativeWindow.location.pathname}/#/tangy-form-editor/${this.groupId}/${formInfo.id}/print`
}));;
if(config.enabledModules.includes('case')){
this.forms = [...this.forms, ...appendedForms]
}
this.activeForms = this.forms.filter(form => !form.archived);
this.archivedForms = this.forms.filter(form => form.archived);
}
}<|fim▁end|> | |
<|file_name|>asyncio.py<|end_file_name|><|fim▁begin|># -*- coding: iso-8859-1 -*-
ur"""AsyncIO objects wrap the Win32 Overlapped API. They are instantiated by
passing a handle which has been opened for Overlapped IO. They can be waited
on by the functions in the :mod:`ipc` module and are True when complete,
False otherwise.
"""
import pywintypes
import winerror
import win32event
import win32file
from winsys import constants, core, exc, ipc, utils
class x_asyncio (exc.x_winsys):
pass
WINERROR_MAP = {
}
wrapped = exc.wrapper (WINERROR_MAP, x_asyncio)
class AsyncIO (core._WinSysObject):
def __init__ (self):
core._WinSysObject.__init__ (self)
self.event = ipc.event (needs_manual_reset=True)
self.overlapped = wrapped (win32file.OVERLAPPED)
self.overlapped.hEvent = self.event.pyobject ()
<|fim▁hole|> def pyobject (self):
ur"""Return the pyobject of the underlying event so that this object can
be waited on by the :func:`ipc.all` or :func:`ipc.any` functions
"""
return self.event.pyobject ()
def is_complete (self):
ur":returns: `True` if the IO has completed"
return self.event.isSet ()
__nonzero__ = is_complete
def wait (self):
ur"""Wait for the IO to complete in such a way that the wait can
be interrupted by a KeyboardInterrupt.
"""
while not self.event.wait (timeout_s=0.5):
pass
class AsyncHandler (AsyncIO):
BUFFER_SIZE = 4096
def __init__ (self, handle, buffer_size=BUFFER_SIZE):
AsyncIO.__init__ (self)
self.handle = handle
class AsyncWriter (AsyncHandler):
def __init__ (self, handle, data):
AsyncHandler.__init__ (self, handle)
self.data = data
wrapped (win32file.WriteFile, self.handle, data, self.overlapped)
class AsyncReader (AsyncHandler):
BUFFER_SIZE = 4096
def __init__ (self, handle):
AsyncHandler.__init__ (self, handle)
self.buffer = win32file.AllocateReadBuffer (self.BUFFER_SIZE)
wrapped (win32file.ReadFile, self.handle, self.buffer, self.overlapped)
def data (self):
ur"""Wait until the IO has completed and return the data from the read. This
is expected to be called after is_complete is true.
"""
n_bytes = win32file.GetOverlappedResult (self.handle, self.overlapped, True)
return str (self.buffer)[:n_bytes]<|fim▁end|> | |
<|file_name|>HeapPerformanceTest.java<|end_file_name|><|fim▁begin|>/*
* This file is part of ELKI:
* Environment for Developing KDD-Applications Supported by Index-Structures
*
* Copyright (C) 2019
* ELKI Development Team
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package elki.utilities.datastructures.heap;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.PriorityQueue;
import java.util.Queue;
import java.util.Random;
import org.junit.Ignore;
import org.junit.Test;
/**
* Unit test to ensure that our heap is not significantly worse than Sun Java's
* regular PriorityQueue.
*
* @author Erich Schubert
* @since 0.7.0
*/
public class HeapPerformanceTest {
private final int queueSize = 200000;
private final int preiterations = 20;
private final int iterations = 200;
private final long seed = 123456L;
@Ignore
@Test
public void testRuntime() throws Exception {
// prepare the data set
final List<Integer> elements = new ArrayList<>(queueSize);
{
final Random random = new Random(seed);
for(int i = 0; i < queueSize; i++) {
elements.add(i);
}
Collections.shuffle(elements, random);
}
// Pretest, to trigger hotspot compiler, hopefully.
{
for(int j = 0; j < preiterations; j++) {
ComparableMinHeap<Integer> pq = new ComparableMinHeap<>();
testHeap(elements, pq);
}
for(int j = 0; j < preiterations; j++) {
PriorityQueue<Integer> pq = new PriorityQueue<>();
testQueue(elements, pq);
}
}
long pqstart = System.nanoTime();
{
for(int j = 0; j < iterations; j++) {
PriorityQueue<Integer> pq = new PriorityQueue<>();
testQueue(elements, pq);
}
}
long pqtime = System.nanoTime() - pqstart;
long hstart = System.nanoTime();
{
for(int j = 0; j < iterations; j++) {
ComparableMinHeap<Integer> pq = new ComparableMinHeap<>();
testHeap(elements, pq);
}
}
long htime = System.nanoTime() - hstart;
// System.err.println("Heap performance test: us: " + htime*1E-9 + " java: " + pqtime*1E-9);
assertTrue("Heap performance regression - run test individually, since the hotspot optimizations may make the difference! " + htime + " >>= " + pqtime, htime < 1.1 * pqtime);
// 1.1 allows some difference in measuring, which can occur e.g. due to Jacoco instrumentation
}
private void testHeap(final List<Integer> elements, ComparableMinHeap<Integer> pq) {
// Insert all
for(int i = 0; i < elements.size(); i++) {
pq.add(elements.get(i));
}
// Poll first half.
final int half = elements.size() >> 1;
for(int i = 0; i < half; i++) {
assertEquals((int) pq.poll(), i);
// assertEquals((int) pq.poll(), queueSize - 1 - i);
}
assertEquals("Heap not half-empty?", elements.size() - half, pq.size());
pq.clear();
}
private void testQueue(final List<Integer> elements, Queue<Integer> pq) {
// Insert all
for(int i = 0; i < elements.size(); i++) {<|fim▁hole|> // Poll first half.
final int half = elements.size() >> 1;
for(int i = 0; i < half; i++) {
assertEquals((int) pq.poll(), i);
// assertEquals((int) pq.poll(), queueSize - 1 - i);
}
assertEquals("Heap not half-empty?", elements.size() - half, pq.size());
pq.clear();
}
}<|fim▁end|> | pq.add(elements.get(i));
} |
<|file_name|>config.py<|end_file_name|><|fim▁begin|>import os
basedir = os.path.abspath(os.path.dirname(__file__))
WTF_CSRF_ENABLED = True
SECRET_KEY = '33stanlake#'
DEBUG = True
APP_TITLE = 'Cloud of Reproducible Records API'
VERSION = '0.1-dev'<|fim▁hole|> 'db': 'corr-production',
'host': '0.0.0.0',
'port': 27017
}
# STORMPATH_API_KEY_FILE = '~/.stormpath/apiKey.properties'
# STORMPATH_APPLICATION = 'sumatra-cloud'
# STORMPATH_REDIRECT_URL = '/dashboard'<|fim▁end|> |
MONGODB_SETTINGS = { |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>module.exports = function(sails) {
var Agenda = require('agenda'),
util = require('util'),
_ = require('lodash'),
os = require("os"),
agenda = new Agenda()
agenda.sails = sails;
var stopServer = function() {
agenda.stop(function() {
console.log("agenda stopped");
});
};
sails.on("lower", stopServer);
sails.on("lowering", stopServer);
// return hook
return {
// expose agenda in sails.hooks.jobs.agenda
jobs: agenda,
// Defaults config
defaults: {
jobs: {
"globalJobsObjectName": "Jobs",
"jobsDirectory": "api/jobs",
"db": {
"address" : "localhost:27017/jobs",
"collection" : "agendaJobs"
},
"name": os.hostname() + '-' + process.pid,
"processEvery": "1 minutes",
"maxConcurrency": 20,
"defaultConcurrency": 5,
"defaultLockLifetime": 10000,
}
},
// Runs automatically when the hook initializes
initialize: function (cb) {
var hook = this
, config = sails.config.jobs
// init agenda
agenda
.database(config.db.address, config.db.collection)
.name(config.name)
.processEvery(config.processEvery)
.maxConcurrency(config.maxConcurrency)
.defaultConcurrency(config.defaultConcurrency)
.defaultLockLifetime(config.defaultLockLifetime)
global[config.globalJobsObjectName] = agenda;
// Enable jobs using coffeescript
try {
require('coffee-script/register');
} catch(e0) {
try {
var path = require('path');
var appPath = sails.config.appPath || process.cwd();
require(path.join(appPath, 'node_modules/coffee-script/register'));
} catch(e1) {
sails.log.verbose('Please run `npm install coffee-script` to use coffescript (skipping for now)');
}
}
<|fim▁hole|> dirname : sails.config.appPath + '/' + config.jobsDirectory,
filter : /(.+Job).(?:js|coffee)$/,
excludeDirs : /^\.(git|svn)$/,
optional : true
});
// init jobs
hook.initJobs(jobs);
// Lets wait on some of the sails core hooks to
// finish loading before we load our hook
// that talks about cats.
var eventsToWaitFor = [];
if (sails.hooks.orm)
eventsToWaitFor.push('hook:orm:loaded');
if (sails.hooks.pubsub)
eventsToWaitFor.push('hook:pubsub:loaded');
sails.after(eventsToWaitFor, function(){
// if (jobs.length > 0) {
// start agenda
agenda.start();
sails.log.verbose("sails jobs started")
// }
// Now we will return the callback and our hook
// will be usable.
return cb();
});
},
/**
* Function that initialize jobs
*/
initJobs: function(jobs, namespace) {
var hook = this
if (!namespace) namespace = "jobs";
sails.log.verbose("looking for job in " + namespace + "... ")
_.forEach(jobs, function(job, name){
if (typeof job === 'function') {
var log = ""
, _job = job(agenda)
, _dn = namespace + "." + name
, _name = _job.name || _dn.substr(_dn.indexOf('.') +1);
if (_job.disabled) {
log += "-> Disabled Job '" + _name + "' found in '" + namespace + "." + name + "'.";
} else {
var options = (typeof _job.options === 'object')?_job.options:{}
, freq = typeof _job.frequency == 'undefined'?sails.config.jobs.processEvery:_job.frequency
, error = false;
if (typeof _job.run === "function")
agenda.define(_name, options, _job.run);
log += "-> Job '" + _name + "' found in '" + namespace + "." + name + "', defined in agenda";
if (typeof freq === 'string') {
freq = freq.trim().toLowerCase();
if (freq.indexOf('every') == 0) {
var interval = freq.substr(6).trim();
agenda.every(interval, _name, _job.data);
log += " and will run " + freq;
} else if (freq.indexOf('schedule') == 0) {
var when = freq.substr(9).trim();
agenda.schedule(when, _name, _job.data);
log += " and scheduled " + when;
} else if (freq === 'now') {
agenda.now(_name, _job.data);
log += " and started";
} else {
error = true;
log += " but frequency is not supported";
}
}
}
log += ".";
if (error) sails.log.error(log);
else sails.log.verbose(log);
} else {
hook.initJobs(job, namespace + "." + name);
}
})
}
}
};<|fim▁end|> | // Find all jobs
var jobs = require('include-all')({ |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from action import *
from conn import *
from logic import *
from log import *
from protocol import *
from schedule import *
from server import *<|fim▁hole|><|fim▁end|> | from util import * |
<|file_name|>deployment.go<|end_file_name|><|fim▁begin|>package context
import (
"github.com/Everlane/evan/common"
"github.com/satori/go.uuid"
)
// Stores state relating to a deployment.
type Deployment struct {
uuid uuid.UUID
application common.Application
environment string
strategy common.Strategy
ref string
sha1 string
flags map[string]interface{}
store common.Store
// Internal state
currentState common.DeploymentState
currentPhase common.Phase
lastError error
}
// Create a deployment for the given application to an environment.
func NewDeployment(app common.Application, environment string, strategy common.Strategy, ref string, flags map[string]interface{}) *Deployment {
return &Deployment{
uuid: uuid.NewV1(),
application: app,
environment: environment,
strategy: strategy,
ref: ref,
flags: flags,
currentState: common.DEPLOYMENT_PENDING,
}
}
func NewBareDeployment() *Deployment {
return &Deployment{
flags: make(map[string]interface{}),
}
}
func (deployment *Deployment) UUID() uuid.UUID {
return deployment.uuid
}
func (deployment *Deployment) Application() common.Application {
return deployment.application
}
func (deployment *Deployment) Environment() string {
return deployment.environment
}
func (deployment *Deployment) Strategy() common.Strategy {
return deployment.strategy
}
func (deployment *Deployment) Ref() string {
return deployment.ref
}
func (deployment *Deployment) SHA1() string {
return deployment.sha1
}
func (deployment *Deployment) SetSHA1(sha1 string) {
deployment.sha1 = sha1
}
func (deployment *Deployment) MostPreciseRef() string {
if deployment.sha1 != "" {
return deployment.sha1
} else {
return deployment.ref
}
}
func (deployment *Deployment) SetStoreAndSave(store common.Store) error {
deployment.store = store
return store.SaveDeployment(deployment)
}
// Will panic if it is unable to save. This will be called *after*
// `SetStoreAndSave` should have been called, so we're assuming that if that
// worked then this should also work.
func (deployment *Deployment) setStateAndSave(state common.DeploymentState) {
deployment.currentState = state
err := deployment.store.SaveDeployment(deployment)
if err != nil {
panic(err)
}
}
func (deployment *Deployment) Flags() map[string]interface{} {
return deployment.flags
}
func (deployment *Deployment) HasFlag(key string) bool {
_, present := deployment.flags[key]
return present
}
func (deployment *Deployment) Flag(key string) interface{} {
return deployment.flags[key]
}
func (deployment *Deployment) SetFlag(key string, value interface{}) {
deployment.flags[key] = value
}
// Looks for the "force" boolean in the `flags`.
func (deployment *Deployment) IsForce() bool {
if force, ok := deployment.Flag("force").(bool); ok {
return force
} else {
return false
}
}
func (deployment *Deployment) Status() common.DeploymentStatus {
var phase common.Phase
if deployment.currentState == common.RUNNING_PHASE {
phase = deployment.currentPhase
}
return common.DeploymentStatus{
State: deployment.currentState,
Phase: phase,
Error: nil,
}
}
func (deployment *Deployment) CheckPreconditions() error {
deployment.setStateAndSave(common.RUNNING_PRECONDITIONS)<|fim▁hole|> if err != nil {
return err
}
}
return nil
}
// Internal implementation of running phases. Manages setting
// `deployment.currentPhase` to the phase currently executing.
func (deployment *Deployment) runPhases(preloadResults PreloadResults) error {
phases := deployment.strategy.Phases()
for _, phase := range phases {
deployment.currentPhase = phase
preloadResult := preloadResults.Get(phase)
err := phase.Execute(deployment, preloadResult)
if err != nil {
return err
}
}
return nil
}
// Runs all the phases configured in the `Strategy`. Sets `currentState` and
// `currentPhase` fields as appropriate. If an error occurs it will also set
// the `lastError` field to that error.
func (deployment *Deployment) RunPhases() error {
results, err := deployment.RunPhasePreloads()
if err != nil {
deployment.lastError = err
deployment.setStateAndSave(common.DEPLOYMENT_ERROR)
return err
}
deployment.setStateAndSave(common.RUNNING_PHASE)
err = deployment.runPhases(results)
if err != nil {
deployment.lastError = err
deployment.setStateAndSave(common.DEPLOYMENT_ERROR)
return err
} else {
deployment.setStateAndSave(common.DEPLOYMENT_DONE)
return nil
}
}
type preloadResult struct {
data interface{}
err error
}
type PreloadResults map[common.Phase]interface{}
func (results PreloadResults) Get(phase common.Phase) interface{} {
return results[phase]
}
func (results PreloadResults) Set(phase common.Phase, data interface{}) {
results[phase] = data
}
// Phases can expose preloads to gather any additional information they may
// need before executing. This will run those preloads in parallel.
func (deployment *Deployment) RunPhasePreloads() (PreloadResults, error) {
preloadablePhases := make([]common.PreloadablePhase, 0)
for _, phase := range deployment.strategy.Phases() {
if phase.CanPreload() {
preloadablePhases = append(preloadablePhases, phase.(common.PreloadablePhase))
}
}
resultChan := make(chan preloadResult)
for _, phase := range preloadablePhases {
go func() {
data, err := phase.Preload(deployment)
resultChan <- preloadResult{data: data, err: err}
}()
}
results := make(PreloadResults)
for _, phase := range preloadablePhases {
result := <-resultChan
if result.err != nil {
return nil, result.err
} else {
results.Set(phase.(common.Phase), result.data)
}
}
return results, nil
}<|fim▁end|> |
preconditions := deployment.strategy.Preconditions()
for _, precondition := range preconditions {
err := precondition.Status(deployment) |
<|file_name|>testing-topo.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
"Assignment 5 - This defines a topology for running a firewall. It is not \
necessarily the topology that will be used for grading, so feel free to \
edit and create new topologies and share them."
from mininet.topo import Topo
from mininet.net import Mininet
from mininet.node import CPULimitedHost, RemoteController
from mininet.util import custom
from mininet.link import TCLink
from mininet.cli import CLI
class FWTopo(Topo):
''' Creates the following topoplogy:
e1 e2 e3<|fim▁hole|> | | |
w1 w2 w3
'''
def __init__(self, cpu=.1, bw=10, delay=None, **params):
super(FWTopo,self).__init__()
# Host in link configuration
hconfig = {'cpu': cpu}
lconfig = {'bw': bw, 'delay': delay}
# Create the firewall switch
s1 = self.addSwitch('s1')
# Create East hosts and links)
e1 = self.addHost('e1', **hconfig)
e2 = self.addHost('e2', **hconfig)
e3 = self.addHost('e3', **hconfig)
self.addLink(s1, e1, port1=1, port2=1, **lconfig)
self.addLink(s1, e2, port1=2, port2=1, **lconfig)
self.addLink(s1, e3, port1=3, port2=1, **lconfig)
# Create West hosts and links)
w1 = self.addHost('w1', **hconfig)
w2 = self.addHost('w2', **hconfig)
w3 = self.addHost('w3', **hconfig)
self.addLink(s1, w1, port1=4, port2=1, **lconfig)
self.addLink(s1, w2, port1=5, port2=1, **lconfig)
self.addLink(s1, w3, port1=6, port2=1, **lconfig)
def main():
print "Starting topology"
topo = FWTopo()
net = Mininet(topo=topo, link=TCLink, controller=RemoteController, autoSetMacs=True)
net.start()
try:
from unit_tests import run_tests
raw_input('Unit tests to be run next. Make sure your firewall is running, then press a key')
run_tests(net)
except ImportError:
raise
CLI(net)
if __name__ == '__main__':
main()<|fim▁end|> | | | |
\ | /
firwall (s1)
/ | \ |
<|file_name|>4chan.py<|end_file_name|><|fim▁begin|>import re
import json
import requests<|fim▁hole|>
from util import irc
from util.handler_utils import cmdhook, authenticate, get_target
from qtbot3_common.types.message import Message
def scrape(board: str, filtertext: str):
try:
data = requests.get("http://boards.4chan.org/{board}/catalog".format(board=board)).text
match = re.match(".*var catalog = (?P<catalog>\{.*\});.*", data)
if not match:
print("Couldn't scrape catalog")
catalog = json.loads(match.group('catalog'))
for number, thread in catalog['threads'].items():
sub, teaser = thread['sub'], thread['teaser']
if filtertext in sub.lower() or filtertext in teaser.lower():
yield(number, thread)
except Exception as ex:
print("scraping exception:", ex)
@cmdhook('4chan (?P<board>[^\s]+) (?P<filtertext>.+)')
@authenticate
def handle_scrape(message: Message, match, nick: str):
board = match['board']
filtertext = match['filtertext']
print("searching 4chan's {board} board for {filtertext}...".format(**match))
baseurl = "http://boards.4chan.org/{board}/thread/{number}/{semantic_url}"
lines = []
for number, thread in scrape(board, filtertext):
title = (thread['sub'] + ': ' + baseurl).format(number=number, board=board, **thread)
lines.append(title + ' - ' + thread['teaser'])
target = get_target(message, nick)
return [irc.chat_message(target, line) for line in lines[:3]]<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2016 Amazon Web Services, Inc. or its affiliates. All Rights Reserved.
# This file is licensed to you under the AWS Customer Agreement (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at http://aws.amazon.com/agreement/ .
# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or implied.
# See the License for the specific language governing permissions and limitations under the License.
from botocore.vendored import requests
import json
SUCCESS = "SUCCESS"
FAILED = "FAILED"
def send(event, context, responseStatus, responseData, physicalResourceId=None):
responseUrl = event['ResponseURL']
print responseUrl
responseBody = {}
responseBody['Status'] = responseStatus
responseBody['Reason'] = 'See the details in CloudWatch Log Stream: ' + context.log_stream_name
responseBody['PhysicalResourceId'] = physicalResourceId or context.log_stream_name
responseBody['StackId'] = event['StackId']
responseBody['RequestId'] = event['RequestId']
responseBody['LogicalResourceId'] = event['LogicalResourceId']
responseBody['Data'] = responseData
<|fim▁hole|> headers = {
'content-type' : '',
'content-length' : str(len(json_responseBody))
}
try:
response = requests.put(responseUrl,
data=json_responseBody,
headers=headers)
print "Status code: " + response.reason
except Exception as e:
print "send(..) failed executing requests.put(..): " + str(e)<|fim▁end|> | json_responseBody = json.dumps(responseBody)
print "Response body:\n" + json_responseBody
|
<|file_name|>Heap.py<|end_file_name|><|fim▁begin|>class Heap(object):
def __init__(self, data=[]):
if len(data) == 0:
self.data = [None] * 100
else:
self.data = data
self.__size = sum([1 if item is not None else 0 for item in self.data])
self.__heapify()
def size(self):
return self.__size
def empty(self):
return self.__size == 0
def get_max(self):
return self.data[0]
def delete_max(self):
max_data = self.data[0]
self.__swap(0, self.__size - 1)
self.data[self.__size - 1] = None
self.__size -= 1
self.__percolate_down(0)
return max_data
def insert(self, number):
if self.__size == len(self.data):
self.__expand()
self.__size += 1
self.data[self.__size - 1] = number
return self.__percolate_up(self.__size - 1)
@staticmethod
def heap_sort(data):
heap = Heap(data)
index = heap.size() - 1
while not heap.empty():
heap.data[index] = heap.delete_max()
index -= 1
return heap.data
def __percolate_down(self, i):
initial_value = self.data[i]
current_index = i
potential_parent = self.__proper_parent(current_index)
while self.data[potential_parent] > self.data[current_index]:
self.data[current_index] = self.data[potential_parent]
current_index = potential_parent
potential_parent = self.__proper_parent(current_index)
self.data[current_index] = initial_value
return current_index
def __percolate_up(self, i):
if not self.__has_parent(i):
return 0
initial_value = self.data[i]
parent_indexes = []
<|fim▁hole|> parent_indexes.append(current_index)
h += 1
lo = 0
hi = len(parent_indexes) - 1
while lo + 1 < hi:
mi = (lo + hi) / 2
if self.data[parent_indexes[mi]] <= self.data[i]:
lo = mi
else:
hi = mi
parent_indexes.insert(0, i)
lo = lo + 1
index = 0
while index < lo:
self.data[parent_indexes[index]] = self.data[parent_indexes[index + 1]]
index += 1
self.data[parent_indexes[lo]] = initial_value
return parent_indexes[lo]
def __expand(self):
new_data = [None] * (self.__size * 2)
for i in range(self.__size):
new_data[i] = self.data[i]
self.data = new_data
def __heapify(self):
i = self.__last_internal()
while self.__in_heap(i):
self.__percolate_down(i)
i -= 1
def __swap(self, i , j):
temp = self.data[i]
self.data[i] = self.data[j]
self.data[j] = temp
def __in_heap(self, i):
return 0 <= i < self.size()
def __parent(self, i):
return (i - 1) >> 1
def __last_internal(self):
return self.__parent(self.size() - 1)
def __left_child(self, i):
return (i << 1) + 1
def __right_child(self, i):
return (i + 1) << 1
def __has_parent(self, i):
return 0 < i
def __has_left_child(self, i):
return self.__in_heap(self.__left_child(i))
def __has_right_child(self, i):
return self.__in_heap(self.__right_child(i))
def __bigger(self, i, j):
return i if self.data[i] > self.data[j] else j
def __proper_parent(self, i):
return self.__bigger(self.__bigger(self.__left_child(i), self.__right_child(i)), i) if self.__has_right_child(i) else \
self.__bigger(self.__left_child(i), i) if self.__has_left_child(i) else \
i<|fim▁end|> | h = 1
current_index = i
while self.__has_parent(current_index):
current_index = ((i + 1) >> h) - 1 |
<|file_name|>gridMaterial.ts<|end_file_name|><|fim▁begin|>import { serializeAsTexture, serialize, expandToProperty, serializeAsColor3, SerializationHelper } from "babylonjs/Misc/decorators";
import { Matrix, Vector4, Vector3 } from "babylonjs/Maths/math.vector";
import { Color3 } from "babylonjs/Maths/math.color";
import { BaseTexture } from "babylonjs/Materials/Textures/baseTexture";
import { MaterialDefines } from "babylonjs/Materials/materialDefines";
import { MaterialHelper } from "babylonjs/Materials/materialHelper";
import { PushMaterial } from "babylonjs/Materials/pushMaterial";
import { MaterialFlags } from "babylonjs/Materials/materialFlags";
import { VertexBuffer } from "babylonjs/Meshes/buffer";
import { AbstractMesh } from "babylonjs/Meshes/abstractMesh";
import { SubMesh } from "babylonjs/Meshes/subMesh";
import { Mesh } from "babylonjs/Meshes/mesh";
import { Scene } from "babylonjs/scene";
import { _TypeStore } from 'babylonjs/Misc/typeStore';
import "./grid.fragment";
import "./grid.vertex";
class GridMaterialDefines extends MaterialDefines {
public OPACITY = false;
public TRANSPARENT = false;
public FOG = false;
public PREMULTIPLYALPHA = false;
public UV1 = false;
public UV2 = false;
public INSTANCES = false;
public THIN_INSTANCES = false;
constructor() {
super();
this.rebuild();
}
}
/**
* The grid materials allows you to wrap any shape with a grid.
* Colors are customizable.
*/
export class GridMaterial extends PushMaterial {
/**
* Main color of the grid (e.g. between lines)
*/
@serializeAsColor3()
public mainColor = Color3.Black();
/**
* Color of the grid lines.
*/
@serializeAsColor3()
public lineColor = Color3.Teal();
/**
* The scale of the grid compared to unit.
*/
@serialize()
public gridRatio = 1.0;
/**
* Allows setting an offset for the grid lines.
*/
@serializeAsColor3()
public gridOffset = Vector3.Zero();
/**
* The frequency of thicker lines.
*/
@serialize()
public majorUnitFrequency = 10;
/**
* The visibility of minor units in the grid.
*/
@serialize()
public minorUnitVisibility = 0.33;
/**
* The grid opacity outside of the lines.
*/
@serialize()
public opacity = 1.0;
/**
* Determine RBG output is premultiplied by alpha value.
*/
@serialize()
public preMultiplyAlpha = false;
@serializeAsTexture("opacityTexture")
private _opacityTexture: BaseTexture;
@expandToProperty("_markAllSubMeshesAsTexturesDirty")
public opacityTexture: BaseTexture;
private _gridControl: Vector4 = new Vector4(this.gridRatio, this.majorUnitFrequency, this.minorUnitVisibility, this.opacity);
/**
* constructor
* @param name The name given to the material in order to identify it afterwards.
* @param scene The scene the material is used in.
*/
constructor(name: string, scene: Scene) {
super(name, scene);
}
/**
* Returns whether or not the grid requires alpha blending.
*/
public needAlphaBlending(): boolean {
return this.opacity < 1.0 || this._opacityTexture && this._opacityTexture.isReady();
}
public needAlphaBlendingForMesh(mesh: AbstractMesh): boolean {
return this.needAlphaBlending();
}
public isReadyForSubMesh(mesh: AbstractMesh, subMesh: SubMesh, useInstances?: boolean): boolean {
if (this.isFrozen) {
if (subMesh.effect && subMesh.effect._wasPreviouslyReady) {
return true;
}
}
if (!subMesh._materialDefines) {
subMesh._materialDefines = new GridMaterialDefines();
}
var defines = <GridMaterialDefines>subMesh._materialDefines;
var scene = this.getScene();
if (this._isReadyForSubMesh(subMesh)) {
return true;
}
if (defines.TRANSPARENT !== (this.opacity < 1.0)) {
defines.TRANSPARENT = !defines.TRANSPARENT;
defines.markAsUnprocessed();
}
if (defines.PREMULTIPLYALPHA != this.preMultiplyAlpha) {
defines.PREMULTIPLYALPHA = !defines.PREMULTIPLYALPHA;
defines.markAsUnprocessed();
}
// Textures
if (defines._areTexturesDirty) {
defines._needUVs = false;
if (scene.texturesEnabled) {
if (this._opacityTexture && MaterialFlags.OpacityTextureEnabled) {
if (!this._opacityTexture.isReady()) {
return false;
} else {
defines._needUVs = true;
defines.OPACITY = true;
}
}
}
}
MaterialHelper.PrepareDefinesForMisc(mesh, scene, false, false, this.fogEnabled, false, defines);
// Values that need to be evaluated on every frame
MaterialHelper.PrepareDefinesForFrameBoundValues(scene, scene.getEngine(), defines, !!useInstances);
// Get correct effect
if (defines.isDirty) {
defines.markAsProcessed();
scene.resetCachedMaterial();
// Attributes
MaterialHelper.PrepareDefinesForAttributes(mesh, defines, false, false);
var attribs = [VertexBuffer.PositionKind, VertexBuffer.NormalKind];
<|fim▁hole|>
if (defines.UV1) {
attribs.push(VertexBuffer.UVKind);
}
if (defines.UV2) {
attribs.push(VertexBuffer.UV2Kind);
}
MaterialHelper.PrepareAttributesForInstances(attribs, defines);
// Defines
var join = defines.toString();
subMesh.setEffect(scene.getEngine().createEffect("grid",
attribs,
["projection", "mainColor", "lineColor", "gridControl", "gridOffset", "vFogInfos", "vFogColor", "world", "view",
"opacityMatrix", "vOpacityInfos"],
["opacitySampler"],
join,
undefined,
this.onCompiled,
this.onError), defines);
}
if (!subMesh.effect || !subMesh.effect.isReady()) {
return false;
}
defines._renderId = scene.getRenderId();
subMesh.effect._wasPreviouslyReady = true;
return true;
}
public bindForSubMesh(world: Matrix, mesh: Mesh, subMesh: SubMesh): void {
var scene = this.getScene();
var defines = <GridMaterialDefines>subMesh._materialDefines;
if (!defines) {
return;
}
var effect = subMesh.effect;
if (!effect) {
return;
}
this._activeEffect = effect;
// Matrices
if (!defines.INSTANCES || defines.THIN_INSTANCE) {
this.bindOnlyWorldMatrix(world);
}
this._activeEffect.setMatrix("view", scene.getViewMatrix());
this._activeEffect.setMatrix("projection", scene.getProjectionMatrix());
// Uniforms
if (this._mustRebind(scene, effect)) {
this._activeEffect.setColor3("mainColor", this.mainColor);
this._activeEffect.setColor3("lineColor", this.lineColor);
this._activeEffect.setVector3("gridOffset", this.gridOffset);
this._gridControl.x = this.gridRatio;
this._gridControl.y = Math.round(this.majorUnitFrequency);
this._gridControl.z = this.minorUnitVisibility;
this._gridControl.w = this.opacity;
this._activeEffect.setVector4("gridControl", this._gridControl);
if (this._opacityTexture && MaterialFlags.OpacityTextureEnabled) {
this._activeEffect.setTexture("opacitySampler", this._opacityTexture);
this._activeEffect.setFloat2("vOpacityInfos", this._opacityTexture.coordinatesIndex, this._opacityTexture.level);
this._activeEffect.setMatrix("opacityMatrix", this._opacityTexture.getTextureMatrix());
}
}
// Fog
MaterialHelper.BindFogParameters(scene, mesh, this._activeEffect);
this._afterBind(mesh, this._activeEffect);
}
/**
* Dispose the material and its associated resources.
* @param forceDisposeEffect will also dispose the used effect when true
*/
public dispose(forceDisposeEffect?: boolean): void {
super.dispose(forceDisposeEffect);
}
public clone(name: string): GridMaterial {
return SerializationHelper.Clone(() => new GridMaterial(name, this.getScene()), this);
}
public serialize(): any {
var serializationObject = SerializationHelper.Serialize(this);
serializationObject.customType = "BABYLON.GridMaterial";
return serializationObject;
}
public getClassName(): string {
return "GridMaterial";
}
public static Parse(source: any, scene: Scene, rootUrl: string): GridMaterial {
return SerializationHelper.Parse(() => new GridMaterial(source.name, scene), source, scene, rootUrl);
}
}
_TypeStore.RegisteredTypes["BABYLON.GridMaterial"] = GridMaterial;<|fim▁end|> | |
<|file_name|>remutex.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![unstable(feature = "reentrant_mutex", reason = "new API",
issue = "27738")]
use prelude::v1::*;
use fmt;
use marker;
use ops::Deref;
use sys_common::poison::{self, TryLockError, TryLockResult, LockResult};
use sys::mutex as sys;
/// A re-entrant mutual exclusion
///
/// This mutex will block *other* threads waiting for the lock to become
/// available. The thread which has already locked the mutex can lock it
/// multiple times without blocking, preventing a common source of deadlocks.
pub struct ReentrantMutex<T> {
inner: Box<sys::ReentrantMutex>,
poison: poison::Flag,
data: T,
}
unsafe impl<T: Send> Send for ReentrantMutex<T> {}
unsafe impl<T: Send> Sync for ReentrantMutex<T> {}
/// An RAII implementation of a "scoped lock" of a mutex. When this structure is
/// dropped (falls out of scope), the lock will be unlocked.
///
/// The data protected by the mutex can be accessed through this guard via its
/// Deref implementation.
///
/// # Mutability
///
/// Unlike `MutexGuard`, `ReentrantMutexGuard` does not implement `DerefMut`,
/// because implementation of the trait would violate Rust’s reference aliasing
/// rules. Use interior mutability (usually `RefCell`) in order to mutate the
/// guarded data.
#[must_use]
pub struct ReentrantMutexGuard<'a, T: 'a> {
// funny underscores due to how Deref currently works (it disregards field
// privacy).
__lock: &'a ReentrantMutex<T>,
__poison: poison::Guard,
}
impl<'a, T> !marker::Send for ReentrantMutexGuard<'a, T> {}
impl<T> ReentrantMutex<T> {
/// Creates a new reentrant mutex in an unlocked state.
pub fn new(t: T) -> ReentrantMutex<T> {
unsafe {
let mut mutex = ReentrantMutex {
inner: box sys::ReentrantMutex::uninitialized(),
poison: poison::Flag::new(),
data: t,
};
mutex.inner.init();
mutex
}
}
/// Acquires a mutex, blocking the current thread until it is able to do so.
///
/// This function will block the caller until it is available to acquire the mutex.
/// Upon returning, the thread is the only thread with the mutex held. When the thread
/// calling this method already holds the lock, the call shall succeed without
/// blocking.
///
/// # Failure
///
/// If another user of this mutex panicked while holding the mutex, then
/// this call will return failure if the mutex would otherwise be
/// acquired.
pub fn lock(&self) -> LockResult<ReentrantMutexGuard<T>> {
unsafe { self.inner.lock() }
ReentrantMutexGuard::new(&self)
}
/// Attempts to acquire this lock.
///
/// If the lock could not be acquired at this time, then `Err` is returned.
/// Otherwise, an RAII guard is returned.
///
/// This function does not block.
///
/// # Failure
///
/// If another user of this mutex panicked while holding the mutex, then
/// this call will return failure if the mutex would otherwise be
/// acquired.
pub fn try_lock(&self) -> TryLockResult<ReentrantMutexGuard<T>> {
if unsafe { self.inner.try_lock() } {
Ok(try!(ReentrantMutexGuard::new(&self)))
} else {
Err(TryLockError::WouldBlock)
}
}
}
impl<T> Drop for ReentrantMutex<T> {
fn drop(&mut self) {
// This is actually safe b/c we know that there is no further usage of
// this mutex (it's up to the user to arrange for a mutex to get
// dropped, that's not our job)
unsafe { self.inner.destroy() }
}
}
impl<T: fmt::Debug + 'static> fmt::Debug for ReentrantMutex<T> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self.try_lock() {
Ok(guard) => write!(f, "ReentrantMutex {{ data: {:?} }}", &*guard),
Err(TryLockError::Poisoned(err)) => {
write!(f, "ReentrantMutex {{ data: Poisoned({:?}) }}", &**err.get_ref())
},
Err(TryLockError::WouldBlock) => write!(f, "ReentrantMutex {{ <locked> }}")
}
}
}
impl<'mutex, T> ReentrantMutexGuard<'mutex, T> {
fn new(lock: &'mutex ReentrantMutex<T>)
-> LockResult<ReentrantMutexGuard<'mutex, T>> {
poison::map_result(lock.poison.borrow(), |guard| {
ReentrantMutexGuard {
__lock: lock,
__poison: guard,
}
})
}
}
impl<'mutex, T> Deref for ReentrantMutexGuard<'mutex, T> {
type Target = T;
fn deref(&self) -> &T {
&self.__lock.data
}
}
impl<'a, T> Drop for ReentrantMutexGuard<'a, T> {
#[inline]
fn drop(&mut self) {
unsafe {
self.__lock.poison.done(&self.__poison);
self.__lock.inner.unlock();
}
}
}
#[cfg(test)]
mod tests {
use prelude::v1::*;
use sys_common::remutex::{ReentrantMutex, ReentrantMutexGuard};
use cell::RefCell;
use sync::Arc;
use boxed;
use thread;
#[test]
fn smoke() {
let m = ReentrantMutex::new(());
{
let a = m.lock().unwrap();
{
let b = m.lock().unwrap();
{
let c = m.lock().unwrap();
assert_eq!(*c, ());
}
assert_eq!(*b, ());
}
assert_eq!(*a, ());
}
}
#[test]
fn is_mutex() {
let m = Arc::new(ReentrantMutex::new(RefCell::new(0)));<|fim▁hole|> assert_eq!(*lock.borrow(), 4950);
});
for i in 0..100 {
let lock = m.lock().unwrap();
*lock.borrow_mut() += i;
}
drop(lock);
child.join().unwrap();
}
#[test]
fn trylock_works() {
let m = Arc::new(ReentrantMutex::new(()));
let m2 = m.clone();
let lock = m.try_lock().unwrap();
let lock2 = m.try_lock().unwrap();
thread::spawn(move || {
let lock = m2.try_lock();
assert!(lock.is_err());
}).join().unwrap();
let lock3 = m.try_lock().unwrap();
}
pub struct Answer<'a>(pub ReentrantMutexGuard<'a, RefCell<u32>>);
impl<'a> Drop for Answer<'a> {
fn drop(&mut self) {
*self.0.borrow_mut() = 42;
}
}
#[test]
fn poison_works() {
let m = Arc::new(ReentrantMutex::new(RefCell::new(0)));
let mc = m.clone();
let result = thread::spawn(move ||{
let lock = mc.lock().unwrap();
*lock.borrow_mut() = 1;
let lock2 = mc.lock().unwrap();
*lock.borrow_mut() = 2;
let answer = Answer(lock2);
panic!("What the answer to my lifetimes dilemma is?");
drop(answer);
}).join();
assert!(result.is_err());
let r = m.lock().err().unwrap().into_inner();
assert_eq!(*r.borrow(), 42);
}
}<|fim▁end|> | let m2 = m.clone();
let lock = m.lock().unwrap();
let child = thread::spawn(move || {
let lock = m2.lock().unwrap(); |
<|file_name|>Palindrome.py<|end_file_name|><|fim▁begin|><|fim▁hole|> return str[:1]
assert(firstCharacter("abc") is "a")
def lastCharacter(str):
return str[-1:]
assert(lastCharacter("abc") is "c")
def middleCharacters(str):
return str[1:-1]
assert(middleCharacters("abc") == "b")
assert(middleCharacters("abcde") == "bcd")
def isPalindrome(str):
if len(str) <= 1:
return True
if firstCharacter(str) != lastCharacter(str):
return False
return isPalindrome(middleCharacters(str))
assert(isPalindrome("a") == True)
assert(isPalindrome("taste") == False)
assert(isPalindrome("roror") == True)<|fim▁end|> | def firstCharacter(str): |
<|file_name|>resource_alicloud_mns_topic_subscription_test.go<|end_file_name|><|fim▁begin|>package alicloud
import (
"fmt"
"testing"
"github.com/alibaba/terraform-provider/alicloud/connectivity"
"github.com/dxh031/ali_mns"
"github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/terraform"
)
func TestAccAlicloudMnsTopicSubscription_basic(t *testing.T) {
var attr ali_mns.TopicAttribute
var subscriptionAttr ali_mns.SubscriptionAttribute
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
Providers: testAccProviders,
CheckDestroy: testAccCheckMNSTopicSubscriptionDestroy,
Steps: []resource.TestStep{
{
Config: testAccMNSTopicSubscriptionConfig,
Check: resource.ComposeTestCheckFunc(
testAccMNSTopicExist("alicloud_mns_topic.topic", &attr),
testAccMNSTopicSubscriptionExist("alicloud_mns_topic_subscription.subscription", &subscriptionAttr),
resource.TestCheckResourceAttr("alicloud_mns_topic_subscription.subscription", "name", "tf-testAccMNSTopicSubscriptionConfig"),
resource.TestCheckResourceAttr("alicloud_mns_topic_subscription.subscription", "endpoint", "http://www.test.com/test"),
resource.TestCheckResourceAttr("alicloud_mns_topic_subscription.subscription", "notify_content_format", "SIMPLIFIED"),
),
},
{
Config: testAccMNSTopicSubscriptionConfigUpdate,
Check: resource.ComposeTestCheckFunc(
testAccMNSTopicExist("alicloud_mns_topic.topic", &attr),
testAccMNSTopicSubscriptionExist("alicloud_mns_topic_subscription.subscription", &subscriptionAttr),
resource.TestCheckResourceAttr("alicloud_mns_topic_subscription.subscription", "name", "tf-testAccMNSTopicSubscriptionConfig"),
resource.TestCheckResourceAttr("alicloud_mns_topic_subscription.subscription", "notify_strategy", "EXPONENTIAL_DECAY_RETRY"),
resource.TestCheckResourceAttr("alicloud_mns_topic_subscription.subscription", "endpoint", "http://www.test.com/test"),
resource.TestCheckResourceAttr("alicloud_mns_topic_subscription.subscription", "notify_content_format", "SIMPLIFIED"),
),
},
},
})
}
func testAccMNSTopicSubscriptionExist(n string, attr *ali_mns.SubscriptionAttribute) resource.TestCheckFunc {
return func(s *terraform.State) error {
rs, ok := s.RootModule().Resources[n]
if !ok {
return fmt.Errorf("Not found: %s", n)
}
if rs.Primary.ID == "" {
return fmt.Errorf("No MNSTopicSubscription ID is set")
}
client := testAccProvider.Meta().(*connectivity.AliyunClient)
mnsService := MnsService{}
topicName, name := mnsService.GetTopicNameAndSubscriptionName(rs.Primary.ID)
raw, err := client.WithMnsSubscriptionManagerByTopicName(topicName, func(subscriptionManager ali_mns.AliMNSTopic) (interface{}, error) {
return subscriptionManager.GetSubscriptionAttributes(name)
})
if err != nil {
return err
}
instance, _ := raw.(ali_mns.SubscriptionAttribute)
if instance.SubscriptionName != name {
return fmt.Errorf("mns subscription %s not found", n)
}
*attr = instance
return nil
}
}
func testAccCheckMNSTopicSubscriptionDestroy(s *terraform.State) error {
client := testAccProvider.Meta().(*connectivity.AliyunClient)
mnsService := MnsService{}
for _, rs := range s.RootModule().Resources {
if rs.Type != "alicloud_mns_topic_subscription" {
continue
}
topicName, name := mnsService.GetTopicNameAndSubscriptionName(rs.Primary.ID)
_, err := client.WithMnsSubscriptionManagerByTopicName(topicName, func(subscriptionManager ali_mns.AliMNSTopic) (interface{}, error) {
return subscriptionManager.GetSubscriptionAttributes(name)
})
if err != nil {
if mnsService.SubscriptionNotExistFunc(err) {
continue
}
return err
}
return fmt.Errorf("MNS topic subscription %s still exist", name)
}
return nil
}
const testAccMNSTopicSubscriptionConfig = `variable "name" {<|fim▁hole|> default = "tf-testAccMNSTopicSubscriptionConfig"
}
resource "alicloud_mns_topic" "topic"{
name="${var.name}"
}
resource "alicloud_mns_topic_subscription" "subscription"{
topic_name="${alicloud_mns_topic.topic.name}"
name="${var.subscriptionName}"
endpoint="http://www.test.com/test"
notify_strategy="BACKOFF_RETRY"
notify_content_format="SIMPLIFIED"
}`
const testAccMNSTopicSubscriptionConfigUpdate = `variable "name" {
default = "tf-testAccMNSTopicConfig"
}
variable "subscriptionName" {
default = "tf-testAccMNSTopicSubscriptionConfig"
}
resource "alicloud_mns_topic" "topic"{
name="${var.name}"
maximum_message_size=12357
logging_enabled=true
}
resource "alicloud_mns_topic_subscription" "subscription"{
topic_name="${alicloud_mns_topic.topic.name}"
name="${var.subscriptionName}"
endpoint="http://www.test.com/test"
notify_strategy="EXPONENTIAL_DECAY_RETRY"
notify_content_format="SIMPLIFIED"
}`<|fim▁end|> | default = "tf-testAccMNSTopicConfig"
}
variable "subscriptionName" { |
<|file_name|>extract_translator_comments.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import re
source_files = {}
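# Matches a trailing translator comment of the form "# T: some comment" on a source line.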
comment_re = re.compile(r'\s+#\s+T:\s+(.+)\s*$')
def get_file(file):
if not file in source_files:
#~ print 'Extracting comments from', file
source_files[file] = open(file).readlines()
source_files[file].append('')
return source_files[file]
def extract_comment(file, line):
lines = get_file(file)
line -= 1 # list is 0 based
match = comment_re.search(lines[line])
if match:
# comment on same line
return match.group(1)
else:
# search next line(s) for a comment
i = line+1
while i < len(lines):
if '_(' in lines[i] or 'gettext(' in lines[i]:<|fim▁hole|> return match.group(1)
i += 1
return None
def extract_comments(sources):
sources = [s.split(':') for s in sources]
comments = []
for file, line in sources:
comment = extract_comment(file, int(line))
if comment and comment not in comments:
comments.append(comment)
if comments:
return ' | \n'.join(['#. '+c for c in comments])+'\n'
else:
print 'No translator comment for:'
for file, line in sources:
print '\t%s line %s' % (file, line)
return ''
def add_comments(file):
messages = open(file).readlines()
fh = open(file, 'w')
while messages:
line = messages.pop(0)
if line.startswith('#: '):
lines = [line]
sources = line[3:].strip().split()
while messages[0].startswith('#: '):
line = messages.pop(0)
lines.append(line)
sources += line[3:].strip().split()
fh.write(extract_comments(sources))
fh.writelines(lines)
elif line.startswith('#. '):
pass
else:
fh.write(line)
if __name__ == '__main__':
add_comments('translations/zim.pot')<|fim▁end|> | break
else:
match = comment_re.search(lines[i])
if match: |
<|file_name|>lt.rs<|end_file_name|><|fim▁begin|>#![feature(core)]
extern crate core;
#[cfg(test)]
mod tests {
// pub trait FixedSizeArray<T> {
// /// Converts the array to immutable slice
// fn as_slice(&self) -> &[T];
// /// Converts the array to mutable slice
// fn as_mut_slice(&mut self) -> &mut [T];
// }
// macro_rules! array_impls {
// ($($N:expr)+) => {
// $(
// #[unstable(feature = "core")]
// impl<T> FixedSizeArray<T> for [T; $N] {
// #[inline]
// fn as_slice(&self) -> &[T] {
// &self[..]
// }
// #[inline]
// fn as_mut_slice(&mut self) -> &mut [T] {
// &mut self[..]
// }
// }
//
// #[unstable(feature = "array_as_ref",
// reason = "should ideally be implemented for all fixed-sized arrays")]
// impl<T> AsRef<[T]> for [T; $N] {
// #[inline]
// fn as_ref(&self) -> &[T] {
// &self[..]
// }
// }
//
// #[unstable(feature = "array_as_ref",
// reason = "should ideally be implemented for all fixed-sized arrays")]
// impl<T> AsMut<[T]> for [T; $N] {
// #[inline]
// fn as_mut(&mut self) -> &mut [T] {
// &mut self[..]
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:Copy> Clone for [T; $N] {
// fn clone(&self) -> [T; $N] {
// *self
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T: Hash> Hash for [T; $N] {
// fn hash<H: hash::Hasher>(&self, state: &mut H) {
// Hash::hash(&self[..], state)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T: fmt::Debug> fmt::Debug for [T; $N] {
// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// fmt::Debug::fmt(&&self[..], f)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<'a, T> IntoIterator for &'a [T; $N] {
// type Item = &'a T;
// type IntoIter = Iter<'a, T>;
//
// fn into_iter(self) -> Iter<'a, T> {
// self.iter()
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<'a, T> IntoIterator for &'a mut [T; $N] {
// type Item = &'a mut T;
// type IntoIter = IterMut<'a, T>;
//
// fn into_iter(self) -> IterMut<'a, T> {
// self.iter_mut()
// }
// }
//
// // NOTE: some less important impls are omitted to reduce code bloat
// __impl_slice_eq1! { [A; $N], [B; $N] }
// __impl_slice_eq2! { [A; $N], [B] }
// __impl_slice_eq2! { [A; $N], &'b [B] }
// __impl_slice_eq2! { [A; $N], &'b mut [B] }
// // __impl_slice_eq2! { [A; $N], &'b [B; $N] }
// // __impl_slice_eq2! { [A; $N], &'b mut [B; $N] }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:Eq> Eq for [T; $N] { }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:PartialOrd> PartialOrd for [T; $N] {
// #[inline]
// fn partial_cmp(&self, other: &[T; $N]) -> Option<Ordering> {
// PartialOrd::partial_cmp(&&self[..], &&other[..])
// }
// #[inline]
// fn lt(&self, other: &[T; $N]) -> bool {
// PartialOrd::lt(&&self[..], &&other[..])
// }
// #[inline]
// fn le(&self, other: &[T; $N]) -> bool {
// PartialOrd::le(&&self[..], &&other[..])
// }
// #[inline]
// fn ge(&self, other: &[T; $N]) -> bool {
// PartialOrd::ge(&&self[..], &&other[..])
// }
// #[inline]
// fn gt(&self, other: &[T; $N]) -> bool {
// PartialOrd::gt(&&self[..], &&other[..])
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:Ord> Ord for [T; $N] {
// #[inline]
// fn cmp(&self, other: &[T; $N]) -> Ordering {
// Ord::cmp(&&self[..], &&other[..])
// }
// }
// )+
// }
// }
// array_impls! {
// 0 1 2 3 4 5 6 7 8 9
// 10 11 12 13 14 15 16 17 18 19
// 20 21 22 23 24 25 26 27 28 29
// 30 31 32
// }
type T = i32;
type A = T;
type B = T;
#[test]
fn lt_test1() {
let array_a: [A; 26] = [
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25
];
let array_b: [B; 26] = [
1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26
];
<|fim▁hole|> assert_eq!(array_a.lt(&array_b), true);
assert_eq!(array_a < array_b, true);
}
#[test]
fn lt_test2() {
let array_a: [A; 26] = [
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25
];
let array_b: [B; 26] = [
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25
];
assert_eq!(array_a.lt(&array_b), false);
assert_eq!(array_a < array_b, false);
}
#[test]
fn lt_test3() {
let array_a: [A; 26] = [
1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 22, 23, 24, 25, 26
];
let array_b: [B; 26] = [
0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, 15, 16, 17, 18, 19,
20, 21, 22, 23, 24, 25
];
assert_eq!(array_a.lt(&array_b), false);
assert_eq!(array_a < array_b, false);
}
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from todoman import version # type: ignore<|fim▁hole|>
__version__ = version.version
__documentation__ = "https://todoman.rtfd.org/en/latest/"<|fim▁end|> | |
<|file_name|>etcd.go<|end_file_name|><|fim▁begin|><|fim▁hole|> metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
apirequest "k8s.io/apiserver/pkg/endpoints/request"
"k8s.io/apiserver/pkg/registry/generic"
"k8s.io/apiserver/pkg/registry/generic/registry"
"k8s.io/apiserver/pkg/registry/rest"
kapi "k8s.io/kubernetes/pkg/api"
quotaapi "github.com/openshift/origin/pkg/quota/apis/quota"
"github.com/openshift/origin/pkg/quota/registry/clusterresourcequota"
"github.com/openshift/origin/pkg/util/restoptions"
)
type REST struct {
*registry.Store
}
// NewREST returns a RESTStorage object that will work against ClusterResourceQuota objects.
func NewREST(optsGetter restoptions.Getter) (*REST, *StatusREST, error) {
store := ®istry.Store{
Copier: kapi.Scheme,
NewFunc: func() runtime.Object { return "aapi.ClusterResourceQuota{} },
NewListFunc: func() runtime.Object { return "aapi.ClusterResourceQuotaList{} },
PredicateFunc: clusterresourcequota.Matcher,
QualifiedResource: quotaapi.Resource("clusterresourcequotas"),
CreateStrategy: clusterresourcequota.Strategy,
UpdateStrategy: clusterresourcequota.Strategy,
DeleteStrategy: clusterresourcequota.Strategy,
}
options := &generic.StoreOptions{RESTOptions: optsGetter, AttrFunc: clusterresourcequota.GetAttrs}
if err := store.CompleteWithOptions(options); err != nil {
return nil, nil, err
}
statusStore := *store
statusStore.CreateStrategy = nil
statusStore.DeleteStrategy = nil
statusStore.UpdateStrategy = clusterresourcequota.StatusStrategy
return &REST{store}, &StatusREST{store: &statusStore}, nil
}
// StatusREST implements the REST endpoint for changing the status of a resourcequota.
type StatusREST struct {
store *registry.Store
}
// StatusREST implements Patcher
var _ = rest.Patcher(&StatusREST{})
func (r *StatusREST) New() runtime.Object {
return "aapi.ClusterResourceQuota{}
}
// Get retrieves the object from the storage. It is required to support Patch.
func (r *StatusREST) Get(ctx apirequest.Context, name string, options *metav1.GetOptions) (runtime.Object, error) {
return r.store.Get(ctx, name, options)
}
// Update alters the status subset of an object.
func (r *StatusREST) Update(ctx apirequest.Context, name string, objInfo rest.UpdatedObjectInfo) (runtime.Object, bool, error) {
return r.store.Update(ctx, name, objInfo)
}<|fim▁end|> | package etcd
import ( |
<|file_name|>process-one-mail.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python -S
#
# Copyright 2011 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
<|fim▁hole|>import sys
from lp.services.config import config
from lp.services.mail.helpers import save_mail_to_librarian
from lp.services.mail.incoming import handle_one_mail
from lp.services.mail.signedmessage import signed_message_from_string
from lp.services.scripts.base import LaunchpadScript
class ProcessMail(LaunchpadScript):
usage = """%prog [options] [MAIL_FILE]
Process one incoming email, read from the specified file or from stdin.
Any mail generated in response is printed to stdout.
""" + __doc__
def main(self):
self.txn.begin()
# NB: This somewhat duplicates handleMail, but there it's mixed in
# with handling a mailbox, which we're avoiding here.
if len(self.args) >= 1:
from_file = file(self.args[0], 'rb')
else:
from_file = sys.stdin
self.logger.debug("reading message from %r" % (from_file,))
raw_mail = from_file.read()
self.logger.debug("got %d bytes" % len(raw_mail))
file_alias = save_mail_to_librarian(raw_mail)
self.logger.debug("saved to librarian as %r" % (file_alias,))
parsed_mail = signed_message_from_string(raw_mail)
# Kinda kludgey way to cause sendmail to just print it.
config.sendmail_to_stdout = True
handle_one_mail(
self.logger, parsed_mail,
file_alias, file_alias.http_url,
signature_timestamp_checker=None)
self.logger.debug("mail handling complete")
self.txn.commit()
if __name__ == '__main__':
script = ProcessMail('process-one-mail', dbuser=config.processmail.dbuser)
# No need to lock; you can run as many as you want as they use no global
# resources (like a mailbox).
script.run(use_web_security=True)<|fim▁end|> | """Process one email message, read from stdin."""
import _pythonpath
|
<|file_name|>resources.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.7.1)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x05\x96\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x18\x00\x00\x00\x18\x08\x06\x00\x00\x00\xe0\x77\x3d\xf8\
\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0d\xd7\x00\x00\x0d\xd7\
\x01\x42\x28\x9b\x78\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x00\x18\x74\x45\
\x58\x74\x54\x69\x74\x6c\x65\x00\x47\x49\x53\x20\x69\x63\x6f\x6e\
\x20\x74\x68\x65\x6d\x65\x20\x30\x2e\x32\xee\x53\xa0\xa0\x00\x00\
\x00\x18\x74\x45\x58\x74\x41\x75\x74\x68\x6f\x72\x00\x52\x6f\x62\
\x65\x72\x74\x20\x53\x7a\x63\x7a\x65\x70\x61\x6e\x65\x6b\x5f\x56\
\xb1\x08\x00\x00\x00\x27\x74\x45\x58\x74\x44\x65\x73\x63\x72\x69\
\x70\x74\x69\x6f\x6e\x00\x68\x74\x74\x70\x3a\x2f\x2f\x72\x6f\x62\
\x65\x72\x74\x2e\x73\x7a\x63\x7a\x65\x70\x61\x6e\x65\x6b\x2e\x70\
\x6c\x90\x59\x48\x60\x00\x00\x00\x18\x74\x45\x58\x74\x43\x72\x65\
\x61\x74\x69\x6f\x6e\x20\x54\x69\x6d\x65\x00\x32\x30\x30\x38\x2d\
\x31\x32\x2d\x31\x32\x58\x2e\x3b\xbf\x00\x00\x00\x52\x74\x45\x58\
\x74\x43\x6f\x70\x79\x72\x69\x67\x68\x74\x00\x43\x43\x20\x41\x74\
\x74\x72\x69\x62\x75\x74\x69\x6f\x6e\x2d\x53\x68\x61\x72\x65\x41\
\x6c\x69\x6b\x65\x20\x68\x74\x74\x70\x3a\x2f\x2f\x63\x72\x65\x61\
\x74\x69\x76\x65\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x6f\x72\x67\x2f\
\x6c\x69\x63\x65\x6e\x73\x65\x73\x2f\x62\x79\x2d\x73\x61\x2f\x33\
\x2e\x30\x2f\x5e\x83\x5a\xbc\x00\x00\x04\x16\x49\x44\x41\x54\x48\
\x89\x95\x93\x6d\x6c\x53\x55\x18\xc7\x7f\xf7\xde\xde\x7b\xdb\xb5\
\xb7\x4c\xde\xc6\x14\x37\x32\x12\x08\x0b\x64\xf8\x01\xb2\xb8\xad\
\xc9\x3e\x30\x12\x83\xd9\x10\x05\xb7\x25\x0c\x21\x42\x0c\x0c\x48\
\xcc\x28\x16\x0d\x01\x92\x45\x8a\x1f\xe4\x25\x82\xa0\x4b\xc0\x28\
\xcc\x28\x26\xc2\x17\x08\xfb\x00\x63\xc9\xfa\xc1\xc0\x4c\xd4\xac\
\xdb\x9c\x61\x71\x05\x9c\xb3\x8c\x5b\xa0\x5d\x6f\x8f\x1f\x48\x47\
\x5b\xee\xdc\xfc\x27\x27\xf7\xe5\xf9\x9f\xf3\x3b\xcf\xf3\x9c\x23\
\xbd\xb5\x61\xdd\x25\x24\x0a\x99\x4a\x82\xc8\xb7\x17\xbe\x7b\x7d\
\x4a\x5f\x8e\x1c\x48\x14\xbe\xdd\xb0\x01\xc3\x6d\x00\xf0\x30\xf6\
\x90\x0b\xdf\xb4\x63\xf3\x6f\xea\x4d\xd8\x02\x60\x62\xa1\x47\x4f\
\xc6\x67\x27\x92\xca\x3c\x21\x39\xee\x1a\x6e\x63\x24\x6d\x4a\xc7\
\x27\xd3\x9a\x1d\x07\x36\x63\x59\x01\x14\xa5\xf5\xf2\x89\xfd\x6d\<|fim▁hole|>\x74\xdc\x4e\xd5\xd5\x07\x1c\xe3\x56\xd2\x71\xf8\xe3\xc3\xa0\x28\
\xad\xb9\x71\x07\x82\x48\x46\x7d\x47\xc6\x51\x8b\x9d\x4e\x5d\x39\
\x7f\xfe\xfb\x17\x65\xac\x3f\x27\x9c\x82\x88\x1d\x40\x29\x36\x0f\
\xce\x9f\xbf\x60\x46\xb8\x37\x4c\xe7\xd7\x47\xdb\x9e\x33\x08\x21\
\xb2\x46\x65\xc3\x4e\x71\x2d\x3c\x2a\x56\x6d\xf1\xc7\x2a\x1a\x9b\
\xcb\x73\xe3\x99\xa3\xa2\xb1\xb9\x7c\xd5\x16\x7f\xec\x5a\x78\x54\
\x54\x36\xec\x14\x76\x9e\xac\x1e\xac\xd9\x71\x60\xb3\xe1\x31\xe8\
\x1f\x18\xa0\xbe\xbe\x3e\xcf\xa9\xea\x17\xab\xd7\x6f\xf7\xd8\x96\
\x66\xfd\x76\x8f\x53\xd5\x2f\xd6\xd7\xd7\xe7\xf5\x0f\x0c\x60\x78\
\x8c\xa7\xcd\xce\x51\x16\x00\xcb\x0a\xf8\xf7\xfa\xe9\xbc\x7e\x83\
\xd2\xd2\x52\xca\xca\x96\xe7\x2b\x86\xfb\x94\x6d\x69\x0c\xf7\xa9\
\xb2\xb2\xe5\xf9\xa5\xa5\xa5\x74\x5e\xbf\x81\x7f\xaf\x9f\x49\x9b\
\xfc\x6c\x96\xd2\x1a\x0c\x06\x1f\x18\x5e\x4f\x12\xa0\x6e\x6d\x9d\
\xcb\xa9\xeb\x75\xbe\xc6\x5d\xb5\x99\x36\x5f\xe3\xae\x5a\xa7\xae\
\xd7\xd5\xad\xad\x73\x01\x18\x5e\x4f\x32\x18\x0c\x3e\xb0\x6b\x72\
\x16\xe0\xf2\x89\xfd\x6d\xd1\xd8\xc8\xa2\x70\xb8\x2f\x61\x9a\x26\
\x9a\xa6\xd1\xd4\xb4\xc9\xad\x6a\xda\xd9\xf2\x86\xdd\x05\x00\xe5\
\x0d\xbb\x0b\x54\x4d\x3b\xdb\xd4\xb4\xc9\xad\x69\x1a\xa6\x69\x12\
\x0e\xf7\x25\xa2\xb1\x91\x45\xb9\x77\xe0\xf9\x0c\x80\xae\x73\x27\
\xef\xcb\x92\xdc\xd6\xd1\xd1\x11\x07\x28\x2a\x2a\xc2\xe7\xab\xca\
\x33\x9c\x7a\xbb\x24\x49\x92\xe1\xd4\xdb\x7d\xbe\xaa\xbc\xa2\xa2\
\x22\x00\x3a\x3a\x3a\xe2\xb2\x24\xb7\x75\x9d\x3b\x79\xdf\xae\x94\
\xcf\x01\x00\x1e\x25\xc7\x0e\x85\x42\x21\xcb\x34\x4d\x00\x6a\x6a\
\x56\xab\x86\xd7\x5b\xfe\xda\xb6\x7d\x31\xc3\xeb\x2d\xaf\xa9\x59\
\xad\x02\x98\xa6\x49\x28\x14\xb2\x1e\x25\xc7\x0e\xd9\xad\x03\x20\
\x09\x21\x6c\x03\xab\x36\xfb\x8f\xaf\x58\xb9\xe2\xdd\xda\xda\x5a\
\x1d\xe0\xd8\xd1\x63\x6c\xdd\xb6\x95\xd3\x9f\x9f\xa6\xf9\x9d\x4a\
\x52\x7d\x1f\x91\xf8\xbb\x0b\x91\x4a\x24\x34\x39\xf9\xd8\x66\x89\
\x90\x80\xc0\xa4\x80\x8a\x8d\xef\xcd\x75\x2a\x9e\xc1\x40\x20\x90\
\xe7\xf1\x78\xb8\xdd\xd3\xc3\xcd\xce\x4e\x2a\xab\xaa\x58\x16\xdf\
\x8d\x14\xfb\x19\x97\x51\x82\x2c\x3b\x91\x15\x27\x92\xa2\x21\xac\
\x04\x42\x58\xcc\x5a\xd8\xc8\x9d\x9f\x3e\xc0\x4a\x44\x6f\x4e\x0a\
\xb0\xcb\x22\xad\xe4\x55\x0d\x63\x6e\x05\x0e\xed\x85\x2c\xbf\xaa\
\xcf\xa6\x70\xe9\xfb\xb8\xf2\x97\x32\x32\xf0\x15\xfd\x37\x37\xda\
\xf7\x20\xad\xdc\x5e\x64\x4a\x76\x64\xdf\x3f\xe7\x8c\xc5\xcc\x7f\
\xe5\x20\xae\xfc\xa5\xc4\xcd\x41\x46\x87\x2e\x3d\xf5\xfd\x17\x20\
\xf7\x44\x65\x01\x64\x75\xe2\xdd\x53\xe0\xe3\xa5\x65\x01\x34\xf7\
\xcb\x24\xe3\xa3\xdc\xfd\xf5\x18\xc2\x7a\x3c\x35\xc0\x2e\x8b\xdc\
\x6c\xbc\xf3\xaa\x29\x5c\xd2\x8c\x43\x9f\x49\xca\x8a\xf3\x57\xdf\
\x97\x3c\x79\xd8\xff\x6c\x23\x53\x01\x72\xb3\x48\x3f\xa3\xc3\x57\
\x70\xcf\x59\x49\x74\xf8\x2a\xff\x0c\xfd\x08\x08\x22\xbf\x7c\xc2\
\x9d\x5b\xfb\x88\x0e\x5f\x21\x3a\x7c\x65\x7a\x80\xcc\x2c\x22\x91\
\x08\xa1\x50\xc8\x02\x40\xa4\x98\x55\xfc\x26\x25\xaf\x9e\xe6\x5e\
\xef\x29\x06\xbb\x77\x30\x74\xeb\x43\x44\x6a\x7c\x62\x4c\x1b\xd0\
\x75\xee\xe4\x7d\x4d\xd5\xbb\xbf\x38\x73\x06\x4d\xd5\xbb\x01\x66\
\x2d\x58\x8f\x6b\xc6\x12\x24\x49\x61\x66\xf1\x1b\xdc\xeb\xfd\xcc\
\x76\xee\xb4\x00\x00\x8a\x22\x97\xb4\xec\xd9\x83\xa2\xc8\x25\x48\
\xf4\xa8\xae\x02\x06\xbb\xb7\x73\xfb\x87\x45\xfc\x11\x6a\xb6\x9f\
\x24\xd1\xe3\x98\x2e\x00\x45\x39\x74\x24\x78\x24\x80\xa2\xb4\x92\
\x62\x28\xf2\xdb\xa7\xc7\x11\x2c\x9e\xd4\x2f\xd1\x4b\x8a\x96\x7f\
\x01\xb3\x71\xdb\xcb\x12\x7d\x31\x70\x00\x00\x00\x00\x49\x45\x4e\
\x44\xae\x42\x60\x82\
"
qt_resource_name = b"\
\x00\x07\
\x07\x3b\xe0\xb3\
\x00\x70\
\x00\x6c\x00\x75\x00\x67\x00\x69\x00\x6e\x00\x73\
\x00\x08\
\x09\xcb\x6f\x53\
\x00\x44\
\x00\x73\x00\x67\x00\x54\x00\x6f\x00\x6f\x00\x6c\x00\x73\
\x00\x0a\
\x0b\x6f\x47\xe0\
\x00\x44\
\x00\x73\x00\x67\x00\x54\x00\x6f\x00\x6f\x00\x6c\x00\x73\x00\x4f\x00\x70\
\x00\x0d\
\x01\xed\x72\x73\
\x00\x4d\
\x00\x69\x00\x6c\x00\x69\x00\x74\x00\x61\x00\x72\x00\x79\x00\x54\x00\x6f\x00\x6f\x00\x6c\x00\x73\
\x00\x13\
\x0c\xc0\x02\x64\
\x00\x6e\
\x00\x75\x00\x6d\x00\x65\x00\x72\x00\x69\x00\x63\x00\x61\x00\x6c\x00\x56\x00\x65\x00\x72\x00\x74\x00\x65\x00\x78\x00\x45\x00\x64\
\x00\x69\x00\x74\
\x00\x18\
\x0a\x0d\x3f\x47\
\x00\x76\
\x00\x65\x00\x63\x00\x74\x00\x6f\x00\x72\x00\x2d\x00\x65\x00\x64\x00\x69\x00\x74\x00\x2d\x00\x6b\x00\x65\x00\x79\x00\x62\x00\x6f\
\x00\x61\x00\x72\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x14\x00\x02\x00\x00\x00\x01\x00\x00\x00\x03\
\x00\x00\x00\x2a\x00\x02\x00\x00\x00\x01\x00\x00\x00\x04\
\x00\x00\x00\x44\x00\x02\x00\x00\x00\x01\x00\x00\x00\x05\
\x00\x00\x00\x64\x00\x02\x00\x00\x00\x01\x00\x00\x00\x06\
\x00\x00\x00\x90\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
"
def qInitResources():
QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()<|fim▁end|> | \x99\x31\x39\xf3\xe3\x71\x7c\x7c\xde\xec\x39\x73\x74\x21\x29\xff\
\x6f\xb7\x96\xb5\xbf\x65\x4f\xcb\x42\x2c\x2b\x90\x1b\x92\xe1\x69\
\x09\x00\x5c\xba\x7a\xf7\xf7\xc1\x41\x00\x69\xcc\x1c\x93\xd2\xa6\ |
<|file_name|>open.rs<|end_file_name|><|fim▁begin|>use editor::Editor;
use std::fs::File;
use std::io::Read;
use std::collections::VecDeque;
pub enum OpenStatus {
Ok,
NotFound,
}
impl Editor {
/// Open a file
pub fn open(&mut self, path: &str) -> OpenStatus {
self.status_bar.file = path.to_string();
if let Some(mut file) = File::open(path).ok() {
let mut con = String::new();
file.read_to_string(&mut con);
self.text = con.lines()
.map(|x| x.chars().collect::<VecDeque<char>>())
.collect::<VecDeque<VecDeque<char>>>();
OpenStatus::Ok
} else {
OpenStatus::NotFound
}
<|fim▁hole|><|fim▁end|> | }
} |
<|file_name|>test_common.py<|end_file_name|><|fim▁begin|># vim: fileencoding=UTF-8:expandtab:autoindent:ts=4:sw=4:sts=4
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
# To import from calibre, some things need to be added to `sys` first. Do not import
# anything from calibre or the plugins yet.
import glob
import os
import sys
import unittest
test_dir = os.path.dirname(os.path.abspath(__file__))
src_dir = os.path.dirname(test_dir)
test_libdir = os.path.join(
src_dir, "pylib", "python{major}".format(major=sys.version_info.major)
)
sys.path += glob.glob(os.path.join(test_libdir, "*.zip"))
try:
from unittest import mock
except ImportError:
# Python 2
import mock
from calibre_plugins.kobotouch_extended import common
from polyglot.builtins import unicode_type
LANGUAGES = ("en_CA", "fr_CA", "fr_FR", "de_DE", "ar_EG", "ru_RU")
TEST_STRINGS = [
{
"encodings": {"UTF-8", "CP1252"},
"test_strings": [
unicode_type(s) for s in ["Hello, World!", "J'ai trouvé mon livre préféré"]
],
},
{
"encodings": {"UTF-8", "CP1256"},
"test_strings": [unicode_type(s) for s in ["مرحبا بالعالم"]],
},
{
"encodings": {"UTF-8", "CP1251"},
"test_strings": [unicode_type(s) for s in ["Привет мир"]],
},
{
"encodings": {"UTF-8", "CP932"},
"test_strings": [unicode_type(s) for s in ["こんにちは世界"]],
},
]
TEST_TIME = "2020-04-01 01:02:03"
def gen_lang_code():
encodings = set()
for o in TEST_STRINGS:
encodings |= o["encodings"]
for enc in encodings:
yield enc
class TestCommon(unittest.TestCase):
orig_lang = "" # type: str
def setUp(self): # type: () -> None
self.orig_lang = os.environ.get("LANG", None)
def tearDown(self): # type: () -> None
if not self.orig_lang:
if "LANG" in os.environ:
del os.environ["LANG"]
else:
os.environ["LANG"] = self.orig_lang
self.orig_lang = ""
def test_logger_log_level(self): # type: () -> None
for envvar in ("CALIBRE_DEVELOP_FROM", "CALIBRE_DEBUG"):
if envvar in os.environ:
del os.environ[envvar]
logger = common.Logger()
self.assertEqual(logger.log_level, "INFO")
os.environ["CALIBRE_DEVELOP_FROM"] = "true"
logger = common.Logger()
self.assertEqual(logger.log_level, "DEBUG")
del os.environ["CALIBRE_DEVELOP_FROM"]
os.environ["CALIBRE_DEBUG"] = "1"
logger = common.Logger()
self.assertEqual(logger.log_level, "DEBUG")
del os.environ["CALIBRE_DEBUG"]
def _run_logger_unicode_test(self, as_bytes): # type: (bool) -> None
for o in TEST_STRINGS:
for enc in o["encodings"]:
with mock.patch(
"calibre_plugins.kobotouch_extended.common.preferred_encoding", enc
), mock.patch(
"calibre_plugins.kobotouch_extended.common.time.strftime",
mock.MagicMock(return_value=TEST_TIME),
):
logger = common.Logger()
for msg in o["test_strings"]:
test_tagged = logger._tag_args("DEBUG", msg)
self.assertListEqual(
test_tagged,
[
"{timestr} [{level}] {msg}".format(
timestr=TEST_TIME, level="DEBUG", msg=msg
),
],
)
def test_logger_ensure_unicode_from_bytes(self): # type: () -> None
self._run_logger_unicode_test(True)
self._run_logger_unicode_test(False)
@mock.patch(
"calibre_plugins.kobotouch_extended.common.Logger.print_formatted_log",
mock.MagicMock(),
)
@mock.patch(
"calibre_plugins.kobotouch_extended.common.Logger._prints", mock.MagicMock(),
)
@mock.patch(
"calibre_plugins.kobotouch_extended.common.Logger._tag_args",
mock.MagicMock(return_value="Goodbye, World"),
)
def test_logger_logs(self):
logger = common.Logger()
logger.debug("Hello, World")
logger.print_formatted_log.assert_called_with("DEBUG", "Hello, World")
logger("Hello, World")
logger.print_formatted_log.assert_called_with("INFO", "Hello, World")
logger.print_formatted_log.reset_mock()
logger._prints.reset_mock()
logger._tag_args.reset_mock()
logger.exception("Oh noes!")
logger._tag_args.assert_called_with("ERROR", "Oh noes!")
self.assertEqual(logger._prints.call_count, 2)<|fim▁hole|>
if __name__ == "__main__":
unittest.main(module="test_common", verbosity=2)<|fim▁end|> | |
<|file_name|>test_tab.py<|end_file_name|><|fim▁begin|>"""
Tests for wiki views.
"""
from django.conf import settings
from django.test.client import RequestFactory
from lms.djangoapps.courseware.tabs import get_course_tab_list
from common.djangoapps.student.tests.factories import AdminFactory, UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
class WikiTabTestCase(ModuleStoreTestCase):
"""Test cases for Wiki Tab."""
def setUp(self):
super().setUp()
self.course = CourseFactory.create()
self.instructor = AdminFactory.create()
self.user = UserFactory()
def get_wiki_tab(self, user, course):
"""Returns true if the "Wiki" tab is shown."""
request = RequestFactory().request()
all_tabs = get_course_tab_list(user, course)
wiki_tabs = [tab for tab in all_tabs if tab.name == 'Wiki']
return wiki_tabs[0] if len(wiki_tabs) == 1 else None
def test_wiki_enabled_and_public(self):
"""
Test wiki tab when Enabled setting is True and the wiki is open to<|fim▁hole|> """
settings.WIKI_ENABLED = True
self.course.allow_public_wiki_access = True
assert self.get_wiki_tab(self.user, self.course) is not None
def test_wiki_enabled_and_not_public(self):
"""
Test wiki when it is enabled but not open to the public
"""
settings.WIKI_ENABLED = True
self.course.allow_public_wiki_access = False
assert self.get_wiki_tab(self.user, self.course) is None
assert self.get_wiki_tab(self.instructor, self.course) is not None
def test_wiki_enabled_false(self):
"""Test wiki tab when Enabled setting is False"""
settings.WIKI_ENABLED = False
assert self.get_wiki_tab(self.user, self.course) is None
assert self.get_wiki_tab(self.instructor, self.course) is None
def test_wiki_visibility(self):
"""Test toggling of visibility of wiki tab"""
settings.WIKI_ENABLED = True
self.course.allow_public_wiki_access = True
wiki_tab = self.get_wiki_tab(self.user, self.course)
assert wiki_tab is not None
assert wiki_tab.is_hideable
wiki_tab.is_hidden = True
assert wiki_tab['is_hidden']
wiki_tab['is_hidden'] = False
assert not wiki_tab.is_hidden<|fim▁end|> | the public. |
<|file_name|>editor.js<|end_file_name|><|fim▁begin|>//= require ./ace/ace
//= require ./ace/mode-ruby
//= require ./ace/theme-tomorrow
//= require ./ace/ext-whitespace
$(function() {<|fim▁hole|> editor.getSession().setTabSize(2);
editor.getSession().setUseSoftTabs(true);
$("form").submit(function() {
$("#content").val(editor.getValue());
});
});<|fim▁end|> | var editor = ace.edit("editor");
editor.setTheme("ace/theme/tomorrow");
editor.getSession().setMode("ace/mode/ruby"); |
<|file_name|>ThirdBit.js<|end_file_name|><|fim▁begin|><|fim▁hole|>function solve(args) {
var $number = +args[0];
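    // Shift right by 3 and mask with 1 to read the bit at zero-based position 3 of the input.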
var $significantBit = $number => (($number >> 3) & 1);
console.log($significantBit($number));
}
solve(['15']);
solve(['1024']);<|fim▁end|> | 'use strict';
|
<|file_name|>configuration.go<|end_file_name|><|fim▁begin|>package conf
import (
"bytes"
"fmt"
"io/ioutil"
"os"
"github.com/BurntSushi/toml"
"github.com/ottom8/hadoop-ottom8r/logger"
"github.com/ottom8/hadoop-ottom8r/util"
)
const (
authConfigFile = "auth.toml"
encryptKey = "H@d00pS3cret3ncryptK3yS3cur3RlLg"
)
type flagOptions struct {
BackupPath string
ConfigFile string
DebugMode bool
LogLevel string
LogFile string
Mock bool
Encrypt bool
}
// AppConfig defines all configuration for this app
type AppConfig struct {
AuthConfig
TomlConfig
}
// AuthConfig defines Auth file structure
type AuthConfig struct {
NifiToken string `toml:"nifi_token"`
}
// TomlConfig defines TOML file structure
type TomlConfig struct {
Connection ConnectionInfo
Backup BackupInfo
}
// BackupInfo defines the config file backup section
type BackupInfo struct {
BackupPath string `toml:"backup_path"`
ConfigFile string `toml:"config_file"`
DebugMode bool `toml:"debug_mode"`
LogLevel string `toml:"log_level"`
LogFile string `toml:"log_file"`
Mock bool `toml:"mock"`
}
// ConnectionInfo defines the config file connection section
type ConnectionInfo struct {
NifiHost string `toml:"nifi_host"`
NifiUser string `toml:"nifi_user"`
NifiPass string `toml:"nifi_pass"`
NifiCert string `toml:"nifi_cert"`
}
// Configurator is an interface for configuration related use.
type Configurator interface {
Read()
Write()
}
//// String method returns the flagOptions object as a string.
//func (fo *flagOptions) String() string {
// return fmt.Sprintf("%+v", fo)
//}
// String method returns the AppConfig object as a string.
func (ac *AppConfig) String() string {
return "AppConfig: {" + ac.TomlConfig.String() + " " + ac.AuthConfig.String() + "}"
}
// String method returns the TomlConfig object as a string.
func (tc *TomlConfig) String() string {
return "TomlConfig: {" + tc.Backup.String() + " " + tc.Connection.String() + "}"
}
// String method returns the BackupInfo object as a string.
func (bi BackupInfo) String() string {
return "Backup: {" + logger.OutputStruct(bi) + "}"
}
// String method returns the ConnectionInfo object as a string.
func (ci ConnectionInfo) String() string {
return fmt.Sprintf("Connection: {NifiHost:%s NifiCert:%s NifiUser:%s NifiPass:********}",
ci.NifiHost, ci.NifiCert, ci.NifiUser)
}
// String method returns the AuthConfig object as a string.
func (ac AuthConfig) String() string {
return fmt.Sprintf("AuthConfig: {NifiToken:%s}", ac.NifiToken)
}
// Read info from associated config file
func (ac *AuthConfig) Read() {
_, err := os.Stat(authConfigFile)
if err != nil {
logger.Info(fmt.Sprintf("AuthConfig file %s is missing, creating. ",
authConfigFile))
ac.Write()
}
if _, err := toml.DecodeFile(authConfigFile, ac); err != nil {
logger.Fatal(fmt.Sprint(err))
}
logger.Debug(fmt.Sprint(ac))
}
// Write out new auth config file
func (ac *AuthConfig) Write() {
buf := new(bytes.Buffer)
if err := toml.NewEncoder(buf).Encode(ac); err != nil {
logger.Fatal(fmt.Sprint(err))
}
logger.Debug(buf.String())
err := ioutil.WriteFile(authConfigFile, buf.Bytes(), 0644)
if err != nil {<|fim▁hole|>}
// Read info from associated config file
func (tc *TomlConfig) Read() {
_, err := os.Stat(tc.Backup.ConfigFile)
if err != nil {
logger.Fatal(fmt.Sprint("Config file is missing: ", tc.Backup.ConfigFile))
}
if _, err := toml.DecodeFile(tc.Backup.ConfigFile, tc); err != nil {
logger.Fatal(fmt.Sprint(err))
}
logger.Debug(fmt.Sprint(tc))
}
// Write out new TOML config file
func (tc *TomlConfig) Write() {
buf := new(bytes.Buffer)
if err := toml.NewEncoder(buf).Encode(tc); err != nil {
logger.Fatal(fmt.Sprint(err))
}
logger.Debug(buf.String())
err := ioutil.WriteFile(tc.Backup.ConfigFile, buf.Bytes(), 0644)
if err != nil {
logger.Fatal(fmt.Sprint(err))
}
logger.Info("Wrote new TOML config file.")
}
// Encrypt performs encryption on plaintext passwords in config file.
func (tc *TomlConfig) Encrypt() {
tc.SetNifiPass(tc.Connection.NifiPass)
}
// GetNifiHost returns the NifiHost config
func (tc *TomlConfig) GetNifiHost() string {
return tc.Connection.NifiHost
}
// GetNifiUser returns the NifiUser config
func (tc *TomlConfig) GetNifiUser() string {
return tc.Connection.NifiUser
}
// GetNifiPass decrypts and returns the NifiPass config
func (tc *TomlConfig) GetNifiPass() string {
return util.Base64Decrypt(tc.Connection.NifiPass, encryptKey)
}
// SetNifiPass encrypts and stores the NifiPass config
func (tc *TomlConfig) SetNifiPass(pass string) {
outbound := util.Base64Encrypt(pass, encryptKey)
if outbound != tc.Connection.NifiPass {
tc.Connection.NifiPass = outbound
tc.Write()
}
}
// GetNifiCert returns the NifiCert config
func (tc *TomlConfig) GetNifiCert() string {
return tc.Connection.NifiCert
}
// GetNifiToken returns the NifiToken config
func (ac *AuthConfig) GetNifiToken() string {
return ac.NifiToken
}
// SetNifiToken updates the NifiToken config
func (ac *AuthConfig) SetNifiToken(token string) {
if token != ac.NifiToken {
ac.NifiToken = token
ac.Write()
}
}
func GetFlags(arguments map[string]interface{}) *flagOptions {
flags := &flagOptions{
ConfigFile: arguments["--config"].(string),
DebugMode: arguments["--debug"].(bool),
LogLevel: arguments["--loglevel"].(string),
LogFile: arguments["--logfile"].(string),
Mock: arguments["--mock"].(bool),
Encrypt: arguments["--encrypt"].(bool),
}
return flags
}<|fim▁end|> | logger.Fatal(fmt.Sprint(err))
}
logger.Info("Wrote new auth config file.") |
<|file_name|>reordersam.py<|end_file_name|><|fim▁begin|>import os
from pypers.core.step import CmdLineStep
class ReorderSam(CmdLineStep):
spec = {
"version": "0.0.1",
"descr": [
"Runs ReorderSam to reorder chromosomes into GATK order"
],
"args":
{
"inputs": [
{
"name" : "input_bam",
"type" : "file",
"iterable" : True,
"descr" : "the input bam file",
},
{
"name" : "reference",
"type" : "ref_genome",
"tool" : "reordersam",
"descr" : "Reference whole genome fasta"
}
],
"outputs": [
{
"name" : "output_bam",
"type" : "file",
"value" : "dummy",
"descr" : "the reordered output bam",
}
],
"params": [
{
"name" : "jvm_args",
"value" : "-Xmx{{jvm_memory}}g -Djava.io.tmpdir={{output_dir}}",
"descr" : "java virtual machine arguments",
"readonly" : True
}
]<|fim▁hole|> "cmd": [
"/usr/bin/java {{jvm_args}} -jar /software/pypers/picard-tools/picard-tools-1.119/picard-tools-1.119/ReorderSam.jar",
" I={{input_bam}} O={{output_bam}} CREATE_INDEX=True R={{reference}}"
],
"requirements": {
"memory": '8'
}
}
def preprocess(self):
"""
Set output bam name
"""
file_name = os.path.basename(self.input_bam)
self.output_bam = file_name.replace('.bam','.reord.bam')
super(ReorderSam, self).preprocess()<|fim▁end|> | }, |
<|file_name|>stitch_imgs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import argparse
import Image, ImageDraw, ImageChops
import os
import sys
import util
# stitches a bunch of images together into a grid
parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--x', type=int, default=3, help="output columns")
parser.add_argument('--y', type=int, default=2, help="output rows")
parser.add_argument('--nth', type=int, default=1, help="only process every nth frame")
parser.add_argument('--max-frame', type=int, default=0,
help="if >0 ignore frames past this")
parser.add_argument('--output-dir', type=str, default="/tmp/stitch",
help="where to output stitched imgs")
parser.add_argument('dirs', nargs='+')
opts = parser.parse_args()
print opts
X, Y = opts.x, opts.y
W, H = 160, 120
pixel_buffer = 3
util.make_dir(opts.output_dir)
imgs_per_directory = {}<|fim▁hole|> i = sorted(os.listdir(directory))
if opts.max_frame > 0:
i = i[:opts.max_frame]
imgs_per_directory[directory] = i
print "imgs per dir", directory, len(i)
max_imgs = max(max_imgs, len(i))
i = 0
while i <= max_imgs:
print i, "/", max_imgs
background = Image.new('RGB',
((W*X)+(X*pixel_buffer), (H*Y)+(Y*pixel_buffer)),
(0, 0, 0))
for n, directory in enumerate(opts.dirs):
imgs = imgs_per_directory[directory]
img_file = imgs[min(len(imgs)-1, i)]
img = Image.open("%s/%s" % (directory, img_file))
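        # place the n-th input directory's frame at grid column n % X, row n / X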
gx, gy = n%X, n/X
x_offset = (gx*W)+(gx*pixel_buffer)
y_offset = (gy*H)+(gy*pixel_buffer)
background.paste(img, (x_offset, y_offset))
background.save("%s/stitched_%03d.png" % (opts.output_dir, i))
i += opts.nth
print "mencoder mf://%s/ -ovc lavc -mf fps=10 -o stitched.avi" % opts.output_dir<|fim▁end|> | max_imgs = 0
assert len(opts.dirs) == X * Y, opts.dirs
for directory in opts.dirs: |
<|file_name|>preview.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This helps you preview the apps and extensions docs.
#
# ./preview.py --help
#
# There are two modes: server- and render- mode. The default is server, in which
# a webserver is started on a port (default 8000). Navigating to paths on
# http://localhost:8000, for example
#
# http://localhost:8000/extensions/tabs.html
#
# will render the documentation for the extension tabs API.
#
# On the other hand, render mode statically renders docs to stdout. Use this
# to save the output (more convenient than needing to save the page in a
# browser), handy when uploading the docs somewhere (e.g. for a review),
# and for profiling the server. For example,
#
# ./preview.py -r extensions/tabs.html
#
# will output the documentation for the tabs API on stdout and exit immediately.
# NOTE: RUN THIS FIRST. Or all third_party imports will fail.
import build_server
# Copy all the files necessary to run the server. These are cleaned up when the
# server quits.
build_server.main()
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import logging
import optparse
import posixpath
import time
from local_renderer import LocalRenderer
class _RequestHandler(BaseHTTPRequestHandler):
'''A HTTPRequestHandler that outputs the docs page generated by Handler.<|fim▁hole|> posixpath.abspath('')):
return
# Rewrite paths that would otherwise be served from app.yaml.
self.path = {
'/robots.txt': '../../server2/robots.txt',
'/favicon.ico': '../../server2/chrome-32.ico',
'/apple-touch-icon-precomposed.png': '../../server2/chrome-128.png'
}.get(self.path, self.path)
response = LocalRenderer.Render(self.path, headers=dict(self.headers))
self.protocol_version = 'HTTP/1.1'
self.send_response(response.status)
for k, v in response.headers.iteritems():
self.send_header(k, v)
self.end_headers()
self.wfile.write(response.content.ToString())
if __name__ == '__main__':
parser = optparse.OptionParser(
description='Runs a server to preview the extension documentation.',
usage='usage: %prog [option]...')
parser.add_option('-a', '--address', default='127.0.0.1',
help='the local interface address to bind the server to')
parser.add_option('-p', '--port', default='8000',
help='port to run the server on')
parser.add_option('-r', '--render', default='',
help='statically render a page and print to stdout rather than starting '
'the server, e.g. apps/storage.html. The path may optionally end '
'with #n where n is the number of times to render the page before '
'printing it, e.g. apps/storage.html#50, to use for profiling.')
parser.add_option('-s', '--stat',
help='Print profile stats at the end of the run using the given '
'profiling option (like "tottime"). -t is ignored if this is set.')
parser.add_option('-t', '--time', action='store_true',
help='Print the time taken rendering rather than the result.')
(opts, argv) = parser.parse_args()
if opts.render:
if opts.render.find('#') >= 0:
(path, iterations) = opts.render.rsplit('#', 1)
extra_iterations = int(iterations) - 1
else:
path = opts.render
extra_iterations = 0
if opts.stat:
import cProfile, pstats, StringIO
pr = cProfile.Profile()
pr.enable()
elif opts.time:
start_time = time.time()
response = LocalRenderer.Render(path)
if response.status != 200:
print('Error status: %s' % response.status)
exit(1)
for _ in range(extra_iterations):
LocalRenderer.Render(path)
if opts.stat:
pr.disable()
s = StringIO.StringIO()
pstats.Stats(pr, stream=s).sort_stats(opts.stat).print_stats()
print(s.getvalue())
elif opts.time:
print('Took %s seconds' % (time.time() - start_time))
else:
print(response.content.ToString())
exit()
print('Starting previewserver on port %s' % opts.port)
print('')
print('The extension documentation can be found at:')
print('')
print(' http://localhost:%s/extensions/' % opts.port)
print('')
print('The apps documentation can be found at:')
print('')
print(' http://localhost:%s/apps/' % opts.port)
print('')
logging.getLogger().setLevel(logging.INFO)
server = HTTPServer((opts.address, int(opts.port)), _RequestHandler)
try:
server.serve_forever()
finally:
server.socket.close()<|fim▁end|> | '''
def do_GET(self):
# Sanitize path to guarantee that it stays within the server.
if not posixpath.abspath(self.path.lstrip('/')).startswith( |
<|file_name|>copyable.js<|end_file_name|><|fim▁begin|>/**
@module ember
@submodule ember-runtime
*/
import { get } from "ember-metal/property_get";
import { set } from "ember-metal/property_set";
import { required } from "ember-metal/mixin";
import { Freezable } from "ember-runtime/mixins/freezable";
import { Mixin } from 'ember-metal/mixin';
import { fmt } from "ember-runtime/system/string";
import EmberError from 'ember-metal/error';
/**
Implements some standard methods for copying an object. Add this mixin to
any object you create that can create a copy of itself. This mixin is
added automatically to the built-in array.
You should generally implement the `copy()` method to return a copy of the
receiver.
Note that `frozenCopy()` will only work if you also implement
`Ember.Freezable`.
@class Copyable
@namespace Ember
@since Ember 0.9
*/
export default Mixin.create({
/**
Override to return a copy of the receiver. Default implementation raises
an exception.
@method copy
@param {Boolean} deep if `true`, a deep copy of the object should be made
@return {Object} copy of receiver
*/
copy: required(Function),
/**
If the object implements `Ember.Freezable`, then this will return a new
copy if the object is not frozen and the receiver if the object is frozen.
    Raises an exception if you try to call this method on an object that does
not support freezing.
You should use this method whenever you want a copy of a freezable object<|fim▁hole|>
@method frozenCopy
@return {Object} copy of receiver or receiver
*/
frozenCopy: function() {
if (Freezable && Freezable.detect(this)) {
return get(this, 'isFrozen') ? this : this.copy().freeze();
} else {
throw new EmberError(fmt("%@ does not support freezing", [this]));
}
}
});<|fim▁end|> | since a freezable object can simply return itself without actually
consuming more memory. |
<|file_name|>SolrPing.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.client.solrj.request;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.response.SolrPingResponse;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.ModifiableSolrParams;
/**
* Verify that there is a working Solr core at the URL of a {@link org.apache.solr.client.solrj.SolrClient}.
* To use this class, the solrconfig.xml for the relevant core must include the
* request handler for <code>/admin/ping</code>.
*
* @since solr 1.3
*/
public class SolrPing extends SolrRequest<SolrPingResponse> {
/** serialVersionUID. */
private static final long serialVersionUID = 5828246236669090017L;
/** Request parameters. */
private ModifiableSolrParams params;
/**
* Create a new SolrPing object.
*/
public SolrPing() {
super(METHOD.GET, CommonParams.PING_HANDLER);
params = new ModifiableSolrParams();
}
@Override
protected SolrPingResponse createResponse(SolrClient client) {
return new SolrPingResponse();
}
@Override
public ModifiableSolrParams getParams() {
return params;
}
/**
* Remove the action parameter from this request. This will result in the same
* behavior as {@code SolrPing#setActionPing()}. For Solr server version 4.0
* and later.
*
* @return this
*/
public SolrPing removeAction() {
params.remove(CommonParams.ACTION);
return this;
}
/**
   * Set the action parameter on this request to disable. This will delete the
* health-check file for the Solr core. For Solr server version 4.0 and later.
*
* @return this
*/
public SolrPing setActionDisable() {
params.set(CommonParams.ACTION, CommonParams.DISABLE);
return this;
}
/**
* Set the action parameter on this request to enable. This will create the<|fim▁hole|> public SolrPing setActionEnable() {
params.set(CommonParams.ACTION, CommonParams.ENABLE);
return this;
}
/**
* Set the action parameter on this request to ping. This is the same as not
* including the action at all. For Solr server version 4.0 and later.
*
* @return this
*/
public SolrPing setActionPing() {
params.set(CommonParams.ACTION, CommonParams.PING);
return this;
}
}<|fim▁end|> | * health-check file for the Solr core. For Solr server version 4.0 and later.
*
* @return this
*/ |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.db import migrations, models
import django.db.models.deletion<|fim▁hole|>from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='VideoPipelineIntegration',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('change_date', models.DateTimeField(auto_now_add=True, verbose_name='Change date')),
('enabled', models.BooleanField(default=False, verbose_name='Enabled')),
('api_url', models.URLField(help_text='edx-video-pipeline API URL.', verbose_name='Internal API URL')),
('service_username', models.CharField(default=u'video_pipeline_service_user', help_text='Username created for Video Pipeline Integration, e.g. video_pipeline_service_user.', max_length=100)),
('changed_by', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, editable=False, to=settings.AUTH_USER_MODEL, null=True, verbose_name='Changed by')),
],
options={
'ordering': ('-change_date',),
'abstract': False,
},
),
]<|fim▁end|> | |
<|file_name|>bitcoin_es_CL.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="es_CL" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About EverGreenCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+39"/>
<source><b>EverGreenCoin</b> version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2012-2014 The NovaCoin developers
Copyright © 2015-2017 The EverGreenCoin developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation>
Este es un software experimental.
Distribuido bajo la licencia MIT/X11, vea el archivo adjunto
COPYING o http://www.opensource.org/licenses/mit-license.php.
Este producto incluye software desarrollado por OpenSSL Project para su uso en
el OpenSSL Toolkit (http://www.openssl.org/), software criptográfico escrito por
Eric Young ([email protected]) y UPnP software escrito por Thomas Bernard.</translation>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Double-click to edit address or label</source>
<translation>Haz doble clic para editar una dirección o etiqueta</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Crea una nueva dirección</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Copia la dirección seleccionada al portapapeles</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-46"/>
<source>These are your EverGreenCoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>&Copy Address</source>
<translation>&Copia dirección</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own an EverGreenCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Verify a message to ensure it was signed with a specified EverGreenCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Borrar</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+65"/>
<source>Copy &Label</source>
<translation>Copia &etiqueta</translation>
</message>
<message>
<location line="+2"/>
<source>&Edit</source>
<translation>&Editar</translation>
</message>
<message>
<location line="+250"/>
<source>Export Address Book Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Archivos separados por coma (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Dirección</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(sin etiqueta)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Introduce contraseña actual </translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Nueva contraseña</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new password</source>
<translation>Repite nueva contraseña</translation>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unlock for staking only (not sending).</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+35"/>
<source>Enter the new password to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Introduce la nueva contraseña para la billetera.<br/>Por favor utiliza una contraseña de <b>10 o más caracteres aleatorios</b>, u <b>ocho o más palabras</b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Codificar billetera</translation>
</message>
<message>
<location line="+7"/>
<source>This operation needs your password to unlock your EverGreenCoin.</source>
<translation>Esta operación necesita la contraseña para desbloquear la billetera.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock EverGreenCoin</source>
<translation>Desbloquea billetera</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Esta operación necesita la contraseña para decodificar la billetera.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Decodificar cartera</translation>
</message>
<message>
<location line="+3"/>
<source>Change password</source>
<translation>Cambia contraseña</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new password to the wallet.</source>
<translation>Introduce la contraseña anterior y la nueva contraseña de la billetera.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Confirma la codificación de cartera</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR COINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>¿Seguro que quieres codificar tu billetera?</translation>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>IMPORTANTE: Cualquier versión anterior que hayas realizado de tu archivo de billetera será reemplazada por el nuevo archivo de billetera encriptado. Por razones de seguridad, los respaldos anteriores de los archivos de billetera se volverán inútiles en tanto comiences a usar la nueva billetera encriptada.</translation>
</message>
<message>
<location line="+103"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation>Precaución: Mayúsculas activadas</translation>
</message>
<message>
<location line="-133"/>
<location line="+60"/>
<source>Wallet encrypted</source>
<translation>Billetera codificada</translation>
</message>
<message>
<location line="-58"/>
<source>EverGreenCoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+44"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Falló la codificación de la billetera</translation>
</message>
<message>
<location line="-56"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>La codificación de la billetera falló debido a un error interno. Tu billetera no ha sido codificada.</translation>
</message>
<message>
<location line="+7"/>
<location line="+50"/>
<source>The supplied passphrases do not match.</source>
<translation>Las contraseñas no coinciden.</translation>
</message>
<message>
<location line="-38"/>
<source>Wallet unlock failed</source>
<translation>Ha fallado el desbloqueo de la billetera</translation>
</message>
<message>
<location line="+1"/>
<location line="+12"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>La contraseña introducida para decodificar la billetera es incorrecta.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Ha fallado la decodificación de la billetera</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>La contraseña de billetera ha sido cambiada con éxito.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+282"/>
<source>Sign &message...</source>
<translation>Firmar &Mensaje...</translation>
</message>
<message>
<location line="+251"/>
<source>Synchronizing with network...</source>
<translation>Sincronizando con la red...</translation>
</message>
<message>
<location line="-319"/>
<source>&Overview</source>
<translation>&Vista general</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of your EverGreenCoin balance</source>
<translation>Muestra una vista general de la billetera</translation>
</message>
<message>
<location line="+17"/>
<source>&Transactions</source>
<translation>&Transacciones</translation>
</message>
<message>
<location line="+1"/>
<source>Browse your EverGreenCoin transaction history</source>
<translation>Explora el historial de transacciones</translation>
</message>
<message>
<location line="+5"/>
<source>&Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit the list of your stored EverGreenCoin addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-13"/>
<source>&Receive coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show your list of EverGreenCoin addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-7"/>
<source>&Send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>E&xit</source>
<translation>&Salir</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Salir del programa</translation>
</message>
<message>
<location line="+6"/>
<source>Show information about EverGreenCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>Acerca de &Qt</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Mostrar Información sobre QT</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Opciones</translation>
</message>
<message>
<location line="+4"/>
<source>&Encrypt Wallet...</source>
<translation>&Codificar la billetera...</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>&Respaldar billetera...</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>&Cambiar la contraseña...</translation>
</message>
<message numerus="yes">
<location line="+259"/>
<source>~%n block(s) remaining</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-256"/>
<source>&Export...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Send EverGreenCoins to an EverGreenCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Modify configuration options for EverGreenCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Encrypt or decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup wallet to another location</source>
<translation>Respaldar billetera en otra ubicación</translation>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Cambiar la contraseña utilizada para la codificación de la billetera</translation>
</message>
<message>
<location line="+10"/>
<source>&Debug window</source>
<translation>Ventana &Debug</translation>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation>Abre consola de depuración y diagnóstico</translation>
</message>
<message>
<location line="-5"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-202"/>
<source>EverGreenCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet</source>
<translation>Cartera</translation>
</message>
<message>
<location line="+180"/>
<source>&About EverGreenCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation>&Mostrar/Ocultar</translation>
</message>
<message>
<location line="+9"/>
<source>Unlock EverGreenCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Lock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>&File</source>
<translation>&Archivo</translation>
</message>
<message>
<location line="+8"/>
<source>&Settings</source>
<translation>&Configuración</translation>
</message>
<message>
<location line="+8"/>
<source>&Help</source>
<translation>&Ayuda</translation>
</message>
<message>
<location line="+12"/>
<source>Tabs toolbar</source>
<translation>Barra de pestañas</translation>
</message>
<message>
<location line="+8"/>
<source>Actions toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+9"/>
<source>[testnet]</source>
<translation>[red-de-pruebas]</translation>
</message>
<message>
<location line="+0"/>
<location line="+60"/>
<source>EverGreenCoin client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+75"/>
<source>%n active connection(s) to the EverGreenCoin network</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+40"/>
<source>Downloaded %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+413"/>
<source>Staking.<br>Your weight is %1<br>Network weight is %2<br>Expected time to earn reward is %3</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Not staking because wallet is locked</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is syncing</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because you don't have mature coins</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-403"/>
<source>%n second(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="-312"/>
<source>About EverGreenCoin card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about EverGreenCoin card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>&Unlock Wallet...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+297"/>
<source>%n minute(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Up to date</source>
<translation>Actualizado</translation>
</message>
<message>
<location line="+7"/>
<source>Catching up...</source>
<translation>Recuperando...</translation>
</message>
<message>
<location line="+10"/>
<source>Last received block was generated %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Sent transaction</source>
<translation>Transacción enviada</translation>
</message>
<message>
<location line="+1"/>
<source>Incoming transaction</source>
<translation>Transacción entrante</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Fecha: %1
Cantidad: %2
Tipo: %3
Dirección: %4</translation>
</message>
<message>
<location line="+100"/>
<location line="+15"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<location line="+15"/>
<source>URI can not be parsed! This can be caused by an invalid EverGreenCoin address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>La billetera esta <b>codificada</b> y actualmente <b>desbloqueda</b></translation>
</message>
<message>
<location line="+10"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>La billetera esta <b>codificada</b> y actualmente <b>bloqueda</b></translation>
</message>
<message>
<location line="+25"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+76"/>
<source>%n second(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n minute(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s)</source>
<translation><numerusform>%n hora</numerusform><numerusform>%n horas</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation><numerusform>%n día</numerusform><numerusform>%n días</numerusform></translation>
</message>
<message>
<location line="+18"/>
<source>Not staking</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="+109"/>
<source>A fatal error occurred. EverGreenCoin can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+90"/>
<source>Network Alert</source>
<translation>Alerta de Red</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+14"/>
<source>Coin Control</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
<translation>Cantidad:</translation>
</message>
<message>
<location line="+32"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+551"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="+51"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+69"/>
<source>(un)select all</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Tree mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>List mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Amount</source>
<translation>Cantidad</translation>
</message>
<message>
<location line="+5"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Address</source>
<translation>Dirección</translation>
</message>
<message>
<location line="+5"/>
<source>Date</source>
<translation>Fecha</translation>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation>Confirmado</translation>
</message>
<message>
<location line="+5"/>
<source>Priority</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="-515"/>
<source>Copy address</source>
<translation>Copia dirección</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Copia etiqueta</translation>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation>Copiar Cantidad</translation>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+317"/>
<source>highest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium-high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>low-medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>low</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>lowest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+155"/>
<source>DUST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>yes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>This label turns red, if the transaction size is bigger than 10000 bytes.
This means a fee of at least %1 per kb is required.
Can vary +/- 1 Byte per input.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transactions with higher priority get more likely into a block.
This label turns red, if the priority is smaller than "medium".
This means a fee of at least %1 per kb is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.
This means a fee of at least %2 is required.
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if the change is smaller than %1.
This means a fee of at least %2 is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+66"/>
<source>(no label)</source>
<translation>(sin etiqueta)</translation>
</message>
<message>
<location line="-9"/>
<source>change from %1 (%2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Editar dirección</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Etiqueta</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Dirección</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+20"/>
<source>New receiving address</source>
<translation>Nueva dirección para recibir</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Nueva dirección para enviar</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Editar dirección de recepción</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Editar dirección de envio</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>La dirección introducida "%1" ya esta guardada en la libreta de direcciones.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid EverGreenCoin address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>No se pudo desbloquear la billetera.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>La generación de nueva clave falló.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+420"/>
<location line="+12"/>
<source>EverGreenCoin-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Opciones</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation>&Principal</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation>Comisión de &transacciones</translation>
</message>
<message>
<location line="+31"/>
<source>Reserved amount does not participate in staking and is therefore spendable at any time.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Reserve</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start EverGreenCoin after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start EverGreenCoin on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Detach databases at shutdown</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Network</source>
<translation>&Red</translation>
</message>
<message>
<location line="+6"/>
<source>Automatically open the EverGreenCoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation>Direcciona el puerto usando &UPnP</translation>
</message>
<message>
<location line="+7"/>
<source>Connect to the EverGreenCoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation>&IP Proxy:</translation>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation>&Puerto:</translation>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Puerto del servidor proxy (ej. 9050)</translation>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Muestra solo un ícono en la bandeja después de minimizar la ventana</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimiza a la bandeja en vez de la barra de tareas</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Minimiza la ventana en lugar de salir del programa cuando la ventana se cierra. Cuando esta opción está activa el programa solo se puede cerrar seleccionando Salir desde el menú.</translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>M&inimiza a la bandeja al cerrar</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>&Mostrado</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting EverGreenCoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>&Unidad en la que mostrar cantidades:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Elige la subdivisión por defecto para mostrar cantidades en la interfaz cuando se envíen monedas</translation>
</message>
<message>
<location line="+9"/>
<source>Whether to show EverGreenCoin addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation>&Muestra direcciones en el listado de transacciones</translation>
</message>
<message>
<location line="+7"/>
<source>Whether to show coin control features or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Display coin &control features (experts only!)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation>&Cancela</translation>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+55"/>
<source>default</source>
<translation>predeterminado</translation>
</message>
<message>
<location line="+149"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting EverGreenCoin.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Formulario</translation>
</message>
<message>
<location line="+33"/>
<location line="+231"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the EverGreenCoin network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-160"/>
<source>Stake:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation>No confirmados:</translation>
</message>
<message>
<location line="-107"/>
<source>Wallet</source>
<translation>Cartera</translation>
</message>
<message>
<location line="+49"/>
<source>Spendable:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current spendable balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Total:</source>
<translation>Total:</translation>
</message>
<message>
<location line="+16"/>
<source>Your current total balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Transacciones recientes</b></translation>
</message>
<message>
<location line="-108"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Total of coins that was staked, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+113"/>
<location line="+1"/>
<source>out of sync</source>
<translation>desincronizado</translation>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation>Nombre del cliente</translation>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+348"/>
<source>N/A</source>
<translation>N/A</translation>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation>Versión del Cliente</translation>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation>&Información</translation>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation>Tiempo de inicio</translation>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>Red</translation>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation>Número de conexiones</translation>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation>Cadena de bloques</translation>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation>&Abrir</translation>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the EverGreenCoin-Qt help message to get a list with possible EverGreenCoin command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation>&Consola</translation>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>EverGreenCoin - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>EverGreenCoin Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the EverGreenCoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation>Limpiar Consola</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-33"/>
<source>Welcome to the EverGreenCoin RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+182"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Enviar monedas</translation>
</message>
<message>
<location line="+76"/>
<source>Coin Control Features</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Inputs...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>automatically selected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Insufficient funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+35"/>
<source>0</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-19"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Amount:</source>
<translation>Cantidad:</translation>
</message>
<message>
<location line="+22"/>
<location line="+86"/>
<location line="+86"/>
<location line="+32"/>
<source>0.00 EGC</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-191"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>custom change address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+106"/>
<source>Send to multiple recipients at once</source>
<translation>Enviar a múltiples destinatarios</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation>&Agrega destinatario</translation>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation>&Borra todos</translation>
</message>
<message>
<location line="+28"/>
<source>Balance:</source>
<translation>Balance:</translation>
</message>
<message>
<location line="+16"/>
<source>123.456 EGC</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Confirma el envío</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>&Envía</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-173"/>
<source>Enter an EverGreenCoin address (e.g. EdFwYw4Mo2Zq6CFM2yNJgXvE2DTJxgdBRX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Copiar Cantidad</translation>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+86"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>Confirmar el envío de monedas</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>La dirección del destinatario no es válida, comprueba otra vez.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>La cantidad por pagar tiene que ser mayor que 0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation>La cantidad sobrepasa tu saldo.</translation>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>El total sobrepasa tu saldo cuando se incluyen %1 como tasa de envío.</translation>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>Tienes una dirección duplicada, solo puedes enviar a direcciones individuales de una sola vez.</translation>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>WARNING: Invalid EverGreenCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>(no label)</source>
<translation>(sin etiqueta)</translation>
</message>
<message>
<location line="+4"/>
<source>WARNING: unknown change address</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>Cantidad:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>&Pagar a:</translation>
</message>
<message>
<location line="+24"/>
<location filename="../sendcoinsentry.cpp" line="+25"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Introduce una etiqueta a esta dirección para añadirla a tu guía</translation>
</message>
<message>
<location line="+9"/>
<source>&Label:</source>
<translation>&Etiqueta:</translation>
</message>
<message>
<location line="+18"/>
<source>The address to send the payment to (e.g. EdFwYw4Mo2Zq6CFM2yNJgXvE2DTJxgdBRX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Pega dirección desde portapapeles</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter an EverGreenCoin address (e.g. EdFwYw4Mo2Zq6CFM2yNJgXvE2DTJxgdBRX)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+124"/>
<source>&Sign Message</source>
<translation>&Firmar Mensaje</translation>
</message>
<message>
<location line="-118"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. EdFwYw4Mo2Zq6CFM2yNJgXvE2DTJxgdBRX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+203"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<location line="+203"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-193"/>
<source>Paste address from clipboard</source>
<translation>Pega dirección desde portapapeles</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation>Escriba el mensaje que desea firmar</translation>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this EverGreenCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation>&Borra todos</translation>
</message>
<message>
<location line="-87"/>
<location line="+70"/>
<source>&Verify Message</source>
<translation>&Verificar Mensaje</translation>
</message>
<message>
<location line="-64"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. EdFwYw4Mo2Zq6CFM2yNJgXvE2DTJxgdBRX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified EverGreenCoin address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter an EverGreenCoin address (e.g. EdFwYw4Mo2Zq6CFM2yNJgXvE2DTJxgdBRX)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation>Click en "Firmar Mensaje" para generar la firma</translation>
</message>
<message>
<location line="+3"/>
<source>Enter EverGreenCoin signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation>La dirección introducida no es válida.</translation>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation>Por favor, revise la dirección e inténtelo de nuevo.</translation>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation>Se canceló el desbloqueo de la billetera.</translation>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation>Firma fallida</translation>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation>Mensaje firmado</translation>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation>Mensaje comprobado</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+19"/>
<source>Open until %1</source>
<translation>Abierto hasta %1</translation>
</message>
<message numerus="yes">
<location line="-2"/>
<source>Open for %n block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+8"/>
<source>conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation>%1/fuera de linea</translation>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/no confirmado</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 confirmaciónes</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation>Estado</translation>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Fecha</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation>Generado</translation>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>De</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation>A</translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation>propia dirección</translation>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>etiqueta</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation>Credito</translation>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation>no aceptada</translation>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation>Debito</translation>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation>Comisión transacción</translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation>Cantidad total</translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>Mensaje</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation>Comentario</translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>ID de Transacción</translation>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 60 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>Transacción</translation>
</message>
<message>
<location line="+5"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Cantidad</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-211"/>
<source>, has not been successfully broadcast yet</source>
<translation>, no ha sido emitido satisfactoriamente todavía</translation>
</message>
<message>
<location line="+35"/>
<source>unknown</source>
<translation>desconocido</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Detalles de transacción</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Esta ventana muestra información detallada sobre la transacción</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+226"/>
<source>Date</source>
<translation>Fecha</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Dirección</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Cantidad</translation>
</message>
<message>
<location line="+60"/>
<source>Open until %1</source>
<translation>Abierto hasta %1</translation>
</message>
<message>
<location line="+12"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Confirmado (%1 confirmaciones)</translation>
</message>
<message numerus="yes">
<location line="-15"/>
<source>Open for %n more block(s)</source>
        <translation><numerusform>Abierto durante %n bloque más</numerusform><numerusform>Abierto durante %n bloques más</numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Este bloque no ha sido recibido por otros nodos y probablemente no sea aceptado !</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
        <translation>Generado pero no aceptado</translation>
</message>
<message>
<location line="+42"/>
<source>Received with</source>
<translation>Recibido con</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>Recibido de</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Enviado a</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Pagar a usted mismo</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Minado</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<location line="+190"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Estado de transacción. Pasa el raton sobre este campo para ver el numero de confirmaciónes.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Fecha y hora cuando se recibió la transaccion</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Tipo de transacción.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>Dirección de destino para la transacción</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Cantidad restada o añadida al balance</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+55"/>
<location line="+16"/>
<source>All</source>
<translation>Todo</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Hoy</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>Esta semana</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
        <translation>Este mes</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Mes pasado</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Este año</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Rango...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Recibido con</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Enviado a</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>A ti mismo</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Minado</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Otra</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Introduce una dirección o etiqueta para buscar</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Cantidad minima</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Copia dirección</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Copia etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Copiar Cantidad</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>Edita etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation>Mostrar detalles de la transacción</translation>
</message>
<message>
<location line="+144"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Archivos separados por coma (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>Confirmado</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Fecha</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Dirección</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Cantidad</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>Rango:</translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>para</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+206"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+33"/>
<source>EverGreenCoin version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation>Uso:</translation>
</message>
<message>
<location line="+1"/>
<source>Send command to -server or evergreencoind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>List commands</source>
<translation>Muestra comandos
</translation>
</message>
<message>
<location line="+1"/>
<source>Get help for a command</source>
<translation>Recibir ayuda para un comando
</translation>
</message>
<message>
<location line="+2"/>
<source>Options:</source>
<translation>Opciones:
</translation>
</message>
<message>
<location line="+2"/>
<source>Specify configuration file (default: evergreencoin.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: evergreencoind.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify wallet file (within data directory)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>Especifica directorio para los datos
</translation>
</message>
<message>
<location line="+2"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation>Asigna el tamaño del caché de la base de datos en MB (25 predeterminado)</translation>
</message>
<message>
<location line="+1"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Listen for connections on <port> (default: 15714 or testnet: 25714)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
        <translation>Mantener como máximo <n> conexiones con los pares (por defecto: 125)</translation>
</message>
<message>
<location line="+3"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Stake your coins to support network and gain reward (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>Umbral de desconección de clientes con mal comportamiento (por defecto: 100)</translation>
</message>
<message>
<location line="+1"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Detach block and address databases. Increases shutdown time (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>Listen for JSON-RPC connections on <port> (default: 15715 or testnet: 25715)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>Aceptar comandos consola y JSON-RPC
</translation>
</message>
<message>
<location line="+101"/>
<source>Error: Transaction creation failed </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: Wallet locked, unable to create transaction </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-8"/>
<source>Importing blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Importing bootstrap blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-88"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>Correr como demonio y acepta comandos
</translation>
</message>
<message>
<location line="+1"/>
<source>Use the test network</source>
<translation>Usa la red de pruebas
</translation>
</message>
<message>
<location line="-24"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+117"/>
<source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Precaución: -paytxfee es muy alta. Esta es la comisión que pagarás si envias una transacción.</translation>
</message>
<message>
<location line="+61"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong EverGreenCoin will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-18"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-30"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-62"/>
<source>Connect only to the specified node(s)</source>
<translation>Conecta solo al nodo especificado
</translation>
</message>
<message>
<location line="+4"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+94"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-90"/>
<source>Find peers using DNS lookup (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync checkpoints policy (default: strict)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+83"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-82"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/><|fim▁hole|> <source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation>Opciones SSL: (ver la Bitcoin Wiki para instrucciones de configuración SSL)</translation>
</message>
<message>
<location line="-74"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Enviar informacion de seguimiento a la consola en vez del archivo debug.log</translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation>Establezca el tamaño mínimo del bloque en bytes (por defecto: 0)</translation>
</message>
<message>
<location line="-29"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-42"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation>Especifica tiempo de espera para conexion en milisegundos (predeterminado: 5000)</translation>
</message>
<message>
<location line="+109"/>
<source>Unable to sign checkpoint, wrong checkpointkey?
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation>Intenta usar UPnP para mapear el puerto de escucha (default: 0)</translation>
</message>
<message>
<location line="-1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>Intenta usar UPnP para mapear el puerto de escucha (default: 1 when listening)</translation>
</message>
<message>
<location line="-25"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Username for JSON-RPC connections</source>
<translation>Usuario para las conexiones JSON-RPC
</translation>
</message>
<message>
<location line="+47"/>
<source>Verifying database integrity...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+57"/>
<source>WARNING: syncronized checkpoint violation detected, but skipped!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>Advertencia: Esta versión está obsoleta, se necesita actualizar!</translation>
</message>
<message>
<location line="-48"/>
<source>wallet.dat corrupt, salvage failed</source>
        <translation>wallet.dat corrupto, falló el rescate de datos</translation>
</message>
<message>
<location line="-54"/>
<source>Password for JSON-RPC connections</source>
<translation>Contraseña para las conexiones JSON-RPC
</translation>
</message>
<message>
<location line="-84"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=evergreencoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "EverGreenCoin Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Find peers using internet relay chat (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>Permite conexiones JSON-RPC desde la dirección IP especificada
</translation>
</message>
<message>
<location line="+1"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>Envia comando al nodo situado en <ip> (predeterminado: 127.0.0.1)
</translation>
</message>
<message>
<location line="+1"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Require a confirmations for change (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Upgrade wallet to latest format</source>
<translation>Actualizar billetera al formato actual</translation>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>Ajusta el numero de claves en reserva <n> (predeterminado: 100)
</translation>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Rescanea la cadena de bloques para transacciones perdidas de la cartera
</translation>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 2500, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000?.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Usa OpenSSL (https) para las conexiones JSON-RPC
</translation>
</message>
<message>
<location line="+1"/>
<source>Server certificate file (default: server.cert)</source>
<translation>Certificado del servidor (Predeterminado: server.cert)
</translation>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>Clave privada del servidor (Predeterminado: server.pem)
</translation>
</message>
<message>
<location line="+1"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+53"/>
<source>Error: Wallet unlocked for staking only, unable to create transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-158"/>
<source>This help message</source>
<translation>Este mensaje de ayuda
</translation>
</message>
<message>
<location line="+95"/>
<source>Wallet %s resides outside data directory %s.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot obtain a lock on data directory %s. EverGreenCoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-98"/>
<source>EverGreenCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+140"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation>No es posible escuchar en el %s en este ordenador (bind returned error %d, %s)</translation>
</message>
<message>
<location line="-130"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Permite búsqueda DNS para addnode y connect
</translation>
</message>
<message>
<location line="+122"/>
<source>Loading addresses...</source>
<translation>Cargando direcciónes...</translation>
</message>
<message>
<location line="-15"/>
<source>Error loading blkindex.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Error cargando wallet.dat: Billetera corrupta</translation>
</message>
<message>
<location line="+4"/>
<source>Error loading wallet.dat: Wallet requires newer version of EverGreenCoin</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart EverGreenCoin to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation>Error cargando wallet.dat</translation>
</message>
<message>
<location line="-16"/>
<source>Invalid -proxy address: '%s'</source>
<translation>Dirección -proxy invalida: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Cantidad inválida para -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<location line="+44"/>
<source>Error: could not start node</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Invalid amount</source>
<translation>Cantidad inválida</translation>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation>Fondos insuficientes</translation>
</message>
<message>
<location line="-34"/>
<source>Loading block index...</source>
<translation>Cargando el index de bloques...</translation>
</message>
<message>
<location line="-103"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
        <translation>Agrega un nodo al que conectarse e intenta mantener la conexión abierta</translation>
</message>
<message>
<location line="+122"/>
<source>Unable to bind to %s on this computer. EverGreenCoin is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-97"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Invalid amount for -mininput=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Loading wallet...</source>
<translation>Cargando cartera...</translation>
</message>
<message>
<location line="+8"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot initialize keypool</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation>Rescaneando...</translation>
</message>
<message>
<location line="+5"/>
<source>Done loading</source>
<translation>Carga completa</translation>
</message>
<message>
<location line="-167"/>
<source>To use the %s option</source>
<translation>Para utilizar la opción %s</translation>
</message>
<message>
<location line="+14"/>
<source>Error</source>
<translation>Error</translation>
</message>
<message>
<location line="+6"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS><|fim▁end|> | |
<|file_name|>LatitudePlot.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# LatitudePlot.py
# Created 30 July 2013
# Created by [email protected]
import os, time, math
from datetime import datetime
from time import mktime
import xml.etree.ElementTree as ET
from PIL import Image, ImageDraw
def GetKmlFiles():
"""Locates and reads local .kml files, returns a list of kml dictionary data"""
KmlData = []
for dirname, dirnames, filenames in os.walk('.'):
for filename in filenames:
sp = filename.split('.')
if sp[len(sp)-1]== "kml": #locate kml files
print "Reading kml file " + filename
KmlData.append(ReadKmlFile(dirname, filename))
print KmlData
return KmlData
def ReadKmlFile(dirname, filename):
"""Parses a single kml file, returns a dict of format {time: [lat, long]}"""
KmlData = {}
kmltime = datetime.time
latlist = []
longlist = []
timelist = []
cnt =0
f = open(filename)
line = f.readline()
while line:
if 'when' in line:
timelist.append(time.strptime(ET.fromstring(line)[0].text,"%Y-%m-%dT%H:%M:%SZ"))
if 'coordinates' in line:
latlist.append(float(ET.fromstring(line)[0].text.split(',')[0]))
longlist.append(float(ET.fromstring(line)[0].text.split(',')[1]))
cnt+=1
if cnt % 5000 ==0:
print "Parsing " + filename + ": points found: " + str(cnt)
line = f.readline()
f.close()
return [latlist, longlist, timelist]
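# Illustration of the per-line parsing assumption above (hypothetical KML fragments,
# not taken from a real export): each relevant element must sit on its own line inside
# a wrapper whose first child carries the value, e.g.
#   <TimeStamp><when>2013-07-30T12:00:00Z</when></TimeStamp>
#   <Point><coordinates>-0.1275,51.5072,0</coordinates></Point>
# The <coordinates> text is read as "lon,lat[,alt]" and split on commas.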
def DrawMapData(KmlData,InputImage, OutputImage, itop, ibottom, ileft, iright,xnudge,ynudge):
"""Draws kml line data on top of the specified image"""
im = Image.open(InputImage)
draw = ImageDraw.Draw(im)
cnt =0
for KmlD in KmlData:
for d in range(len(KmlD[0])-1):
#Get points x and y coordinates and draw line
x1=(LongToX(KmlD[0][d],ileft,iright,im.size[0]))+xnudge
y1=(LatToY(KmlD[1][d],itop,ibottom,im.size[1]))+ynudge
x2=(LongToX(KmlD[0][d+1],ileft,iright,im.size[0]))+xnudge
y2=(LatToY(KmlD[1][d+1],itop,ibottom,im.size[1]))+ynudge
if(EuclidDistance(x1,y1,x2,y2) < 10000):
#setting this around 80 works okay. Attempts to remove some noise<|fim▁hole|> draw.line((x1,y1, x2,y2), fill=80)
cnt+=1
if cnt % 10000 ==0:
print "Drawing point number " + str(cnt)
im.save(OutputImage)
def LongToX(InputLong, LeftLong, RightLong, ImWidth):
"""Converts a longitude value in to an x coordinate"""
return ScalingFunc(InputLong+360, LeftLong+360, RightLong+360, ImWidth);
def LatToY(InputLat, TopLat, BottomLat, ImHeight):
"""Converts a latitude value in to a y coordinate"""
return ScalingFunc(InputLat+360, TopLat+360, BottomLat+360, ImHeight);
def EuclidDistance(x1, y1, x2, y2):
"""Calculates the euclidean distance between two points"""
return math.sqrt((x1 - x2)**2+(y1 - y2)**2)
def ScalingFunc(inputv, minv, maxv, size):
"""Helps convert latitudes and longitudes to x and y"""
if((float(maxv) -float(minv)) ==0):
return 0
return ((((float(inputv) - float(minv)) / (float(maxv) -float(minv))) * float(size)));
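# Worked example with made-up numbers: LongToX(-0.1, -10, 10, 1000) calls
# ScalingFunc(359.9, 350, 370, 1000) = ((359.9 - 350) / (370 - 350)) * 1000 = 495.0,
# so a longitude just left of the image centre maps just left of pixel column 500.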
def ParseImageFile():
"""Reads SatelliteImageData.csv containing:
<File name of image to draw data on>,
<image top latitude>,
<image bottom lattitude>,
<image left longitude>,
<image right longitude>,
(optional) <x value nudge>,
(optional) <y value nudge>"""
with open('ImageData.csv', 'r') as f:
read_data = f.read().split(',')
while 5 <= len(read_data) < 7:
read_data.append(0)
ReturnData = [0]*7
ReturnData[0]=read_data[0]
for i in range(1,7):
ReturnData[i] = float(read_data[i])
return ReturnData
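# Example ImageData.csv contents matching the format above (values invented for a
# roughly UK-sized image, not shipped with this script):
#   uk_satellite.png,59.0,49.5,-11.0,2.0,0,0
# i.e. image file, top/bottom latitude, left/right longitude, optional x/y nudge.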
if __name__ == "__main__":
ImageData = ParseImageFile()
DrawMapData(GetKmlFiles(),ImageData[0], "LatitudeData.png", ImageData[1], ImageData[2], ImageData[3], ImageData[4],ImageData[5],ImageData[6])<|fim▁end|> | |
<|file_name|>build.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import logging
import os
from django.conf import settings
from django.core.management import BaseCommand
from django.apps import apps
from django.core.management import CommandError
from django.template.defaultfilters import slugify
from django.template.loader import get_template
from django.db import models
logger = logging.getLogger(__name__)
class BaseGenerator(object):
template_names = []
def __init__(self, context, path):
self.context = context
self.path = path
def get_destination(self, template_name, app_name="", model_name=""):
destination = self.path + template_name.replace(
'scaffold/', '/'
).replace(
'.py.html', '.py'
).replace(
'APP_NAME', app_name
).replace(
'MODEL_NAME', model_name
)
# Create the directory if it does not exist.
directory = os.path.dirname(destination)
if not os.path.exists(directory):
os.makedirs(directory)
return destination
def generate(self):
for template_name in self.template_names:
template = get_template(template_name)
data = template.render(self.context)
destination = self.get_destination(template_name, app_name=self.context['app_name'])
with open(destination, 'wb') as out:
out.write(data.encode('utf-8'))
logger.info(u"Write %s", destination)
class SingleFileGenerator(BaseGenerator):
"""SingeFileGenerator uses the complete context (all models) per template."""
template_names = [
'scaffold/admin.py.html',
'scaffold/context_processors.py.html',
'scaffold/model_mixins.py.html',
'scaffold/static/APP_NAME/styles.css',
'scaffold/templates/APP_NAME/index.html',
'scaffold/templates/APP_NAME/pagination.html',
'scaffold/templates/base.html',
'scaffold/templatetags/__init__.py',
'scaffold/templatetags/APP_NAME_tags.py',
'scaffold/urls.py.html',
'scaffold/views.py.html',
]
class MultiFileGenerator(BaseGenerator):
"""MultiFileGenerator splits the context into a context for each model. It generates multiple files per model."""
template_names = [
'scaffold/templates/APP_NAME/MODEL_NAME_base.html',
'scaffold/templates/APP_NAME/MODEL_NAME_confirm_delete.html',
'scaffold/templates/APP_NAME/MODEL_NAME_detail.html',
'scaffold/templates/APP_NAME/MODEL_NAME_form.html',
'scaffold/templates/APP_NAME/MODEL_NAME_list.html',
'scaffold/templates/APP_NAME/MODEL_NAME_table_detail.html',
'scaffold/templates/APP_NAME/MODEL_NAME_table_list.html',
]
def generate(self):
for obj in self.context['items']:
date_template_names = []
if obj['date_fields']:
date_template_names = [
'scaffold/templates/APP_NAME/MODEL_NAME_archive.html',
'scaffold/templates/APP_NAME/MODEL_NAME_archive_day.html',
'scaffold/templates/APP_NAME/MODEL_NAME_archive_month.html',
'scaffold/templates/APP_NAME/MODEL_NAME_archive_week.html',
'scaffold/templates/APP_NAME/MODEL_NAME_archive_year.html',
]
for template_name in self.template_names + date_template_names:
template = get_template(template_name)
data = template.render(obj)
destination = self.get_destination(template_name, obj['app_name'], obj['url_name'])
with open(destination, 'w') as out:
out.write(data)
logger.debug("Write %s", destination)
class Command(BaseCommand):
"""The handle method is executed by the `./manage.py build app_name` command.
Introspect all models in the given app and call generators.
The get fields methods are loosely based on:
https://docs.djangoproject.com/en/1.10/ref/models/meta/
"""
def add_arguments(self, parser):
parser.add_argument('app_name', nargs='+', type=str)<|fim▁hole|>
def get_fields(self, model):
"""All model fields, fields dynamically added from the other end excluded.
`include_hidden` is False by default. If set to True, get_fields() will include fields that are used to
back other field’s functionality. This will also include any fields that have a related_name (such as
ManyToManyField, or ForeignKey) that start with a `+`."""
return [field.name for field in model._meta.get_fields(include_hidden=False)]
def get_concrete_fields(self, model):
"""All model fields, like get_fields but NO backward related fields."""
fields = [
(f, f.model if f.model != model else None)
for f in model._meta.get_fields()
if f.concrete and (
not f.is_relation
or f.one_to_one
or (f.many_to_one and f.related_model)
)
]
return [field.name for field, model in fields]
def get_related_fields(self, model):
"""Related fields like ForeignKey, OneToOne fields."""
return [
field.name
for field in model._meta.get_fields()
if (field.one_to_many or field.one_to_one)
and field.auto_created and not field.concrete
]
def get_many_to_many_fields(self, model):
"""ManyToMany fields"""
return [
field.name
for field in model._meta.get_fields()
if field.many_to_many and not field.auto_created
]
def get_date_fields(self, model):
"""Date or datetime fields"""
return [
field.name for field in model._meta.get_fields()
if field.__class__ in (models.DateField, models.DateTimeField)
]
def get_text_fields(self, model):
"""Text fields"""
return [
field.name for field in model._meta.get_fields()
if field.__class__ in (models.CharField, models.TextField)
]
def get_related_with_models(self, model):
fields = [
(f.related_model.__name__, f.model if f.model != model else None)
for f in model._meta.get_fields()
if (f.one_to_many or f.one_to_one)
and f.auto_created and not f.concrete
]
return list(set([model_name for model_name, _ in fields]))
def handle(self, *args, **options):
"""Handle the command"""
# Raise error if app is not in INSTALLED_APPS.
app_name = options['app_name'][0]
if app_name not in settings.INSTALLED_APPS:
raise CommandError('Add {} to installed apps'.format(app_name))
# Build one big context of all models and their fields.
context = {'items': [], 'app_name': app_name}
all_models = apps.all_models[app_name]
for name, model in all_models.items():
if "_" not in name: # Django auto generated cross tables do have `_`. Exclude them.
context['items'].append({
'app_name': app_name,
'model': model,
'model_name': model.__name__,
'url_name': slugify(model._meta.verbose_name).replace('-', ''),
'model_slug': slugify(model._meta.verbose_name).replace('-', ''),
'verbose_name': model._meta.verbose_name,
                    'verbose_plural': model._meta.verbose_name_plural,
'table_name': model._meta.db_table,
'slug': slugify(model._meta.verbose_name),
                    'slug_plural': slugify(model._meta.verbose_name_plural),
'fields': self.get_fields(model),
'concrete_fields': self.get_concrete_fields(model),
'related_fields': self.get_related_fields(model),
'many_to_many_fields': self.get_many_to_many_fields(model),
'date_fields': self.get_date_fields(model),
'text_fields': self.get_text_fields(model),
'releated_with_models': self.get_related_with_models(model),
})
logger.info(context)
print(context)
path = apps.app_configs[app_name].path
for generator in [
SingleFileGenerator,
MultiFileGenerator,
]:
generator(context=context, path=path).generate()
logger.info('Success!')<|fim▁end|> | |
<|file_name|>cfg.rs<|end_file_name|><|fim▁begin|>extern crate cargo;
extern crate cargotest;
extern crate hamcrest;
use std::str::FromStr;
use std::fmt;
use cargo::util::{Cfg, CfgExpr};
use cargotest::{is_nightly, rustc_host};
use cargotest::support::registry::Package;
use cargotest::support::{project, execs};
use hamcrest::assert_that;
macro_rules! c {
($a:ident) => (
Cfg::Name(stringify!($a).to_string())
);
($a:ident = $e:expr) => (
Cfg::KeyPair(stringify!($a).to_string(), $e.to_string())
);
}
macro_rules! e {
(any($($t:tt),*)) => (CfgExpr::Any(vec![$(e!($t)),*]));
(all($($t:tt),*)) => (CfgExpr::All(vec![$(e!($t)),*]));
(not($($t:tt)*)) => (CfgExpr::Not(Box::new(e!($($t)*))));
(($($t:tt)*)) => (e!($($t)*));
($($t:tt)*) => (CfgExpr::Value(c!($($t)*)));
}
fn good<T>(s: &str, expected: T)
where T: FromStr + PartialEq + fmt::Debug,
T::Err: fmt::Display
{
let c = match T::from_str(s) {<|fim▁hole|> };
assert_eq!(c, expected);
}
fn bad<T>(s: &str, err: &str)
where T: FromStr + fmt::Display, T::Err: fmt::Display
{
let e = match T::from_str(s) {
Ok(cfg) => panic!("expected `{}` to not parse but got {}", s, cfg),
Err(e) => e.to_string(),
};
assert!(e.contains(err), "when parsing `{}`,\n\"{}\" not contained \
inside: {}", s, err, e);
}
#[test]
fn cfg_syntax() {
good("foo", c!(foo));
good("_bar", c!(_bar));
good(" foo", c!(foo));
good(" foo ", c!(foo));
good(" foo = \"bar\"", c!(foo = "bar"));
good("foo=\"\"", c!(foo = ""));
good(" foo=\"3\" ", c!(foo = "3"));
good("foo = \"3 e\"", c!(foo = "3 e"));
}
#[test]
fn cfg_syntax_bad() {
bad::<Cfg>("", "found nothing");
bad::<Cfg>(" ", "found nothing");
bad::<Cfg>("\t", "unexpected character");
bad::<Cfg>("7", "unexpected character");
bad::<Cfg>("=", "expected identifier");
bad::<Cfg>(",", "expected identifier");
bad::<Cfg>("(", "expected identifier");
bad::<Cfg>("foo (", "malformed cfg value");
bad::<Cfg>("bar =", "expected a string");
bad::<Cfg>("bar = \"", "unterminated string");
bad::<Cfg>("foo, bar", "malformed cfg value");
}
#[test]
fn cfg_expr() {
good("foo", e!(foo));
good("_bar", e!(_bar));
good(" foo", e!(foo));
good(" foo ", e!(foo));
good(" foo = \"bar\"", e!(foo = "bar"));
good("foo=\"\"", e!(foo = ""));
good(" foo=\"3\" ", e!(foo = "3"));
good("foo = \"3 e\"", e!(foo = "3 e"));
good("all()", e!(all()));
good("all(a)", e!(all(a)));
good("all(a, b)", e!(all(a, b)));
good("all(a, )", e!(all(a)));
good("not(a = \"b\")", e!(not(a = "b")));
good("not(all(a))", e!(not(all(a))));
}
#[test]
fn cfg_expr_bad() {
bad::<CfgExpr>(" ", "found nothing");
bad::<CfgExpr>(" all", "expected `(`");
bad::<CfgExpr>("all(a", "expected `)`");
bad::<CfgExpr>("not", "expected `(`");
bad::<CfgExpr>("not(a", "expected `)`");
bad::<CfgExpr>("a = ", "expected a string");
bad::<CfgExpr>("all(not())", "expected identifier");
bad::<CfgExpr>("foo(a)", "consider using all() or any() explicitly");
}
#[test]
fn cfg_matches() {
assert!(e!(foo).matches(&[c!(bar), c!(foo), c!(baz)]));
assert!(e!(any(foo)).matches(&[c!(bar), c!(foo), c!(baz)]));
assert!(e!(any(foo, bar)).matches(&[c!(bar)]));
assert!(e!(any(foo, bar)).matches(&[c!(foo)]));
assert!(e!(all(foo, bar)).matches(&[c!(foo), c!(bar)]));
assert!(e!(all(foo, bar)).matches(&[c!(foo), c!(bar)]));
assert!(e!(not(foo)).matches(&[c!(bar)]));
assert!(e!(not(foo)).matches(&[]));
assert!(e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(bar)]));
assert!(e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(foo), c!(bar)]));
assert!(!e!(foo).matches(&[]));
assert!(!e!(foo).matches(&[c!(bar)]));
assert!(!e!(foo).matches(&[c!(fo)]));
assert!(!e!(any(foo)).matches(&[]));
assert!(!e!(any(foo)).matches(&[c!(bar)]));
assert!(!e!(any(foo)).matches(&[c!(bar), c!(baz)]));
assert!(!e!(all(foo)).matches(&[c!(bar), c!(baz)]));
assert!(!e!(all(foo, bar)).matches(&[c!(bar)]));
assert!(!e!(all(foo, bar)).matches(&[c!(foo)]));
assert!(!e!(all(foo, bar)).matches(&[]));
assert!(!e!(not(bar)).matches(&[c!(bar)]));
assert!(!e!(not(bar)).matches(&[c!(baz), c!(bar)]));
assert!(!e!(any((not(foo)), (all(foo, bar)))).matches(&[c!(foo)]));
}
#[test]
fn cfg_easy() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "a"
version = "0.0.1"
authors = []
[target.'cfg(unix)'.dependencies]
b = { path = 'b' }
[target."cfg(windows)".dependencies]
b = { path = 'b' }
"#)
.file("src/lib.rs", "extern crate b;")
.file("b/Cargo.toml", r#"
[package]
name = "b"
version = "0.0.1"
authors = []
"#)
.file("b/src/lib.rs", "");
assert_that(p.cargo_process("build").arg("-v"),
execs().with_status(0));
}
#[test]
fn dont_include() {
if !is_nightly() { return }
let other_family = if cfg!(unix) {"windows"} else {"unix"};
let p = project("foo")
.file("Cargo.toml", &format!(r#"
[package]
name = "a"
version = "0.0.1"
authors = []
[target.'cfg({})'.dependencies]
b = {{ path = 'b' }}
"#, other_family))
.file("src/lib.rs", "")
.file("b/Cargo.toml", r#"
[package]
name = "b"
version = "0.0.1"
authors = []
"#)
.file("b/src/lib.rs", "");
assert_that(p.cargo_process("build"),
execs().with_status(0).with_stderr("\
[COMPILING] a v0.0.1 ([..])
"));
}
#[test]
fn works_through_the_registry() {
if !is_nightly() { return }
Package::new("foo", "0.1.0").publish();
Package::new("bar", "0.1.0")
.target_dep("foo", "0.1.0", "'cfg(unix)'")
.target_dep("foo", "0.1.0", "'cfg(windows)'")
.publish();
let p = project("a")
.file("Cargo.toml", &r#"
[package]
name = "a"
version = "0.0.1"
authors = []
[dependencies]
bar = "0.1.0"
"#)
.file("src/lib.rs", "extern crate bar;");
assert_that(p.cargo_process("build"),
execs().with_status(0).with_stderr("\
[UPDATING] registry [..]
[DOWNLOADING] [..]
[DOWNLOADING] [..]
[COMPILING] foo v0.1.0 ([..])
[COMPILING] bar v0.1.0 ([..])
[COMPILING] a v0.0.1 ([..])
"));
}
#[test]
fn bad_target_spec() {
let p = project("a")
.file("Cargo.toml", &r#"
[package]
name = "a"
version = "0.0.1"
authors = []
[target.'cfg(4)'.dependencies]
bar = "0.1.0"
"#)
.file("src/lib.rs", "");
assert_that(p.cargo_process("build"),
execs().with_status(101).with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
failed to parse `4` as a cfg expression
Caused by:
unexpected character in cfg `4`, [..]
"));
}
#[test]
fn bad_target_spec2() {
let p = project("a")
.file("Cargo.toml", &r#"
[package]
name = "a"
version = "0.0.1"
authors = []
[target.'cfg(foo =)'.dependencies]
bar = "0.1.0"
"#)
.file("src/lib.rs", "");
assert_that(p.cargo_process("build"),
execs().with_status(101).with_stderr("\
[ERROR] failed to parse manifest at `[..]`
Caused by:
failed to parse `foo =` as a cfg expression
Caused by:
expected a string, found nothing
"));
}
#[test]
fn multiple_match_ok() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", &format!(r#"
[package]
name = "a"
version = "0.0.1"
authors = []
[target.'cfg(unix)'.dependencies]
b = {{ path = 'b' }}
[target.'cfg(target_family = "unix")'.dependencies]
b = {{ path = 'b' }}
[target."cfg(windows)".dependencies]
b = {{ path = 'b' }}
[target.'cfg(target_family = "windows")'.dependencies]
b = {{ path = 'b' }}
[target."cfg(any(windows, unix))".dependencies]
b = {{ path = 'b' }}
[target.{}.dependencies]
b = {{ path = 'b' }}
"#, rustc_host()))
.file("src/lib.rs", "extern crate b;")
.file("b/Cargo.toml", r#"
[package]
name = "b"
version = "0.0.1"
authors = []
"#)
.file("b/src/lib.rs", "");
assert_that(p.cargo_process("build").arg("-v"),
execs().with_status(0));
}
#[test]
fn any_ok() {
if !is_nightly() { return }
let p = project("foo")
.file("Cargo.toml", r#"
[package]
name = "a"
version = "0.0.1"
authors = []
[target."cfg(any(windows, unix))".dependencies]
b = { path = 'b' }
"#)
.file("src/lib.rs", "extern crate b;")
.file("b/Cargo.toml", r#"
[package]
name = "b"
version = "0.0.1"
authors = []
"#)
.file("b/src/lib.rs", "");
assert_that(p.cargo_process("build").arg("-v"),
execs().with_status(0));
}<|fim▁end|> | Ok(c) => c,
Err(e) => panic!("failed to parse `{}`: {}", s, e), |
<|file_name|>fc_scan.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
# encoding: utf-8<|fim▁hole|>import re
from waflib import Utils,Task,TaskGen,Logs
from waflib.TaskGen import feature,before_method,after_method,extension
from waflib.Configure import conf
INC_REGEX="""(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
USE_REGEX="""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
MOD_REGEX="""(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
re_inc=re.compile(INC_REGEX,re.I)
re_use=re.compile(USE_REGEX,re.I)
re_mod=re.compile(MOD_REGEX,re.I)
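# Illustrative Fortran lines these patterns are meant to catch (hypothetical sources,
# not from any particular project):
#   include "constants.inc"   -> re_inc captures constants.inc
#   use mpi                   -> re_use captures mpi
#   module fluid_solver       -> re_mod captures fluid_solver (MODULE PROCEDURE is excluded)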
class fortran_parser(object):
def __init__(self,incpaths):
self.seen=[]
self.nodes=[]
self.names=[]
self.incpaths=incpaths
def find_deps(self,node):
txt=node.read()
incs=[]
uses=[]
mods=[]
for line in txt.splitlines():
m=re_inc.search(line)
if m:
incs.append(m.group(1))
m=re_use.search(line)
if m:
uses.append(m.group(1))
m=re_mod.search(line)
if m:
mods.append(m.group(1))
return(incs,uses,mods)
def start(self,node):
self.waiting=[node]
while self.waiting:
nd=self.waiting.pop(0)
self.iter(nd)
def iter(self,node):
path=node.abspath()
incs,uses,mods=self.find_deps(node)
for x in incs:
if x in self.seen:
continue
self.seen.append(x)
self.tryfind_header(x)
for x in uses:
name="USE@%s"%x
if not name in self.names:
self.names.append(name)
for x in mods:
name="MOD@%s"%x
if not name in self.names:
self.names.append(name)
def tryfind_header(self,filename):
found=None
for n in self.incpaths:
found=n.find_resource(filename)
if found:
self.nodes.append(found)
self.waiting.append(found)
break
if not found:
if not filename in self.names:
self.names.append(filename)<|fim▁end|> | # WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
|
<|file_name|>result.cc<|end_file_name|><|fim▁begin|>#include "protagonist.h"
#include "SerializeResult.h"
#include "v8_wrapper.h"
#include "snowcrash.h"
using namespace v8;
using namespace protagonist;
Result::Result()
{
}
Result::~Result()
{
}
Nan::Persistent<Function> Result::constructor;
void Result::Init(Handle<Object> exports)
{
Nan::HandleScope scope;
Local<FunctionTemplate> t = Nan::New<FunctionTemplate>(New);
t->SetClassName(Nan::New<String>("Result").ToLocalChecked());
t->InstanceTemplate()->SetInternalFieldCount(1);
constructor.Reset(t->GetFunction());
exports->Set(Nan::New<String>("Result").ToLocalChecked(), t->GetFunction());
}
NAN_METHOD(Result::New)
{
Nan::HandleScope scope;
Result* result = ::new Result();
result->Wrap(info.This());
info.GetReturnValue().Set(info.This());<|fim▁hole|> const snowcrash::BlueprintParserOptions& options,
const drafter::ASTType& astType)
{
static const char* AstKey = "ast";
static const char* ErrorKey = "error";
static const char* SourcemapKey = "sourcemap";
sos::Object result;
try {
result = drafter::WrapResult(parseResult, options, astType);
}
catch (snowcrash::Error& error) {
parseResult.report.error = error;
}
if (astType == drafter::NormalASTType && parseResult.report.error.code != snowcrash::Error::OK) {
result.set(AstKey, sos::Null());
if ((options & snowcrash::ExportSourcemapOption) != 0) {
result.set(SourcemapKey, sos::Null());
}
}
result.unset(ErrorKey);
return v8_wrap(result)->ToObject();
}<|fim▁end|> | }
v8::Local<v8::Object> Result::WrapResult(snowcrash::ParseResult<snowcrash::Blueprint>& parseResult, |
<|file_name|>preprocessor_tests.rs<|end_file_name|><|fim▁begin|>// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::{
errors::*,
preprocessor::{build_transactions, extract_global_config, split_input},
};
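// Runs the full preprocessing pipeline (global config extraction, input
// splitting, transaction building) over a test input string.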
fn parse_input(input: &str) -> Result<()> {
let config = extract_global_config("".lines(), false)?;
let (_, transactions) = split_input(input.lines(), &config)?;
build_transactions(&config, &transactions)?;
Ok(())
}
#[test]<|fim▁hole|>
#[test]
fn parse_input_no_transactions_with_config() {
parse_input("//! no-run: verifier").unwrap_err();
}
#[rustfmt::skip]
#[test]
fn parse_input_nothing_before_first_empty_transaction() {
parse_input(r"
//! new-transaction
main() {}
").unwrap();
}
#[rustfmt::skip]
#[test]
fn parse_input_config_before_first_empty_transaction() {
parse_input(r"
//! no-run: runtime
//! new-transaction
main() {}
").unwrap_err();
}
#[rustfmt::skip]
#[test]
fn parse_input_empty_transaction() {
parse_input(r"
main() {}
//! new-transaction
//! new-transaction
main() {}
").unwrap_err();
}
#[rustfmt::skip]
#[test]
fn parse_input_empty_transaction_with_config() {
parse_input(r"
main() {}
//! new-transaction
//! sender: default
//! new-transaction
main() {}
").unwrap_err();
}<|fim▁end|> | fn parse_input_no_transactions() {
parse_input("").unwrap_err();
} |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2020 DDN. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
use future::BoxFuture;
use futures::{future, FutureExt, TryFutureExt};
use iml_postgres::{
alert,
sqlx::{self, PgPool},
};
use iml_service_queue::service_queue::ImlServiceQueueError;
use iml_wire_types::{AlertRecordType, AlertSeverity};
use lazy_static::lazy_static;
use regex::Regex;
use std::collections::HashMap;
use thiserror::Error;
#[derive(Error, Debug)]
pub enum ImlJournalError {
#[error(transparent)]
ImlRabbitError(#[from] iml_rabbit::ImlRabbitError),
#[error(transparent)]
ImlServiceQueueError(#[from] ImlServiceQueueError),
#[error(transparent)]
SerdeJsonError(#[from] serde_json::Error),
#[error(transparent)]
SqlxCoreError(#[from] sqlx::error::Error),
#[error(transparent)]
TryFromIntError(#[from] std::num::TryFromIntError),
}
#[derive(Debug, Eq, PartialEq)]
#[repr(i16)]
pub enum LogMessageClass {
Normal,
Lustre,
LustreError,
Copytool,
CopytoolError,
}
lazy_static! {
static ref LUSTRE_ERROR_TS: Regex = Regex::new(r"^\[\d+\.\d+\] LustreError:").unwrap();
static ref LUSTRE_TS: Regex = Regex::new(r"^\[\d+\.\d+\] Lustre:").unwrap();
static ref LUSTRE_ERROR: Regex = Regex::new(r"^LustreError:").unwrap();
static ref LUSTRE: Regex = Regex::new(r"^Lustre:").unwrap();
}
type HandlerFut<'a> = BoxFuture<'a, Result<(), ImlJournalError>>;
type Handler = for<'a> fn(&'a PgPool, &'a str, i32, i32) -> HandlerFut<'a>;
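// Maps a trigger substring that can appear in a journal message to the handler
// that should run when the substring is seen (looked up in `execute_handlers`).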
lazy_static! {
static ref HANDLERS: HashMap<&'static str, Handler> = {
let mut hm: HashMap<&'static str, Handler> = HashMap::new();
hm.insert("Can't start acceptor on port", port_used_handler);
hm.insert("Can't create socket:", port_used_handler);
hm.insert(": connection from ", client_connection_handler);
hm.insert(": select flavor ", server_security_flavor_handler);
hm.insert(
": obd_export_evict_by_uuid()",
admin_client_eviction_handler,
);
hm.insert(": evicting client at ", client_eviction_handler);
hm
};
}
pub fn get_message_class(message: &str) -> LogMessageClass {
if LUSTRE_ERROR_TS.is_match(message) || LUSTRE_ERROR.is_match(message) {
LogMessageClass::LustreError
} else if LUSTRE_TS.is_match(message) || LUSTRE.is_match(message) {
LogMessageClass::Lustre
} else {
LogMessageClass::Normal
}
}
fn port_used_handler<'a>(
pool: &'a PgPool,
_: &str,
host_id: i32,
host_content_type_id: i32,
) -> BoxFuture<'a, Result<(), ImlJournalError>> {
alert::raise(
pool,
AlertRecordType::SyslogEvent,
"Lustre port already being used".into(),
host_content_type_id,
None,
AlertSeverity::ERROR,
host_id,
)
.err_into()
.boxed()
}
fn client_connection_handler<'a>(
pool: &'a PgPool,
msg: &str,
host_id: i32,
host_content_type_id: i32,
) -> HandlerFut<'a> {
if let Some((lustre_pid, msg)) = client_connection_parser(msg) {
return alert::raise(
pool,
AlertRecordType::ClientConnectEvent,
msg,
host_content_type_id,
Some(lustre_pid),
AlertSeverity::INFO,
host_id,
)
.err_into()
.boxed();
};
future::ok(()).boxed()
}
/// Parses a client connected to a target
fn client_connection_parser(msg: &str) -> Option<(i32, String)> {
lazy_static! {
static ref TARGET_END: Regex = Regex::new(r":\s+connection from").unwrap();
}
// get the client NID out of the string
let nid_start = msg.find('@')? + 1;
let nid_len = msg[nid_start..].find(' ')?;
// and the UUID
let uuid_start = msg.find(" from ")? + 6;
let uuid_len = msg[uuid_start..].find('@')?;
// and of course the target
let target_end = TARGET_END.find(msg)?.start();
let target_start = msg[0..target_end].rfind(' ')? + 1;
let lustre_pid = msg[9..9 + msg[9..].find(':')?].parse::<i32>().ok()?;
let msg = format!(
"client {} from {} connected to target {}",
&msg[uuid_start..uuid_start + uuid_len],
&msg[nid_start..nid_start + nid_len],
&msg[target_start..target_end],
);
Some((lustre_pid, msg))
}
fn server_security_flavor_parser(msg: &str) -> Option<(i32, String)> {
// get the flavor out of the string
let flavor_start = msg.rfind(' ')? + 1;
let flavor = &msg[flavor_start..];
let lustre_pid = msg[9..9 + msg[9..].find(':')?].parse::<i32>().ok()?;
Some((lustre_pid, format!("with security flavor {}", flavor)))
}
fn server_security_flavor_handler<'a>(
pool: &'a PgPool,
msg: &str,
_: i32,
_: i32,
) -> HandlerFut<'a> {
let (lustre_pid, msg) = match server_security_flavor_parser(msg) {
Some(x) => x,
None => return future::ok(()).boxed(),
};
struct Row {
id: i32,
message: Option<String>,
}
async move {
let row = sqlx::query_as!(Row,
"SELECT id, message FROM chroma_core_alertstate WHERE lustre_pid = $1 ORDER BY id DESC LIMIT 1",
Some(lustre_pid)
)
.fetch_optional(pool)
.await?;
let (id, msg) = match row {
Some(Row { id, message: Some(message) }) => (id, format!("{} {}", message, msg)),
Some(Row { message:None, ..}) | None => return Ok(()),
};
sqlx::query!(
r#"
UPDATE chroma_core_alertstate
SET message = $1
WHERE
id = $2
"#,
msg,
id
)
.execute(pool)
.await?;
Ok(())
}
.boxed()
}
fn admin_client_eviction_parser(msg: &str) -> Option<(i32, String)> {
let uuid = get_item_after(msg, "evicting ")?;
let x = format!("client {} evicted by the administrator", uuid);
let lustre_pid = msg[9..9 + msg[9..].find(':')?].parse::<i32>().ok()?;
Some((lustre_pid, x))
}
fn admin_client_eviction_handler<'a>(
pool: &'a PgPool,
msg: &str,
host_id: i32,
host_content_type_id: i32,
) -> HandlerFut<'a> {
if let Some((lustre_pid, msg)) = admin_client_eviction_parser(msg) {
return alert::raise(
pool,
AlertRecordType::ClientConnectEvent,
msg,
host_content_type_id,
Some(lustre_pid),
AlertSeverity::WARNING,
host_id,
)
.err_into()
.boxed();
};
future::ok(()).boxed()
}
fn client_eviction_parser(msg: &str) -> Option<(i32, String)> {
let s = msg.find("### ")? + 4;
let l = msg[s..].find(": evicting client at ")?;
let reason = &msg[s..s + l];
let client = get_item_after(msg, ": evicting client at ")?;
let lustre_pid = get_item_after(msg, "pid: ")?.parse::<i32>().ok()?;
Some((lustre_pid, format!("client {} evicted: {}", client, reason)))
}
fn client_eviction_handler<'a>(
pool: &'a PgPool,
msg: &str,
host_id: i32,
host_content_type_id: i32,
) -> HandlerFut<'a> {
if let Some((lustre_pid, msg)) = client_eviction_parser(msg) {
return alert::raise(
pool,
AlertRecordType::ClientConnectEvent,
msg,
host_content_type_id,
Some(lustre_pid),
AlertSeverity::WARNING,
host_id,
)
.err_into()
.boxed();
};
future::ok(()).boxed()
}
<|fim▁hole|>
Some(&s[sub..sub + l])
}
fn find_one_in_many<'a>(msg: &str, handlers: &'a HashMap<&str, Handler>) -> Option<&'a Handler> {
handlers
.iter()
        .find(|(k, _)| msg.find(*k).is_some())
.map(|(_, v)| v)
}
pub async fn execute_handlers(
msg: &str,
host_id: i32,
host_content_type_id: i32,
pool: &PgPool,
) -> Result<(), ImlJournalError> {
let handler = match find_one_in_many(msg, &HANDLERS) {
Some(h) => h,
None => return Ok(()),
};
handler(pool, msg, host_id, host_content_type_id).await?;
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
use insta::assert_debug_snapshot;
#[test]
fn test_get_message_class() {
let tests = vec![
(
"[NOT A TIME STAMP ] Lustre: Lustre output here",
LogMessageClass::Normal,
),
("Lustre: Lustre output here", LogMessageClass::Lustre),
(
"LustreError: Lustre output here",
LogMessageClass::LustreError,
),
(
"[1234567A89] LustreError: Not A Time Stamp",
LogMessageClass::Normal,
),
(
"[123456789.123456789A] LustreError: Not A Time Stamp",
LogMessageClass::Normal,
),
("Nothing to see here", LogMessageClass::Normal),
];
for (msg, expected) in tests {
assert_eq!(get_message_class(msg), expected, "{}", msg);
assert_eq!(
get_message_class(&format!("[9830337.7944560] {}", msg)),
expected,
"[9830337.7944560] {} ",
msg
);
}
}
#[test]
fn test_client_connection_parser() {
let inputs = vec![
" Lustre: 5629:0:(ldlm_lib.c:877:target_handle_connect()) lustre-MDT0000: connection from [email protected]@tcp t0 exp 0000000000000000 cur 1317994929 last 0",
" Lustre: 27559:0:(ldlm_lib.c:871:target_handle_connect()) lustre-OST0001: connection from [email protected]@tcp t0 exp 0000000000000000 cur 1317994930 last 0",
" Lustre: 9150:0:(ldlm_lib.c:871:target_handle_connect()) lustre-OST0000: connection from [email protected]@tcp t0 exp 0000000000000000 cur 1317994930 last 0",
" Lustre: 31793:0:(ldlm_lib.c:877:target_handle_connect()) MGS: connection from [email protected]@tcp t0 exp 0000000000000000 cur 1317994928 last 0",
];
for input in inputs {
assert_debug_snapshot!(client_connection_parser(input).unwrap());
}
}
#[test]
fn test_server_security_flavor_parser() {
let inputs = vec![
" Lustre: 5629:0:(sec.c:1474:sptlrpc_import_sec_adapt()) import lustre-MDT0000->NET_0x20000c0a87ada_UUID netid 20000: select flavor null",
"Lustre: 20380:0:(sec.c:1474:sptlrpc_import_sec_adapt()) import MGC192.168.122.105@tcp->MGC192.168.122.105@tcp_0 netid 20000: select flavor null"
];
for input in inputs {
assert_debug_snapshot!(server_security_flavor_parser(input).unwrap());
}
}
#[test]
fn test_admin_client_eviction_parser() {
let x = " Lustre: 2689:0:(genops.c:1379:obd_export_evict_by_uuid()) lustre-OST0001: evicting 26959b68-1208-1fca-1f07-da2dc872c55f at adminstrative request";
assert_debug_snapshot!(admin_client_eviction_parser(x).unwrap());
}
#[test]
fn test_client_eviction_parser() {
let inputs = vec![
" LustreError: 0:0:(ldlm_lockd.c:356:waiting_locks_callback()) ### lock callback timer expired after 101s: evicting client at 0@lo ns: mdt-ffff8801cd5be000 lock: ffff880126f8f480/0xe99a593b682aed45 lrc: 3/0,0 mode: PR/PR res: 8589935876/10593 bits 0x3 rrc: 2 type: IBT flags: 0x4000020 remote: 0xe99a593b682aecea expref: 14 pid: 3636 timeout: 4389324308'",
" LustreError: 0:0:(ldlm_lockd.c:356:waiting_locks_callback()) ### lock callback timer expired after 151s: evicting client at 10.10.6.127@tcp ns: mdt-ffff880027554000 lock: ffff8800345b9480/0x7e9e6dc241f05651 lrc: 3/0,0 mode: PR/PR res: 8589935619/19678 bits 0x3 rrc: 2 type: IBT flags: 0x4000020 remote: 0xebc1380d8b532fd7 expref: 5104 pid: 23056 timeout: 4313115550"];
for input in inputs {
assert_debug_snapshot!(client_eviction_parser(input).unwrap());
}
}
}<|fim▁end|> | fn get_item_after<'a>(s: &'a str, after: &str) -> Option<&'a str> {
let sub = s.find(after)? + after.len();
let l = s[sub..].find(' ')?; |
<|file_name|>app.module.ts<|end_file_name|><|fim▁begin|>import { NgModule, ErrorHandler } from "@angular/core";
import { RouterModule } from "@angular/router";
import { FormsModule } from "@angular/forms";
import { BrowserModule } from '@angular/platform-browser';
import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
import { HttpModule } from '@angular/http';
import { TranslateModule, TranslateLoader } from "@ngx-translate/core";
import { NgxDatatableModule } from '@swimlane/ngx-datatable';
import { ToastyModule } from 'ng2-toasty';
import { ModalModule } from 'ng2-bootstrap/modal';
import { TooltipModule } from "ng2-bootstrap/tooltip";
import { PopoverModule } from "ng2-bootstrap/popover";
import { BsDropdownModule } from 'ng2-bootstrap/dropdown';
import { CarouselModule } from 'ng2-bootstrap/carousel';
import { TimepickerModule } from 'ng2-bootstrap/timepicker';
import { TabsModule } from 'ng2-bootstrap/tabs';
import { ChartsModule } from 'ng2-charts';
import { AppRoutingModule } from './app-routing.module';
import { AppErrorHandler } from './app-error.handler';
import { AppTitleService } from './services/app-title.service';
import { AppTranslationService, TranslateLanguageLoader } from './services/app-translation.service';
import { ConfigurationService } from './services/configuration.service';
import { AlertService } from './services/alert.service';
import { LocalStoreManager } from './services/local-store-manager.service';
import { EndpointFactory } from './services/endpoint-factory.service';
import { NotificationService } from './services/notification.service';
import { NotificationEndpoint } from './services/notification-endpoint.service';
import { AccountService } from './services/account.service';
import { AccountEndpoint } from './services/account-endpoint.service';
import { AppointmentService } from './services/appointment.service';
import { AppointmentEndpoint } from './services/appointment-endpoint.service';
import { ConsultationService } from './services/consultation.service';
import { ConsultationEndpoint } from './services/consultation-endpoint.service';
import { ProviderService } from './services/provider.service';
import { ProviderEndpoint } from './services/provider-endpoint.service';
import { PatientService } from './services/patient.service';
import { PatientEndpoint } from './services/patient-endpoint.service';
import { DepartmentService } from './services/department.service';
import { DepartmentEndpoint } from './services/department-endpoint.service';
import { EqualValidator } from './directives/equal-validator.directive';
import { LastElementDirective } from './directives/last-element.directive';
import { AutofocusDirective } from './directives/autofocus.directive';
import { BootstrapTabDirective } from './directives/bootstrap-tab.directive';
import { BootstrapToggleDirective } from './directives/bootstrap-toggle.directive';
import { BootstrapSelectDirective } from './directives/bootstrap-select.directive';
import { BootstrapDatepickerDirective } from './directives/bootstrap-datepicker.directive';
import { FullCalendarDirective } from './directives/fullcalendar.directive';
import { GroupByPipe } from './pipes/group-by.pipe';
import { AppComponent } from "./components/app.component";
import { LoginComponent } from "./components/login/login.component";
import { HomeComponent } from "./components/home/home.component";
import { AppointmentsComponent } from "./components/appointments/appointments.component";
import { ConsultationsComponent } from "./components/consultations/consultations.component";
import { ProvidersComponent } from "./components/providers/providers.component";
import { PatientsComponent } from "./components/patients/patients.component";
import { HistoryComponent } from "./components/history/history.component";
import { SettingsComponent } from "./components/settings/settings.component";
import { AboutComponent } from "./components/about/about.component";
import { NotFoundComponent } from "./components/not-found/not-found.component";
import { BannerDemoComponent } from "./components/controls/banner-demo.component";
import { TodoDemoComponent } from "./components/controls/todo-demo.component";
import { StatisticsDemoComponent } from "./components/controls/statistics-demo.component";
import { NotificationsViewerComponent } from "./components/controls/notifications-viewer.component";
import { SearchBoxComponent } from "./components/controls/search-box.component";
import { UserInfoComponent } from "./components/controls/user-info.component";
import { UserPreferencesComponent } from "./components/controls/user-preferences.component";
import { UsersManagementComponent } from "./components/controls/users-management.component";
import { DepartmentEditorComponent } from "./components/controls/department-editor.component";
import { DepartmentsManagementComponent } from "./components/controls/departments-management.component";
import { RolesManagementComponent } from "./components/controls/roles-management.component";
import { RoleEditorComponent } from "./components/controls/role-editor.component";
import { AppointmentsManagementComponent } from "./components/controls/appointments-management.component";
import { AppointmentEditorComponent } from "./components/controls/appointment-editor.component";
import { AvailabilityManagementComponent } from "./components/controls/availability-management.component";
import { AvailabilityEditorComponent } from "./components/controls/availability-editor.component";
import { ConsultationsManagementComponent } from "./components/controls/consultations-management.component";
import { ConsultationEditorComponent } from "./components/controls/consultation-editor.component";
import { PatientEditorComponent } from "./components/controls/patient-editor.component";
import { ProviderEditorComponent } from "./components/controls/provider-editor.component";
import { ProfileEditorComponent } from "./components/controls/profile-editor.component";
import { PatientsManagementComponent } from "./components/controls/patients-management.component";
import { ProvidersManagementComponent } from "./components/controls/providers-management.component";
import { PatientHistoryManagementComponent } from "./components/controls/patient-history-management.component";
import { PatientHistoryEditorComponent } from "./components/controls/patient-history-editor.component";
@NgModule({
imports: [
BrowserModule,
BrowserAnimationsModule,
HttpModule,
FormsModule,
AppRoutingModule,
TranslateModule.forRoot({
loader: {
provide: TranslateLoader,
useClass: TranslateLanguageLoader
}
}),
NgxDatatableModule,
ToastyModule.forRoot(),
TooltipModule.forRoot(),
PopoverModule.forRoot(),
BsDropdownModule.forRoot(),
CarouselModule.forRoot(),
TimepickerModule.forRoot(),
TabsModule.forRoot(),
ModalModule.forRoot(),
ChartsModule
],
declarations: [
AppComponent,
LoginComponent,
HomeComponent,
AppointmentsComponent,
ConsultationsComponent,
PatientsComponent,
ProvidersComponent,
HistoryComponent,
SettingsComponent,
UsersManagementComponent, UserInfoComponent, UserPreferencesComponent,
DepartmentsManagementComponent, DepartmentEditorComponent,
RolesManagementComponent, RoleEditorComponent,
AboutComponent,
NotFoundComponent,
NotificationsViewerComponent,
SearchBoxComponent,
AppointmentsManagementComponent, AppointmentEditorComponent,
AvailabilityManagementComponent, AvailabilityEditorComponent,
ConsultationsManagementComponent, ConsultationEditorComponent,
PatientEditorComponent,
ProviderEditorComponent,
ProfileEditorComponent,
ProvidersManagementComponent,
PatientsManagementComponent,
PatientHistoryManagementComponent,
PatientHistoryEditorComponent,
StatisticsDemoComponent, TodoDemoComponent, BannerDemoComponent,
EqualValidator,
LastElementDirective,
AutofocusDirective,
BootstrapTabDirective,
BootstrapToggleDirective,
BootstrapSelectDirective,
BootstrapDatepickerDirective,
FullCalendarDirective,
GroupByPipe
],
providers: [
{ provide: ErrorHandler, useClass: AppErrorHandler },
AlertService,
ConfigurationService,
AppTitleService,
AppTranslationService,
NotificationService,
NotificationEndpoint,
AccountService,
AccountEndpoint,
AppointmentService,
AppointmentEndpoint,
ConsultationService,
ConsultationEndpoint,
PatientService,<|fim▁hole|> ProviderService,
ProviderEndpoint,
DepartmentService,
DepartmentEndpoint,
LocalStoreManager,
EndpointFactory
],
bootstrap: [AppComponent]
})
export class AppModule { }<|fim▁end|> | PatientEndpoint, |
<|file_name|>eucopyright.js<|end_file_name|><|fim▁begin|>/* jshint strict: true, quotmark: false, es3: true */
/* global $: false, JSZip: false, odtprocessor: false */
var EUCopyright = EUCopyright || {};
EUCopyright.settings = EUCopyright.settings || {};
EUCopyright.settings.defaultToNoOpinion = EUCopyright.settings.defaultToNoOpinion === undefined ? true : EUCopyright.settings.defaultToNoOpinion;
(function(){
"use strict";
EUCopyright.parseUrlParams = function (querystr) {
var urlParams;
querystr = querystr || window.location.search;
var match,
pl = /\+/g, // Regex for replacing addition symbol with a space
search = /([^&=]+)=?([^&]*)/g,
decode = function (s) { return decodeURIComponent(s.replace(pl, " ")); },
query = querystr.substring(1);
urlParams = {};
match = search.exec(query);
while (match) {
urlParams[decode(match[1])] = decode(match[2]);
match = search.exec(query);
}
return urlParams;
};
EUCopyright.parseCSV = function( strData, strDelimiter ){
/*
This code taken from:
http://stackoverflow.com/a/1293163/114462
under CC-By-SA 3.0
*/
// This will parse a delimited string into an array of
// arrays. The default delimiter is the comma, but this
// can be overriden in the second argument.
// Check to see if the delimiter is defined. If not,
// then default to comma.
strDelimiter = (strDelimiter || ",");
var strMatchedValue, strMatchedDelimiter;
// Create a regular expression to parse the CSV values.
var objPattern = new RegExp(
(
// Delimiters.
"(\\" + strDelimiter + "|\\r?\\n|\\r|^)" +
// Quoted fields.
"(?:\"([^\"]*(?:\"\"[^\"]*)*)\"|" +
// Standard fields.
"([^\"\\" + strDelimiter + "\\r\\n]*))"
),
"gi"
);
// Create an array to hold our data. Give the array
// a default empty first row.
var arrData = [[]];
// Create an array to hold our individual pattern
// matching groups.
var arrMatches = null;
// Keep looping over the regular expression matches
// until we can no longer find a match.
arrMatches = objPattern.exec(strData);
while (arrMatches){
// Get the delimiter that was found.
strMatchedDelimiter = arrMatches[ 1 ];
// Check to see if the given delimiter has a length
// (is not the start of string) and if it matches
      // field delimiter. If it does not, then we know
// that this delimiter is a row delimiter.
if (
strMatchedDelimiter.length &&
(strMatchedDelimiter != strDelimiter)
){
// Since we have reached a new row of data,
// add an empty row to our data array.
arrData.push( [] );
}
// Now that we have our delimiter out of the way,
// let's check to see which kind of value we
// captured (quoted or unquoted).
if (arrMatches[ 2 ]){
// We found a quoted value. When we capture
// this value, unescape any double quotes.
strMatchedValue = arrMatches[ 2 ].replace(/""/g, '"');
} else {
// We found a non-quoted value.
strMatchedValue = arrMatches[ 3 ];
}
// Now that we have our value string, let's add
// it to the data array.
arrData[arrData.length - 1].push(strMatchedValue);
arrMatches = objPattern.exec(strData);
}
// Return the parsed data.
return arrData ;
};
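  // Gathers the respondent details and every answered question from the form
  // into a plain object keyed by question number (e.g. "q-5", "q-5-2-text").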
EUCopyright.collectData = function() {
var data = {};
var question, j, radio;
var typesOfRespondents = [];
$('*[name="typeofrespondent"]').each(function(i, el){
el = $(el);
if ((el.attr('type') !== 'checkbox' && el.attr('type') !== 'radio') || el.prop('checked')){
typesOfRespondents.push(el.val());
}
});
for (var i = 0; i < EUCopyright.questions.length; i += 1) {
question = EUCopyright.questions[i];
if (question.type === 'multiple_choice' && question.options) {
for (j = 0; j < question.options.length; j += 1) {
radio = $('#q-' + question.num + '-' + j);
if (radio.prop('checked')) {
data['q-' + question.num] = j;
if (question.options[j].fulltext) {
data['q-' + question.num + '-' + j + '-text'] = $('#q-' + question.num + '-' + j + '-text').val();
}
}
}
} else if (question.type == 'open_question') {
data['q-' + question.num + '-text'] = $('#q-' + question.num + '-text').val();
}
}
data.name = $('#name').val();
data.registerid = $('#register-id').val();
data.typeofrespondent = typesOfRespondents;
data.typeofrespondentother = $('#typeofrespondent-other-text').val();
return data;
};
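  // Assembles the consultation .odt in memory: content.xml is rendered from
  // the collected answers via odtprocessor and zipped together with the static
  // ODF parts (mimetype, manifest, meta, settings, styles).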
EUCopyright.compile = function(data, settings){
var addFile = function(zip, zipPath){
var d = $.Deferred();
$.get(EUCopyright.baseurl + '/data/' + zipPath).done(function(parsed, mes, xhr){
zip.file(zipPath, xhr.responseText);
d.resolve();
});
return d;
};
var constructContents = function(zip, data, settings) {
var d = $.Deferred();
$.get(EUCopyright.baseurl + '/data/content.xml').done(function(parsed, mes, xhr){
var text = xhr.responseText;
text = odtprocessor.renderText(
text,
data,
EUCopyright.questions,
settings
);
zip.file('content.xml', text);
d.resolve();
});
return d;
};
var zip = new JSZip();
var jobs = [
constructContents(zip, data, settings),
addFile(zip, 'mimetype'),
addFile(zip, 'META-INF/manifest.xml'),
addFile(zip, 'meta.xml'),
addFile(zip, 'settings.xml'),
addFile(zip, 'styles.xml')
];
var d = $.Deferred();
$.when.apply($, jobs).then(function(){
d.resolve(zip);
});
return d;
};
EUCopyright.answerCache = {};
EUCopyright.applyGuideToAll = function(guide, options){
var question, answer, answers = EUCopyright.answerCache[guide.slug];
for (var i = 0; i < EUCopyright.questions.length; i += 1) {
if (answers[EUCopyright.questions[i].num]) {
question = EUCopyright.questions[i];
answer = answers[question.num];
EUCopyright.applyGuide(guide, question, answer, options);
}
}
};
EUCopyright.supports_html5_storage = function() {
try {
return 'localStorage' in window && window.localStorage !== null;
} catch (e) {
return false;
}
};
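  // Pre-fills a single question from a guide's answer: checks the suggested
  // option, fills free-text fields that are still empty and marks the matching
  // answer-choice button as active.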
EUCopyright.applyGuide = function(guide, question, answer, options) {
options = options || {};
var isAnswered = false;
if (options.activeOnly && !$('#q-' + question.num).hasClass('active')) {
return;
}
if (question.type === 'multiple_choice' && question.options) {
if (answer.option !== null) {
isAnswered = true;
$('#q-' + question.num + '-' + answer.option).
prop('checked', true).
parents('div').addClass('isChecked'); // microsites might need this to hide unrecommended answer options
if (question.options && question.options[answer.option].fulltext) {
if ($('#q-' + question.num + '-' + answer.option + '-text').val() === '') {
$('#q-' + question.num + '-' + answer.option + '-text').val(answer.answer);
}
}
}
} else if (question.type == 'open_question') {
if (answer.answer) {
isAnswered = true;
}
if ($('#q-' + question.num + '-text').val() === '') {
$('#q-' + question.num + '-text').val(answer.answer);
}<|fim▁hole|> isAnswered = true;
$('#q-' + question.num + '-customexplanation').slideDown();
$('#q-' + question.num + '-customexplanation-text').html(answer.explanation);
if (answer.explanationmore) {
$('#q-' + question.num + '-customexplanation').find('.toggle').show();
$('#q-' + question.num + '-customexplanationmore-text').html(answer.explanationmore);
} else {
$('#q-' + question.num + '-customexplanation').find('.toggle').hide();
$('#q-' + question.num + '-customexplanationmore-text').html('').hide();
}
} else {
$('#q-' + question.num + '-customexplanation').slideUp();
$('#q-' + question.num + '-customexplanationmore-text').slideUp();
}
$('.answer-choices-' + question.num + ' a').removeClass('active');
if (isAnswered) {
$('#answer-choice-' + guide.slug + '-' + question.num).addClass('active');
}
};
EUCopyright.loadQuestionGuide = function(slug, clb){
if (EUCopyright.answerCache[slug] !== undefined){
if (EUCopyright.answerCache[slug] === 'waiting') {
return;
}
return clb(EUCopyright.answerCache[slug]);
}
EUCopyright.answerCache[slug] = 'waiting';
$.get(EUCopyright.answers[slug].url, function(text, status, xhr){
var csv = EUCopyright.parseCSV(xhr.responseText);
var answers = {};
for (var i = 1; i < csv.length; i += 1) {
var row = {};
if (csv[i].length <= 1) {
continue; // skip empty line in csv (usually last one)
}
for (var j = 0; j < csv[0].length; j += 1) {
row[csv[0][j]] = csv[i][j];
}
var answer = {
option: row.Option ? parseInt(row.Option, 10) - 1 : null,
answer: row.Answer,
explanation: row.Explanation.replace(/\n/g, '<br/>')
};
if (row.Explanation_more) {
answer.explanationmore = row.Explanation_more.replace(/\n/g, '<br/>');
}
answers[parseInt(row.Question, 10)] = answer;
}
EUCopyright.answerCache[slug] = answers;
clb(EUCopyright.answerCache[slug]);
});
};
EUCopyright.loadGuide = function(slug, options){
$('.load-question-guide').removeClass('active');
$('.load-question-guide-' + slug).addClass('active');
EUCopyright.loadQuestionGuide(slug, function(){
EUCopyright.applyGuideToAll(EUCopyright.answers[slug], options);
});
};
EUCopyright.trackGoal = function(goalId){
if (window._paq !== undefined) {
window._paq.push(['trackGoal', goalId]);
}
};
EUCopyright.createDownload = function(zip){
var filename = 'consultation-document_en.odt';
if (window.URL === undefined || !JSZip.support.blob) {
$('#download-link-container').downloadify({
swf: EUCopyright.baseurl + '/js/downloadify.swf',
downloadImage: EUCopyright.baseurl + '/img/downloadbutton.png',
width: 116,
height: 45,
filename: filename,
data: function(){
return zip.generate();
},
dataType: 'base64',
onComplete: function(){
EUCopyright.trackGoal(1);
}
});
} else {
$('#download').attr({
'href': window.URL.createObjectURL(zip.generate({type: "blob"})),
'download': filename
}).removeClass('disabled');
}
$('#download-preparing').fadeOut();
};
EUCopyright.showDownloadModal = function(){
var data = EUCopyright.collectData();
EUCopyright.compile(data, EUCopyright.settings).done(EUCopyright.createDownload);
$('#download').addClass('disabled');
$('#download-preparing').show();
$('#download-modal').modal();
};
$(function(){
$('.submit-form').removeClass('hide')
.click(function(e){
e.preventDefault();
EUCopyright.trackGoal(1);
var $c = $('#consultation-form');
var $dm = $('#download-modal');
$dm.find('.final-cases').addClass('hide');
var email = $c.find('*[name=email]').val();
if (email) {
EUCopyright.trackGoal(2);
$dm.find('.email-sent').removeClass('hide');
$dm.find('.email-sent-to').text(email);
} else {
$dm.find('.download-only').removeClass('hide');
}
$dm.modal();
var action = $c.attr('action');
action = action.split('?')[0];
$c.attr('action', action);
$c.submit();
});
$('#download').click(function(){
if (window._paq !== undefined) {
window._paq.push(['trackGoal', 1]);
}
});
$('.load-question-guide').click(function(e){
e.preventDefault();
var params = EUCopyright.parseUrlParams($(this).attr('href'));
EUCopyright.loadGuide(params.guide);
});
$('.load-question').click(function(e){
e.preventDefault();
var slug = $(this).attr('href').substr(1);
var qnum = parseInt($(this).data('question'), 10);
EUCopyright.loadQuestionGuide(slug, function(answers){
EUCopyright.applyGuide(
EUCopyright.answers[slug],
EUCopyright.questions[qnum - 1],
answers[qnum]
);
});
});
$('.div-toggle').hide();
$('.toggle').show().click(function(e){
e.preventDefault();
if ($(this).hasClass('toggle-hide')) {
$(this).hide();
}
var div = $($(this).attr('href'));
if (div.css('display') === 'block') {
div.slideUp();
} else {
div.slideDown();
}
});
$('.needs-js').removeClass('hide');
if (!EUCopyright.supports_html5_storage()) {
$('#localstorage-hint').hide();
}
if (EUCopyright.supports_html5_storage()) {
$('.delete-localstorage').click(function(e){
e.preventDefault();
var answer = window.confirm('Are you sure?');
if (!answer) { return; }
for (var key in localStorage) {
delete localStorage[key];
}
window.location.reload();
});
}
$('.radio-text textarea.save').on('keyup', function(){
var radio = $(this).parent().parent().find('input:not(checked)');
radio.prop('checked', true);
if (EUCopyright.supports_html5_storage()) {
var name = radio.attr('name');
var value = radio.val();
if (value !== null) {
localStorage.setItem(name, value);
}
}
});
$('.sdfootnoteanc').click(function(){
$('#footnote-div').show();
});
$('textarea').autogrow();
if (EUCopyright.supports_html5_storage()) {
$('textarea.save').each(function() {
var id = $(this).attr('id');
var value = localStorage.getItem(id);
$(this).val(value);
});
$('input[type=radio].save').each(function() {
var name = $(this).attr('name');
var value = localStorage.getItem(name);
if (value !== null) {
$('input[type=radio]#' + name + '-' + value).prop('checked', true);
}
});
$('input[type=checkbox].save').each(function() {
var name = $(this).attr('id');
var value = localStorage.getItem(name);
if (!!value) {
$('input[type=checkbox]#' + name).prop('checked', true);
}
});
$('input[type=text].save, input[type=email].save').each(function() {
var id = $(this).attr('id');
var value = localStorage.getItem(id);
$(this).val(value);
});
$('textarea.save').on('keydown change', function() {
var id = $(this).attr('id');
var value = $(this).val();
if (value !== null) {
localStorage.setItem(id, value);
}
});
$('input[type=radio].save').on('click change', function() {
var name = $(this).attr('name');
var value = $(this).val();
if (value !== null) {
localStorage.setItem(name, value);
}
});
$('input[type=checkbox].save').on('click change', function() {
var name = $(this).attr('id');
var value = $(this).prop('checked');
if (value) {
localStorage.setItem(name, value);
} else {
delete localStorage[name];
}
});
$('input[type=text].save, input[type=email].save').on('keydown change', function() {
var id = $(this).attr('id');
var value = $(this).val();
if (value !== null) {
localStorage.setItem(id, value);
}
});
}
setTimeout(function () {
var $sideBar = $('.side-navbar');
$sideBar.affix({
offset: {
top: function () {
var offsetTop = $sideBar.offset().top;
var sideBarMargin = parseInt($sideBar.children(0).css('margin-top'), 10);
var navOuterHeight = $('.navbar').height();
this.top = offsetTop - navOuterHeight - sideBarMargin;
return this.top;
},
bottom: function () {
this.bottom = $('.footer-row').outerHeight(true);
return this.bottom;
}
}
});
}, 100);
var urlParams = EUCopyright.parseUrlParams();
if (urlParams.guide) {
EUCopyright.loadGuide(urlParams.guide);
}
});
/*
Mozilla Cooke Reader/Writer
https://developer.mozilla.org/en-US/docs/Web/API/document.cookie#A_little_framework.3A_a_complete_cookies_reader.2Fwriter_with_full_unicode_support
*/
window.docCookies = {
getItem: function (sKey) {
return decodeURIComponent(document.cookie.replace(new RegExp("(?:(?:^|.*;)\\s*" + encodeURIComponent(sKey).replace(/[\-\.\+\*]/g, "\\$&") + "\\s*\\=\\s*([^;]*).*$)|^.*$"), "$1")) || null;
},
setItem: function (sKey, sValue, vEnd, sPath, sDomain, bSecure) {
if (!sKey || /^(?:expires|max\-age|path|domain|secure)$/i.test(sKey)) { return false; }
var sExpires = "";
if (vEnd) {
switch (vEnd.constructor) {
case Number:
sExpires = vEnd === Infinity ? "; expires=Fri, 31 Dec 9999 23:59:59 GMT" : "; max-age=" + vEnd;
break;
case String:
sExpires = "; expires=" + vEnd;
break;
case Date:
sExpires = "; expires=" + vEnd.toUTCString();
break;
}
}
document.cookie = encodeURIComponent(sKey) + "=" + encodeURIComponent(sValue) + sExpires + (sDomain ? "; domain=" + sDomain : "") + (sPath ? "; path=" + sPath : "") + (bSecure ? "; secure" : "");
return true;
},
removeItem: function (sKey, sPath, sDomain) {
if (!sKey || !this.hasItem(sKey)) { return false; }
document.cookie = encodeURIComponent(sKey) + "=; expires=Thu, 01 Jan 1970 00:00:00 GMT" + ( sDomain ? "; domain=" + sDomain : "") + ( sPath ? "; path=" + sPath : "");
return true;
},
hasItem: function (sKey) {
return (new RegExp("(?:^|;\\s*)" + encodeURIComponent(sKey).replace(/[\-\.\+\*]/g, "\\$&") + "\\s*\\=")).test(document.cookie);
},
keys: /* optional method: you can safely remove it! */ function () {
var aKeys = document.cookie.replace(/((?:^|\s*;)[^\=]+)(?=;|$)|^\s*|\s*(?:\=[^;]*)?(?:\1|$)/g, "").split(/\s*(?:\=[^;]*)?;\s*/);
for (var nIdx = 0; nIdx < aKeys.length; nIdx++) { aKeys[nIdx] = decodeURIComponent(aKeys[nIdx]); }
return aKeys;
}
};
}());<|fim▁end|> | }
if (answer.explanation) { |
<|file_name|>test_run.py<|end_file_name|><|fim▁begin|>import argparse
from getpass import getpass
import json
import sys
import textwrap
import zmq
import colorama
from colorama import Fore
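# Pushes the code snippet to the REPL over ZeroMQ (PUSH on :2000, PULL on
# :2001) and relays stdout/stderr/input requests until 'finished' arrives.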
def execute(code):
ctx = zmq.Context.instance()
ctx.setsockopt(zmq.LINGER, 50)
repl_in = ctx.socket(zmq.PUSH)
repl_in.connect('tcp://127.0.0.1:2000')
repl_out = ctx.socket(zmq.PULL)
repl_out.connect('tcp://127.0.0.1:2001')
with repl_in, repl_out:
msg = (b'xcode1', code.encode('utf8'))
repl_in.send_multipart(msg)
while True:
data = repl_out.recv_multipart()
msg_type = data[0].decode('ascii')
msg_data = data[1].decode('utf8')
if msg_type == 'finished':
print('--- finished ---')
break
elif msg_type == 'stdout':
print(msg_data, end='')
sys.stdout.flush()
elif msg_type == 'stderr':
print(Fore.RED + msg_data + Fore.RESET, end='', file=sys.stderr)
sys.stderr.flush()
elif msg_type == 'waiting-input':
opts = json.loads(msg_data)
if opts['is_password']:
t = getpass(prompt='')
else:
t = input()
repl_in.send_multipart([b'input', t.encode('utf8')])
else:<|fim▁hole|> print(msg_type)
print(msg_data)
sources = {
'interleaving': '''
import sys
print('asdf', end='', file=sys.stderr)
print('qwer', end='', file=sys.stdout)
print('zxcv', file=sys.stderr)
''',
'long_running': '''
import time
for i in range(10):
time.sleep(1)
print(i)
''',
'user_input': '''
import hashlib
import getpass
print('Please type your name.')
name = input('>> ')
print('Hello, {0}'.format(name))
print('Please type your password.')
pw = getpass.getpass()
m = hashlib.sha256()
m.update(pw.encode('utf8'))
print('Your password hash is {0}'.format(m.hexdigest()))
''',
'early_exception': '''a = wrong-+****syntax''',
'runtime_error': '''
def x():
raise RuntimeError('asdf')
def s():
x()
if __name__ == '__main__':
s()
''',
'tensorflow': '''
import tensorflow as tf
print('TensorFlow version:', tf.__version__)
print(tf.test.is_gpu_available())
print('ok')'''
}
def main():
parser = argparse.ArgumentParser()
parser.add_argument('program_name')
    args = parser.parse_args()
src = sources[args.program_name]
print('Test code:')
print(textwrap.indent(src, ' '))
print()
print('Execution log:')
execute(src)
if __name__ == '__main__':
colorama.init()
main()<|fim▁end|> | print('--- other msg ---') |
<|file_name|>gif.js<|end_file_name|><|fim▁begin|>var gif_bgs = [];
var gif_center = [];
var length_bgs = 0;
var length_center = 0;
var timer;
var duration = 4000;
var loaded = 0;
var next_bg;
var next_center;
var audio = document.getElementById("sound");
var muted = false;
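// Swaps in the preloaded background/center GIFs, restarts the rotation timer
// and starts preloading the next randomly chosen pair.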
function next(e){
clearInterval(timer);
timer = setInterval(next, duration);
$("#background").css("background-image","url("+gif_bgs[next_bg]+")");
$("#center").css("background-image","url("+gif_center[next_center]+")");
next_bg = Math.floor( Math.random()*length_bgs );
next_center = Math.floor( Math.random()*length_center );
$("#load_bg").attr("src",gif_bgs[next_bg]);
$("#load_center").attr("src",gif_center[next_center]);
}
function toggleInfo(){
$("#info-overlay").toggleClass("show");
$("#info-btn").toggleClass("show");
}
function check(){
if (loaded > 1) {
next_bg = Math.floor( Math.random()*length_bgs );
next_center = Math.floor( Math.random()*length_center );
next();
$("#wrapper").click(next);
}
}
function toggleSound(){
if (muted) {
muted = false;
audio.muted = muted;
$("#sound-btn").removeClass('muted');
}else{
muted = true;
audio.muted = muted;<|fim▁hole|>}
function init() {
$("#info-btn").click(toggleInfo);
$("#sound-btn").click(toggleSound);
$.ajax({
url: "json/bg.json",
cache: false,
dataType: "json",
success: function(d){
gif_bgs = d;
length_bgs = gif_bgs.length;
loaded++;
check();
}
});
$.ajax({
url: "json/center.json",
cache: false,
dataType: "json",
success: function(d){
gif_center = d;
length_center = gif_center.length;
loaded++;
check();
}
});
}
Meteor.startup(function(){init();});<|fim▁end|> | $("#sound-btn").addClass('muted');
}
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>pub mod feed;
pub mod link_checking;
pub mod sass;
pub mod sitemap;
pub mod tpls;
use std::collections::HashMap;
use std::fs::remove_dir_all;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex, RwLock};
use lazy_static::lazy_static;
use rayon::prelude::*;
use tera::{Context, Tera};
use walkdir::{DirEntry, WalkDir};
use config::{get_config, Config};
use errors::{bail, Error, Result};
use front_matter::InsertAnchor;
use library::{find_taxonomies, Library, Page, Paginator, Section, Taxonomy};
use relative_path::RelativePathBuf;
use std::time::Instant;
use templates::render_redirect_template;
use utils::fs::{
copy_directory, copy_file_if_needed, create_directory, create_file, ensure_directory_exists,
};
use utils::minify;
use utils::net::get_available_port;
use utils::templates::render_template;
lazy_static! {
/// The in-memory rendered map content
pub static ref SITE_CONTENT: Arc<RwLock<HashMap<RelativePathBuf, String>>> = Arc::new(RwLock::new(HashMap::new()));
}
/// Where are we building the site
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum BuildMode {
/// On the filesystem -> `zola build`, The path is the `output_path`
Disk,
/// In memory for the content -> `zola serve`
Memory,
}
#[derive(Debug)]
pub struct Site {
/// The base path of the zola site
pub base_path: PathBuf,
/// The parsed config for the site
pub config: Config,
pub tera: Tera,
imageproc: Arc<Mutex<imageproc::Processor>>,
// the live reload port to be used if there is one
pub live_reload: Option<u16>,
pub output_path: PathBuf,
content_path: PathBuf,
pub static_path: PathBuf,
pub taxonomies: Vec<Taxonomy>,
/// A map of all .md files (section and pages) and their permalink
/// We need that if there are relative links in the content that need to be resolved
pub permalinks: HashMap<String, String>,
/// Contains all pages and sections of the site
pub library: Arc<RwLock<Library>>,
/// Whether to load draft pages
include_drafts: bool,
build_mode: BuildMode,
}
impl Site {
/// Parse a site at the given path. Defaults to the current dir
/// Passing in a path is used in tests and when --root argument is passed
pub fn new<P: AsRef<Path>, P2: AsRef<Path>>(path: P, config_file: P2) -> Result<Site> {
let path = path.as_ref();
let config_file = config_file.as_ref();
let mut config = get_config(config_file);
config.load_extra_syntaxes(path)?;
if let Some(theme) = config.theme.clone() {
// Grab data from the extra section of the theme
config.merge_with_theme(&path.join("themes").join(&theme).join("theme.toml"))?;
}
let tera = tpls::load_tera(path, &config)?;
let content_path = path.join("content");
let static_path = path.join("static");
let imageproc =
imageproc::Processor::new(content_path.clone(), &static_path, &config.base_url);
let output_path = path.join(config.output_dir.clone());
let site = Site {
base_path: path.to_path_buf(),
config,
tera,
imageproc: Arc::new(Mutex::new(imageproc)),
live_reload: None,
output_path,
content_path,
static_path,
taxonomies: Vec::new(),
permalinks: HashMap::new(),
include_drafts: false,
// We will allocate it properly later on
library: Arc::new(RwLock::new(Library::new(0, 0, false))),
build_mode: BuildMode::Disk,
};
Ok(site)
}
/// Enable some `zola serve` related options
pub fn enable_serve_mode(&mut self) {
SITE_CONTENT.write().unwrap().clear();
self.config.enable_serve_mode();
self.build_mode = BuildMode::Memory;
}
/// Set the site to load the drafts.
/// Needs to be called before loading it
pub fn include_drafts(&mut self) {
self.include_drafts = true;
}
/// The index sections are ALWAYS at those paths
/// There are one index section for the default language + 1 per language
fn index_section_paths(&self) -> Vec<(PathBuf, Option<String>)> {
let mut res = vec![(self.content_path.join("_index.md"), None)];
for language in &self.config.languages {
res.push((
self.content_path.join(format!("_index.{}.md", language.code)),
Some(language.code.clone()),
));
}
res
}
/// We avoid the port the server is going to use as it's not bound yet
/// when calling this function and we could end up having tried to bind
/// both http and websocket server to the same port
pub fn enable_live_reload(&mut self, port_to_avoid: u16) {
self.live_reload = get_available_port(port_to_avoid);
}
/// Only used in `zola serve` to re-use the initial websocket port
pub fn enable_live_reload_with_port(&mut self, live_reload_port: u16) {
self.live_reload = Some(live_reload_port);
}
/// Reloads the templates and rebuild the site without re-rendering the Markdown.
pub fn reload_templates(&mut self) -> Result<()> {
self.tera.full_reload()?;
// TODO: be smarter than that, no need to recompile sass for example
self.build()
}
pub fn set_base_url(&mut self, base_url: String) {
let mut imageproc = self.imageproc.lock().expect("Couldn't lock imageproc (set_base_url)");
imageproc.set_base_url(&base_url);
self.config.base_url = base_url;
}
pub fn set_output_path<P: AsRef<Path>>(&mut self, path: P) {
self.output_path = path.as_ref().to_path_buf();
}
/// Reads all .md files in the `content` directory and create pages/sections
/// out of them
pub fn load(&mut self) -> Result<()> {
let base_path = self.base_path.to_string_lossy().replace("\\", "/");
self.library = Arc::new(RwLock::new(Library::new(0, 0, self.config.is_multilingual())));
let mut pages_insert_anchors = HashMap::new();
// not the most elegant loop, but this is necessary to use skip_current_dir
// which we can only decide to use after we've deserialised the section
        // so it's kinda necessary
let mut dir_walker = WalkDir::new(format!("{}/{}", base_path, "content/")).into_iter();
let mut allowed_index_filenames: Vec<_> =
self.config.languages.iter().map(|l| format!("_index.{}.md", l.code)).collect();
allowed_index_filenames.push("_index.md".to_string());
loop {
let entry: DirEntry = match dir_walker.next() {
None => break,
Some(Err(_)) => continue,
Some(Ok(entry)) => entry,
};
let path = entry.path();
let file_name = match path.file_name() {
None => continue,
Some(name) => name.to_str().unwrap(),
};
// ignore excluded content
match &self.config.ignored_content_globset {
Some(gs) => {
if gs.is_match(path) {
continue;
}
}
None => (),
}
// we process a section when we encounter the dir
// so we can process it before any of the pages
// therefore we should skip the actual file to avoid duplication
if file_name.starts_with("_index.") {
continue;
}
// skip hidden files and non md files
if !path.is_dir() && (!file_name.ends_with(".md") || file_name.starts_with('.')) {
continue;
}
// is it a section or not?
if path.is_dir() {
// if we are processing a section we have to collect
                // index files for all languages and process them simultaneously
// before any of the pages
let index_files = WalkDir::new(&path)
.max_depth(1)
.into_iter()
.filter_map(|e| match e {
Err(_) => None,
Ok(f) => {
let path_str = f.path().file_name().unwrap().to_str().unwrap();
if f.path().is_file()
&& allowed_index_filenames.iter().find(|&s| *s == path_str).is_some()
{
Some(f)
} else {
// https://github.com/getzola/zola/issues/1244
if path_str.starts_with("_index.") {
println!("Expected a section filename, got `{}`. Allowed values: `{:?}`", path_str, &allowed_index_filenames);
}
None
}
}
})
.collect::<Vec<DirEntry>>();
for index_file in index_files {
let section = match Section::from_file(
index_file.path(),
&self.config,
&self.base_path,
) {
Err(_) => continue,
Ok(sec) => sec,
};
                    // if the section is drafted we can skip the entire dir
if section.meta.draft && !self.include_drafts {
dir_walker.skip_current_dir();
continue;
}
self.add_section(section, false)?;
}
} else {
let page = Page::from_file(path, &self.config, &self.base_path)
.expect("error deserialising page");
// should we skip drafts?
if page.meta.draft && !self.include_drafts {
continue;
}
pages_insert_anchors.insert(
page.file.path.clone(),
self.find_parent_section_insert_anchor(&page.file.parent.clone(), &page.lang),
);
self.add_page(page, false)?;
}
}
self.create_default_index_sections()?;
{
let library = self.library.read().unwrap();
let collisions = library.check_for_path_collisions();
if !collisions.is_empty() {
return Err(Error::from_collisions(collisions));
}
}
// taxonomy Tera fns are loaded in `register_early_global_fns`
// so we do need to populate it first.
self.populate_taxonomies()?;
tpls::register_early_global_fns(self);
self.populate_sections();
self.render_markdown()?;
tpls::register_tera_global_fns(self);
// Needs to be done after rendering markdown as we only get the anchors at that point
link_checking::check_internal_links_with_anchors(&self)?;
if self.config.is_in_check_mode() {
link_checking::check_external_links(&self)?;
}
Ok(())
}
/// Insert a default index section for each language if necessary so we don't need to create
/// a _index.md to render the index page at the root of the site
pub fn create_default_index_sections(&mut self) -> Result<()> {
for (index_path, lang) in self.index_section_paths() {
if let Some(ref index_section) = self.library.read().unwrap().get_section(&index_path) {
if self.config.build_search_index && !index_section.meta.in_search_index {
bail!(
"You have enabled search in the config but disabled it in the index section: \
                    either turn off the search in the config or remove `in_search_index = false` from the \
section front-matter."
)
}
}
let mut library = self.library.write().expect("Get lock for load");
// Not in else because of borrow checker
if !library.contains_section(&index_path) {
let mut index_section = Section::default();
index_section.file.parent = self.content_path.clone();
index_section.file.filename =
index_path.file_name().unwrap().to_string_lossy().to_string();
if let Some(ref l) = lang {
index_section.file.name = format!("_index.{}", l);
index_section.path = format!("{}/", l);
index_section.permalink = self.config.make_permalink(l);
let filename = format!("_index.{}.md", l);
index_section.file.path = self.content_path.join(&filename);
index_section.file.relative = filename;
} else {
index_section.file.name = "_index".to_string();
index_section.permalink = self.config.make_permalink("");
index_section.file.path = self.content_path.join("_index.md");
index_section.file.relative = "_index.md".to_string();
index_section.path = "/".to_string();
}
index_section.lang = index_section.file.find_language(&self.config)?;
library.insert_section(index_section);
}
}
Ok(())
}
/// Render the markdown of all pages/sections
/// Used in a build and in `serve` if a shortcode has changed
pub fn render_markdown(&mut self) -> Result<()> {
// Another silly thing needed to not borrow &self in parallel and
// make the borrow checker happy
let permalinks = &self.permalinks;
let tera = &self.tera;
let config = &self.config;
// This is needed in the first place because of silly borrow checker
let mut pages_insert_anchors = HashMap::new();
for (_, p) in self.library.read().unwrap().pages() {
pages_insert_anchors.insert(
p.file.path.clone(),
self.find_parent_section_insert_anchor(&p.file.parent.clone(), &p.lang),
);
}
let mut library = self.library.write().expect("Get lock for render_markdown");
library
.pages_mut()
.values_mut()
.collect::<Vec<_>>()
.par_iter_mut()
.map(|page| {
let insert_anchor = pages_insert_anchors[&page.file.path];
page.render_markdown(permalinks, tera, config, insert_anchor)
})
.collect::<Result<()>>()?;
library
.sections_mut()
.values_mut()
.collect::<Vec<_>>()
.par_iter_mut()
.map(|section| section.render_markdown(permalinks, tera, config))
.collect::<Result<()>>()?;
Ok(())
}
/// Add a page to the site
/// The `render` parameter is used in the serve command with --fast, when rebuilding a page.
pub fn add_page(&mut self, mut page: Page, render_md: bool) -> Result<()> {
self.permalinks.insert(page.file.relative.clone(), page.permalink.clone());
if render_md {
let insert_anchor =
self.find_parent_section_insert_anchor(&page.file.parent, &page.lang);
page.render_markdown(&self.permalinks, &self.tera, &self.config, insert_anchor)?;
}
let mut library = self.library.write().expect("Get lock for add_page");
library.remove_page(&page.file.path);
library.insert_page(page);
Ok(())
}
/// Adds a page to the site and render it
/// Only used in `zola serve --fast`
pub fn add_and_render_page(&mut self, path: &Path) -> Result<()> {
let page = Page::from_file(path, &self.config, &self.base_path)?;
self.add_page(page, true)?;
self.populate_sections();
self.populate_taxonomies()?;
let library = self.library.read().unwrap();
let page = library.get_page(&path).unwrap();
self.render_page(&page)
}
/// Add a section to the site
/// The `render` parameter is used in the serve command with --fast, when rebuilding a page.
pub fn add_section(&mut self, mut section: Section, render_md: bool) -> Result<()> {
self.permalinks.insert(section.file.relative.clone(), section.permalink.clone());
if render_md {
section.render_markdown(&self.permalinks, &self.tera, &self.config)?;
}
let mut library = self.library.write().expect("Get lock for add_section");
library.remove_section(§ion.file.path);
library.insert_section(section);
Ok(())
}
/// Adds a section to the site and render it
/// Only used in `zola serve --fast`
pub fn add_and_render_section(&mut self, path: &Path) -> Result<()> {
let section = Section::from_file(path, &self.config, &self.base_path)?;
self.add_section(section, true)?;
self.populate_sections();
let library = self.library.read().unwrap();
let section = library.get_section(&path).unwrap();
self.render_section(§ion, true)
}
    /// Finds the insert_anchor for the parent section of the directory at `parent_path`.
    /// Defaults to `InsertAnchor::None` if no parent section is found
pub fn find_parent_section_insert_anchor(
&self,
parent_path: &PathBuf,
lang: &str,
) -> InsertAnchor {
let parent = if lang != self.config.default_language {
parent_path.join(format!("_index.{}.md", lang))
} else {
parent_path.join("_index.md")
};
match self.library.read().unwrap().get_section(&parent) {
Some(s) => s.meta.insert_anchor_links,
None => InsertAnchor::None,
}
}
/// Find out the direct subsections of each subsection if there are some
/// as well as the pages for each section
pub fn populate_sections(&mut self) {
let mut library = self.library.write().expect("Get lock for populate_sections");
library.populate_sections(&self.config);
}
    /// Find all the tags and categories if requested in the config
pub fn populate_taxonomies(&mut self) -> Result<()> {
if self.config.taxonomies.is_empty() {
return Ok(());
}
self.taxonomies = find_taxonomies(&self.config, &self.library.read().unwrap())?;
Ok(())
}
/// Inject live reload script tag if in live reload mode
fn inject_livereload(&self, mut html: String) -> String {
if let Some(port) = self.live_reload {
let script =
format!(r#"<script src="/livereload.js?port={}&mindelay=10"></script>"#, port,);
if let Some(index) = html.rfind("</body>") {
html.insert_str(index, &script);
} else {
html.push_str(&script);
}
}
html
}
/// Copy the main `static` folder and the theme `static` folder if a theme is used
pub fn copy_static_directories(&self) -> Result<()> {
// The user files will overwrite the theme files
if let Some(ref theme) = self.config.theme {
copy_directory(
&self.base_path.join("themes").join(theme).join("static"),
&self.output_path,
false,
)?;
}
// We're fine with missing static folders
if self.static_path.exists() {
copy_directory(&self.static_path, &self.output_path, self.config.hard_link_static)?;
}
Ok(())
}
pub fn num_img_ops(&self) -> usize {
let imageproc = self.imageproc.lock().expect("Couldn't lock imageproc (num_img_ops)");
imageproc.num_img_ops()
}
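    /// Prune stale processed images and run every queued image operation.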
pub fn process_images(&self) -> Result<()> {
let mut imageproc =
self.imageproc.lock().expect("Couldn't lock imageproc (process_images)");
imageproc.prune()?;
imageproc.do_process()
}
/// Deletes the `public` directory if it exists
pub fn clean(&self) -> Result<()> {
if self.output_path.exists() {
// Delete current `public` directory so we can start fresh
remove_dir_all(&self.output_path)
.map_err(|e| Error::chain("Couldn't delete output directory", e))?;
}
Ok(())
}
/// Handles whether to write to disk or to memory
pub fn write_content(
&self,
components: &[&str],
filename: &str,
content: String,
create_dirs: bool,
) -> Result<PathBuf> {
let write_dirs = self.build_mode == BuildMode::Disk || create_dirs;
ensure_directory_exists(&self.output_path)?;
let mut site_path = RelativePathBuf::new();
let mut current_path = self.output_path.to_path_buf();
for component in components {
current_path.push(component);
site_path.push(component);
if !current_path.exists() && write_dirs {
create_directory(¤t_path)?;
}
}
if write_dirs {
create_directory(¤t_path)?;
}
let final_content = if !filename.ends_with("html") || !self.config.minify_html {
content
} else {
match minify::html(content) {
Ok(minified_content) => minified_content,
Err(error) => bail!(error),
}
};
match self.build_mode {
BuildMode::Disk => {
let end_path = current_path.join(filename);
create_file(&end_path, &final_content)?;
}
BuildMode::Memory => {
let site_path =
if filename != "index.html" { site_path.join(filename) } else { site_path };
SITE_CONTENT.write().unwrap().insert(site_path, final_content);
}
}
Ok(current_path)
}
fn copy_asset(&self, src: &Path, dest: &PathBuf) -> Result<()> {
copy_file_if_needed(src, dest, self.config.hard_link_static)
}
/// Renders a single content page
pub fn render_page(&self, page: &Page) -> Result<()> {
let output = page.render_html(&self.tera, &self.config, &self.library.read().unwrap())?;
let content = self.inject_livereload(output);
let components: Vec<&str> = page.path.split('/').collect();
let current_path =
self.write_content(&components, "index.html", content, !page.assets.is_empty())?;
// Copy any asset we found previously into the same directory as the index.html
for asset in &page.assets {
let asset_path = asset.as_path();
self.copy_asset(
&asset_path,
¤t_path
.join(asset_path.file_name().expect("Couldn't get filename from page asset")),
)?;
}
Ok(())
}
/// Deletes the `public` directory (only for `zola build`) and builds the site
pub fn build(&self) -> Result<()> {
let mut start = Instant::now();
// Do not clean on `zola serve` otherwise we end up copying assets all the time
if self.build_mode == BuildMode::Disk {
self.clean()?;<|fim▁hole|>
// Generate/move all assets before rendering any content
if let Some(ref theme) = self.config.theme {
let theme_path = self.base_path.join("themes").join(theme);
if theme_path.join("sass").exists() {
sass::compile_sass(&theme_path, &self.output_path)?;
start = log_time(start, "Compiled theme Sass");
}
}
if self.config.compile_sass {
sass::compile_sass(&self.base_path, &self.output_path)?;
start = log_time(start, "Compiled own Sass");
}
if self.config.build_search_index {
self.build_search_index()?;
start = log_time(start, "Built search index");
}
// Render aliases first to allow overwriting
self.render_aliases()?;
start = log_time(start, "Rendered aliases");
self.render_sections()?;
start = log_time(start, "Rendered sections");
self.render_orphan_pages()?;
start = log_time(start, "Rendered orphan pages");
self.render_sitemap()?;
start = log_time(start, "Rendered sitemap");
let library = self.library.read().unwrap();
if self.config.generate_feed {
let is_multilingual = self.config.is_multilingual();
let pages = if is_multilingual {
library
.pages_values()
.iter()
.filter(|p| p.lang == self.config.default_language)
.cloned()
.collect()
} else {
library.pages_values()
};
self.render_feed(pages, None, &self.config.default_language, |c| c)?;
start = log_time(start, "Generated feed in default language");
}
for lang in &self.config.languages {
if !lang.feed {
continue;
}
let pages =
library.pages_values().iter().filter(|p| p.lang == lang.code).cloned().collect();
self.render_feed(pages, Some(&PathBuf::from(lang.code.clone())), &lang.code, |c| c)?;
start = log_time(start, "Generated feed in other language");
}
self.render_404()?;
start = log_time(start, "Rendered 404");
self.render_robots()?;
start = log_time(start, "Rendered robots.txt");
self.render_taxonomies()?;
start = log_time(start, "Rendered taxonomies");
// We process images at the end as we might have picked up images to process from markdown
// or from templates
self.process_images()?;
start = log_time(start, "Processed images");
// Processed images will be in static so the last step is to copy it
self.copy_static_directories()?;
log_time(start, "Copied static dir");
Ok(())
}
pub fn build_search_index(&self) -> Result<()> {
ensure_directory_exists(&self.output_path)?;
// TODO: add those to the SITE_CONTENT map
// index first
create_file(
&self.output_path.join(&format!("search_index.{}.js", self.config.default_language)),
&format!(
"window.searchIndex = {};",
search::build_index(
&self.config.default_language,
&self.library.read().unwrap(),
&self.config
)?
),
)?;
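// Each additional language with search enabled gets its own search_index.<code>.js file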
for language in &self.config.languages {
if language.code != self.config.default_language && language.search {
create_file(
&self.output_path.join(&format!("search_index.{}.js", &language.code)),
&format!(
"window.searchIndex = {};",
search::build_index(
&language.code,
&self.library.read().unwrap(),
&self.config
)?
),
)?;
}
}
// then elasticlunr.min.js
create_file(&self.output_path.join("elasticlunr.min.js"), search::ELASTICLUNR_JS)?;
Ok(())
}
fn render_alias(&self, alias: &str, permalink: &str) -> Result<()> {
let mut split = alias.split('/').collect::<Vec<_>>();
// If the alias ends with an html file name, use that instead of mapping
// as a path containing an `index.html`
let page_name = match split.pop() {
Some(part) if part.ends_with(".html") => part,
Some(part) => {
split.push(part);
"index.html"
}
None => "index.html",
};
let content = render_redirect_template(&permalink, &self.tera)?;
self.write_content(&split, page_name, content, false)?;
Ok(())
}
/// Renders all the aliases for each page/section: a magic HTML template that redirects to
/// the canonical one
pub fn render_aliases(&self) -> Result<()> {
ensure_directory_exists(&self.output_path)?;
let library = self.library.read().unwrap();
for (_, page) in library.pages() {
for alias in &page.meta.aliases {
self.render_alias(&alias, &page.permalink)?;
}
}
for (_, section) in library.sections() {
for alias in §ion.meta.aliases {
self.render_alias(&alias, §ion.permalink)?;
}
}
Ok(())
}
/// Renders 404.html
pub fn render_404(&self) -> Result<()> {
ensure_directory_exists(&self.output_path)?;
let mut context = Context::new();
context.insert("config", &self.config);
context.insert("lang", &self.config.default_language);
let output = render_template("404.html", &self.tera, context, &self.config.theme)?;
let content = self.inject_livereload(output);
self.write_content(&[], "404.html", content, false)?;
Ok(())
}
/// Renders robots.txt
pub fn render_robots(&self) -> Result<()> {
ensure_directory_exists(&self.output_path)?;
let mut context = Context::new();
context.insert("config", &self.config);
let content = render_template("robots.txt", &self.tera, context, &self.config.theme)?;
self.write_content(&[], "robots.txt", content, false)?;
Ok(())
}
/// Renders all taxonomies
pub fn render_taxonomies(&self) -> Result<()> {
for taxonomy in &self.taxonomies {
self.render_taxonomy(taxonomy)?;
}
Ok(())
}
fn render_taxonomy(&self, taxonomy: &Taxonomy) -> Result<()> {
if taxonomy.items.is_empty() {
return Ok(());
}
ensure_directory_exists(&self.output_path)?;
let mut components = Vec::new();
if taxonomy.kind.lang != self.config.default_language {
components.push(taxonomy.kind.lang.as_ref());
}
components.push(taxonomy.slug.as_ref());
let list_output =
taxonomy.render_all_terms(&self.tera, &self.config, &self.library.read().unwrap())?;
let content = self.inject_livereload(list_output);
self.write_content(&components, "index.html", content, false)?;
let library = self.library.read().unwrap();
taxonomy
.items
.par_iter()
.map(|item| {
let mut comp = components.clone();
comp.push(&item.slug);
if taxonomy.kind.is_paginated() {
self.render_paginated(
comp.clone(),
&Paginator::from_taxonomy(&taxonomy, item, &library),
)?;
} else {
let single_output =
taxonomy.render_term(item, &self.tera, &self.config, &library)?;
let content = self.inject_livereload(single_output);
self.write_content(&comp, "index.html", content, false)?;
}
if taxonomy.kind.feed {
self.render_feed(
item.pages.iter().map(|p| library.get_page_by_key(*p)).collect(),
Some(&PathBuf::from(format!("{}/{}", taxonomy.slug, item.slug))),
if self.config.is_multilingual() && !taxonomy.kind.lang.is_empty() {
&taxonomy.kind.lang
} else {
&self.config.default_language
},
|mut context: Context| {
context.insert("taxonomy", &taxonomy.kind);
context
.insert("term", &feed::SerializedFeedTaxonomyItem::from_item(item));
context
},
)
} else {
Ok(())
}
})
.collect::<Result<()>>()
}
/// What it says on the tin
pub fn render_sitemap(&self) -> Result<()> {
ensure_directory_exists(&self.output_path)?;
let library = self.library.read().unwrap();
let all_sitemap_entries =
{ sitemap::find_entries(&library, &self.taxonomies[..], &self.config) };
let sitemap_limit = 30000;
if all_sitemap_entries.len() < sitemap_limit {
// Create single sitemap
let mut context = Context::new();
context.insert("entries", &all_sitemap_entries);
let sitemap = render_template("sitemap.xml", &self.tera, context, &self.config.theme)?;
self.write_content(&[], "sitemap.xml", sitemap, false)?;
return Ok(());
}
// Create multiple sitemaps (max 30000 urls each)
let mut sitemap_index = Vec::new();
for (i, chunk) in
all_sitemap_entries.iter().collect::<Vec<_>>().chunks(sitemap_limit).enumerate()
{
let mut context = Context::new();
context.insert("entries", &chunk);
let sitemap = render_template("sitemap.xml", &self.tera, context, &self.config.theme)?;
let file_name = format!("sitemap{}.xml", i + 1);
self.write_content(&[], &file_name, sitemap, false)?;
let mut sitemap_url = self.config.make_permalink(&file_name);
sitemap_url.pop(); // Remove trailing slash
sitemap_index.push(sitemap_url);
}
// Create the main sitemap index that references the numbered sitemaps
let mut main_context = Context::new();
main_context.insert("sitemaps", &sitemap_index);
let sitemap = render_template(
"split_sitemap_index.xml",
&self.tera,
main_context,
&self.config.theme,
)?;
self.write_content(&[], "sitemap.xml", sitemap, false)?;
Ok(())
}
/// Renders a feed for the given pages at the given base path.
/// If `base_path` is `None`, the feed is rendered for the whole site
/// at the root of the output folder.
pub fn render_feed(
&self,
all_pages: Vec<&Page>,
base_path: Option<&PathBuf>,
lang: &str,
additional_context_fn: impl Fn(Context) -> Context,
) -> Result<()> {
ensure_directory_exists(&self.output_path)?;
let feed = match feed::render_feed(self, all_pages, lang, base_path, additional_context_fn)?
{
Some(v) => v,
None => return Ok(()),
};
let feed_filename = &self.config.feed_filename;
if let Some(ref base) = base_path {
let mut components = Vec::new();
for component in base.components() {
// TODO: avoid cloning the paths
components.push(component.as_os_str().to_string_lossy().as_ref().to_string());
}
self.write_content(
&components.iter().map(|x| x.as_ref()).collect::<Vec<_>>(),
&feed_filename,
feed,
false,
)?;
} else {
self.write_content(&[], &feed_filename, feed, false)?;
}
Ok(())
}
/// Renders a single section
pub fn render_section(&self, section: &Section, render_pages: bool) -> Result<()> {
ensure_directory_exists(&self.output_path)?;
let mut output_path = self.output_path.clone();
let mut components: Vec<&str> = Vec::new();
let create_directories = self.build_mode == BuildMode::Disk || !section.assets.is_empty();
if section.lang != self.config.default_language {
components.push(§ion.lang);
output_path.push(§ion.lang);
if !output_path.exists() && create_directories {
create_directory(&output_path)?;
}
}
for component in §ion.file.components {
components.push(component);
output_path.push(component);
if !output_path.exists() && create_directories {
create_directory(&output_path)?;
}
}
if section.meta.generate_feed {
let library = &self.library.read().unwrap();
let pages = section.pages.iter().map(|k| library.get_page_by_key(*k)).collect();
self.render_feed(
pages,
Some(&PathBuf::from(§ion.path[1..])),
§ion.lang,
|mut context: Context| {
context.insert("section", §ion.to_serialized(library));
context
},
)?;
}
// Copy any asset we found previously into the same directory as the index.html
for asset in §ion.assets {
let asset_path = asset.as_path();
self.copy_asset(
&asset_path,
&output_path.join(
asset_path.file_name().expect("Failed to get asset filename for section"),
),
)?;
}
if render_pages {
section
.pages
.par_iter()
.map(|k| self.render_page(self.library.read().unwrap().get_page_by_key(*k)))
.collect::<Result<()>>()?;
}
if !section.meta.render {
return Ok(());
}
if let Some(ref redirect_to) = section.meta.redirect_to {
let permalink = self.config.make_permalink(redirect_to);
self.write_content(
&components,
"index.html",
render_redirect_template(&permalink, &self.tera)?,
create_directories,
)?;
return Ok(());
}
if section.meta.is_paginated() {
self.render_paginated(
components,
&Paginator::from_section(§ion, &self.library.read().unwrap()),
)?;
} else {
let output =
section.render_html(&self.tera, &self.config, &self.library.read().unwrap())?;
let content = self.inject_livereload(output);
self.write_content(&components, "index.html", content, false)?;
}
Ok(())
}
/// Renders all sections
pub fn render_sections(&self) -> Result<()> {
self.library
.read()
.unwrap()
.sections_values()
.into_par_iter()
.map(|s| self.render_section(s, true))
.collect::<Result<()>>()
}
/// Renders all pages that do not belong to any sections
pub fn render_orphan_pages(&self) -> Result<()> {
ensure_directory_exists(&self.output_path)?;
let library = self.library.read().unwrap();
for page in library.get_all_orphan_pages() {
self.render_page(page)?;
}
Ok(())
}
/// Renders a list of pages when the section/index requests pagination.
pub fn render_paginated<'a>(
&self,
components: Vec<&'a str>,
paginator: &'a Paginator,
) -> Result<()> {
ensure_directory_exists(&self.output_path)?;
let index_components = components.clone();
paginator
.pagers
.par_iter()
.map(|pager| {
let mut pager_components = index_components.clone();
pager_components.push(&paginator.paginate_path);
let pager_path = format!("{}", pager.index);
pager_components.push(&pager_path);
let output = paginator.render_pager(
pager,
&self.config,
&self.tera,
&self.library.read().unwrap(),
)?;
let content = self.inject_livereload(output);
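// The first pager is written at the section root; its numbered path only receives a redirect back to the canonical permalink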
if pager.index > 1 {
self.write_content(&pager_components, "index.html", content, false)?;
} else {
self.write_content(&index_components, "index.html", content, false)?;
self.write_content(
&pager_components,
"index.html",
render_redirect_template(&paginator.permalink, &self.tera)?,
false,
)?;
}
Ok(())
})
.collect::<Result<()>>()
}
}
fn log_time(start: Instant, message: &str) -> Instant {
let do_print = std::env::var("ZOLA_PERF_LOG").is_ok();
let now = Instant::now();
if do_print {
println!("{} took {}ms", message, now.duration_since(start).as_millis());
}
now
}<|fim▁end|> | }
start = log_time(start, "Cleaned folder"); |
<|file_name|>flash_tune.py<|end_file_name|><|fim▁begin|>'''
main tuning script, FLASH
'''
import numpy as np
from ocelot.mint.mint import Optimizer, Action
from ocelot.mint.flash1_interface import FLASH1MachineInterface, FLASH1DeviceProperties, TestInterface
<|fim▁hole|>
mi = FLASH1MachineInterface()
dp = FLASH1DeviceProperties()
#opt = Optimizer(mi, dp)
opt = Optimizer(TestInterface(), dp)
opt.debug = True
opt.logging = True
opt.log_file = 'test.log'
opt.timeout = 1.2
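# Each seq* below wraps one optimizer action: maximize SASE over the listed correctors/quads using the named method ('simplex' or 'cg')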
seq1 = [Action(func=opt.max_sase, args=[ ['H10SMATCH','H12SMATCH'], 'simplex'] ) ]
seq2 = [Action(func=opt.max_sase, args=[ ['V14SMATCH','V7SMATCH'], 'simplex' ] )]
seq3 = [Action(func=opt.max_sase, args=[ ['V14SMATCH','V7SMATCH','H10SMATCH','H12SMATCH'], 'simplex' ] )]
seq4 = [Action(func=opt.max_sase, args=[ ['Q13SMATCH','Q15SMATCH'], 'simplex' ] )]
seq5 = [Action(func=opt.max_sase, args=[ ['H3DBC3','V3DBC3'], 'simplex' ] )]
seq6 = [Action(func=opt.max_sase, args=[ ['H3DBC3','V3DBC3','H10ACC7','V10ACC7'], 'simplex' ] )]
seq7 = [Action(func=opt.max_sase, args=[ ['Q5UND1.3.5','Q5UND2.4'], 'simplex' ] )]
seq8 = [Action(func=opt.max_sase, args=[ ['H3UND1','H3UND3','H3UND4','H3UND5'], 'simplex' ] )]
seq9 = [Action(func=opt.max_sase, args=[ ['H8TCOL','V8TCOL'], 'simplex' ] )]
seq10 = [Action(func=opt.max_sase, args=[ ['H3DBC3'], 'simplex' ] )]
seq0 = [Action(func=opt.max_sase, args=[ ['H10SMATCH','H12SMATCH'], 'cg', {'maxiter':15}] ),
Action(func=opt.max_sase, args=[ ['H10SMATCH','H12SMATCH'], 'simplex', {'maxiter':25}] )]
opt.eval(seq1)
"""
#import json
def get_dict(lat, bpms):
dict_bpms = {}
for elem in lat.sequence:
if elem.type == "monitor" and elem.mi_id in bpms:
dict_bpms[elem.mi_id] = {}
dict_bpms[elem.mi_id]["x"] = elem.x
dict_bpms[elem.mi_id]["y"] = elem.y
return dict_bpms
#dp = FLASH1DeviceProperties()
def apply_bump(names, currents, dIs, alpha):
mi.set_value(names, currents+dIs*alpha)
cors = ['H3DBC3', 'H10ACC4','H9ACC5', 'H10ACC5', 'H9ACC6', 'H10ACC6', 'H10ACC7']
dI = np.array([-0.0114768844711, -0.183727960466, 0.325959042831, 0.318743893708, 0.15280311903, 0.130996600233, -0.831909116508])
currents = np.array([ -0.0229914523661, 0.0250000003725, 0.985000014305, 0.0, -1.17299997807, 0.0, 0.148000001907])
bump = {"correctors":cors, "dI": dI, "currents":currents}
alpha = 0.1
seq_bump = [Action(func=opt.max_sase_bump, args=[ bump, alpha, 'simplex' ] )]
orbit = {}
orbit["correctors"] = ['H3SFELC', 'H4SFELC', 'H10SMATCH', 'D11SMATCH', 'H12SMATCH']
setup = log.MachineSetup()
#setup.save_lattice(lat, "init.txt")
lat_all = MagneticLattice(lattice)
setup.load_lattice("init.txt", lat_all)
orbit["bpms"] = get_dict(lat, bpms)
seq_min_orb = [Action(func=opt.min_orbit, args=[orbit, 'simplex' ] )]
opt.eval(seq_bump)
apply_bump(cors, currents, dI, alpha=0.1)
"""<|fim▁end|> | |
<|file_name|>l10n-fetch-po-files.py<|end_file_name|><|fim▁begin|>import os
import re
import subprocess
# Copied from Trojita
"""Fetch the .po files from KDE's SVN for GCompris
Run me from GCompris's top-level directory.
"""
SVN_PATH = "svn://anonsvn.kde.org/home/kde/trunk/l10n-kf5/"
SOURCE_PO_PATH = "/messages/kdereview/gcompris_qt.po"
OUTPUT_PO_PATH = "./po/"
OUTPUT_PO_PATTERN = "gcompris_%s.po"
fixer = re.compile(r'^#~\| ', re.MULTILINE)
re_empty_msgid = re.compile('^msgid ""$', re.MULTILINE)
re_empty_line = re.compile('^$', re.MULTILINE)
re_has_qt_contexts = re.compile('X-Qt-Contexts: true\\n')
if not os.path.exists(OUTPUT_PO_PATH):
os.mkdir(OUTPUT_PO_PATH)
all_languages = subprocess.check_output(['svn', 'cat', SVN_PATH + 'subdirs'],
stderr=subprocess.STDOUT)
all_languages = [x.strip() for x in all_languages.split("\n") if len(x)]
all_languages.remove("x-test")
for lang in all_languages:
try:
raw_data = subprocess.check_output(['svn', 'cat', SVN_PATH + lang + SOURCE_PO_PATH],
stderr=subprocess.PIPE)
(transformed, subs) = fixer.subn('# ~| ', raw_data)
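# The PO header entry spans from the empty msgid to the first blank line; insert the X-Qt-Contexts flag there when it is missing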
pos1 = re_empty_msgid.search(transformed).start()
pos2 = re_empty_line.search(transformed).start()
if re_has_qt_contexts.search(transformed, pos1, pos2) is None:
transformed = transformed[:pos2] + \
'"X-Qt-Contexts: true\\n"\n' + \
transformed[pos2:]
subs = subs + 1
if (subs > 0):
print "Fetched %s (and performed %d cleanups)" % (lang, subs)
else:
print "Fetched %s" % lang
file(OUTPUT_PO_PATH + OUTPUT_PO_PATTERN % lang, "wb").write(transformed)
except subprocess.CalledProcessError:<|fim▁hole|> print "No data for %s" % lang
# Inform qmake about the updated file list
#os.utime("CMakeLists.txt", None)<|fim▁end|> | |
<|file_name|>bayes.py<|end_file_name|><|fim▁begin|>"""
Various bayesian regression
"""
from __future__ import print_function
# Authors: V. Michel, F. Pedregosa, A. Gramfort
# License: BSD 3 clause
from math import log
import numpy as np
from scipy import linalg
from .base import LinearModel
from ..base import RegressorMixin
from ..utils.extmath import fast_logdet, pinvh
from ..utils import check_arrays
###############################################################################
# BayesianRidge regression
class BayesianRidge(LinearModel, RegressorMixin):
"""Bayesian ridge regression
Fit a Bayesian ridge model and optimize the regularization parameters
lambda (precision of the weights) and alpha (precision of the noise).
Parameters
----------
X : array, shape = (n_samples, n_features)
Training vectors.
y : array, shape = (length)
Target values for training vectors
n_iter : int, optional
Maximum number of iterations. Default is 300.
tol : float, optional
Stop the algorithm if w has converged. Default is 1.e-3.
alpha_1 : float, optional
Hyper-parameter : shape parameter for the Gamma distribution prior
over the alpha parameter. Default is 1.e-6
alpha_2 : float, optional
Hyper-parameter : inverse scale parameter (rate parameter) for the
Gamma distribution prior over the alpha parameter.
Default is 1.e-6.
lambda_1 : float, optional
Hyper-parameter : shape parameter for the Gamma distribution prior
over the lambda parameter. Default is 1.e-6.
lambda_2 : float, optional
Hyper-parameter : inverse scale parameter (rate parameter) for the
Gamma distribution prior over the lambda parameter.
Default is 1.e-6
compute_score : boolean, optional
If True, compute the objective function at each step of the model.
Default is False
fit_intercept : boolean, optional
whether to calculate the intercept for this model. If set
to false, no intercept will be used in calculations
(e.g. data is expected to be already centered).
Default is True.
normalize : boolean, optional, default False
If True, the regressors X will be normalized before regression.
copy_X : boolean, optional, default True
If True, X will be copied; else, it may be overwritten.
verbose : boolean, optional, default False
Verbose mode when fitting the model.
Attributes
----------
`coef_` : array, shape = (n_features)
Coefficients of the regression model (mean of distribution)
`alpha_` : float
estimated precision of the noise.
`lambda_` : array, shape = (n_features)
estimated precisions of the weights.
`scores_` : float
if computed, value of the objective function (to be maximized)
Examples
--------
>>> from sklearn import linear_model
>>> clf = linear_model.BayesianRidge()
>>> clf.fit([[0,0], [1, 1], [2, 2]], [0, 1, 2])
... # doctest: +NORMALIZE_WHITESPACE
BayesianRidge(alpha_1=1e-06, alpha_2=1e-06, compute_score=False,
copy_X=True, fit_intercept=True, lambda_1=1e-06, lambda_2=1e-06,
n_iter=300, normalize=False, tol=0.001, verbose=False)
>>> clf.predict([[1, 1]])
array([ 1.])
Notes
-----
See examples/linear_model/plot_bayesian_ridge.py for an example.
"""
def __init__(self, n_iter=300, tol=1.e-3, alpha_1=1.e-6, alpha_2=1.e-6,
lambda_1=1.e-6, lambda_2=1.e-6, compute_score=False,
fit_intercept=True, normalize=False, copy_X=True,
verbose=False):
self.n_iter = n_iter
self.tol = tol
self.alpha_1 = alpha_1
self.alpha_2 = alpha_2
self.lambda_1 = lambda_1
self.lambda_2 = lambda_2
self.compute_score = compute_score
self.fit_intercept = fit_intercept
self.normalize = normalize
self.copy_X = copy_X
self.verbose = verbose
def fit(self, X, y):
"""Fit the model
Parameters
----------
X : numpy array of shape [n_samples,n_features]
Training data
y : numpy array of shape [n_samples]
Target values
Returns
-------
self : returns an instance of self.
"""
X, y = check_arrays(X, y, sparse_format='dense',
dtype=np.float)
X, y, X_mean, y_mean, X_std = self._center_data(
X, y, self.fit_intercept, self.normalize, self.copy_X)
n_samples, n_features = X.shape
### Initialization of the values of the parameters
alpha_ = 1. / np.var(y)
lambda_ = 1.
verbose = self.verbose
lambda_1 = self.lambda_1
lambda_2 = self.lambda_2
alpha_1 = self.alpha_1
alpha_2 = self.alpha_2
self.scores_ = list()
coef_old_ = None
XT_y = np.dot(X.T, y)
U, S, Vh = linalg.svd(X, full_matrices=False)
eigen_vals_ = S ** 2
### Convergence loop of the bayesian ridge regression
for iter_ in range(self.n_iter):
### Compute mu and sigma
# sigma_ = lambda_ / alpha_ * np.eye(n_features) + np.dot(X.T, X)
# coef_ = sigma_^-1 * XT * y
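# Use the SVD-based formulation that works in the smaller dimension: via Vh (n_features) when n_samples > n_features, via U (n_samples) otherwise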
if n_samples > n_features:
coef_ = np.dot(Vh.T,
Vh / (eigen_vals_ + lambda_ / alpha_)[:, None])
coef_ = np.dot(coef_, XT_y)
if self.compute_score:
logdet_sigma_ = - np.sum(
np.log(lambda_ + alpha_ * eigen_vals_))
else:
coef_ = np.dot(X.T, np.dot(
U / (eigen_vals_ + lambda_ / alpha_)[None, :], U.T))
coef_ = np.dot(coef_, y)
if self.compute_score:
logdet_sigma_ = lambda_ * np.ones(n_features)
logdet_sigma_[:n_samples] += alpha_ * eigen_vals_
logdet_sigma_ = - np.sum(np.log(logdet_sigma_))
### Update alpha and lambda
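# gamma_ is the effective number of well-determined parameters; alpha and lambda follow the evidence-maximization updates, with the Gamma hyperpriors entering through alpha_1/alpha_2 and lambda_1/lambda_2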
rmse_ = np.sum((y - np.dot(X, coef_)) ** 2)
gamma_ = (np.sum((alpha_ * eigen_vals_)
/ (lambda_ + alpha_ * eigen_vals_)))
lambda_ = ((gamma_ + 2 * lambda_1)
/ (np.sum(coef_ ** 2) + 2 * lambda_2))
alpha_ = ((n_samples - gamma_ + 2 * alpha_1)
/ (rmse_ + 2 * alpha_2))
### Compute the objective function
if self.compute_score:
s = lambda_1 * log(lambda_) - lambda_2 * lambda_
s += alpha_1 * log(alpha_) - alpha_2 * alpha_
s += 0.5 * (n_features * log(lambda_)
+ n_samples * log(alpha_)
- alpha_ * rmse_
- (lambda_ * np.sum(coef_ ** 2))
- logdet_sigma_
- n_samples * log(2 * np.pi))
self.scores_.append(s)
### Check for convergence
if iter_ != 0 and np.sum(np.abs(coef_old_ - coef_)) < self.tol:
if verbose:
print("Convergence after ", str(iter_), " iterations")
break
coef_old_ = np.copy(coef_)
self.alpha_ = alpha_
self.lambda_ = lambda_
self.coef_ = coef_
self._set_intercept(X_mean, y_mean, X_std)
return self
###############################################################################
# ARD (Automatic Relevance Determination) regression
class ARDRegression(LinearModel, RegressorMixin):
"""Bayesian ARD regression.
Fit the weights of a regression model, using an ARD prior. The weights of
the regression model are assumed to be in Gaussian distributions.
Also estimate the parameters lambda (precisions of the distributions of the
weights) and alpha (precision of the distribution of the noise).
The estimation is done by an iterative procedure (Evidence Maximization)
Parameters
----------
X : array, shape = (n_samples, n_features)
Training vectors.
y : array, shape = (n_samples)
Target values for training vectors
n_iter : int, optional
Maximum number of iterations. Default is 300
tol : float, optional
Stop the algorithm if w has converged. Default is 1.e-3.
alpha_1 : float, optional
Hyper-parameter : shape parameter for the Gamma distribution prior
over the alpha parameter. Default is 1.e-6.
alpha_2 : float, optional
Hyper-parameter : inverse scale parameter (rate parameter) for the
Gamma distribution prior over the alpha parameter. Default is 1.e-6.
lambda_1 : float, optional
Hyper-parameter : shape parameter for the Gamma distribution prior
over the lambda parameter. Default is 1.e-6.
lambda_2 : float, optional
Hyper-parameter : inverse scale parameter (rate parameter) for the
Gamma distribution prior over the lambda parameter. Default is 1.e-6.
compute_score : boolean, optional
If True, compute the objective function at each step of the model.
Default is False.
threshold_lambda : float, optional
threshold for removing (pruning) weights with high precision from
the computation. Default is 1.e+4.
fit_intercept : boolean, optional
whether to calculate the intercept for this model. If set
to false, no intercept will be used in calculations
(e.g. data is expected to be already centered).
Default is True.
normalize : boolean, optional, default False
If True, the regressors X will be normalized before regression.
copy_X : boolean, optional, default True.
If True, X will be copied; else, it may be overwritten.
verbose : boolean, optional, default False
Verbose mode when fitting the model.
Attributes
----------
`coef_` : array, shape = (n_features)
Coefficients of the regression model (mean of distribution)
`alpha_` : float
estimated precision of the noise.
`lambda_` : array, shape = (n_features)
estimated precisions of the weights.
`sigma_` : array, shape = (n_features, n_features)
estimated variance-covariance matrix of the weights
`scores_` : float
if computed, value of the objective function (to be maximized)<|fim▁hole|> >>> clf = linear_model.ARDRegression()
>>> clf.fit([[0,0], [1, 1], [2, 2]], [0, 1, 2])
... # doctest: +NORMALIZE_WHITESPACE
ARDRegression(alpha_1=1e-06, alpha_2=1e-06, compute_score=False,
copy_X=True, fit_intercept=True, lambda_1=1e-06, lambda_2=1e-06,
n_iter=300, normalize=False, threshold_lambda=10000.0, tol=0.001,
verbose=False)
>>> clf.predict([[1, 1]])
array([ 1.])
Notes
--------
See examples/linear_model/plot_ard.py for an example.
"""
def __init__(self, n_iter=300, tol=1.e-3, alpha_1=1.e-6, alpha_2=1.e-6,
lambda_1=1.e-6, lambda_2=1.e-6, compute_score=False,
threshold_lambda=1.e+4, fit_intercept=True, normalize=False,
copy_X=True, verbose=False):
self.n_iter = n_iter
self.tol = tol
self.fit_intercept = fit_intercept
self.normalize = normalize
self.alpha_1 = alpha_1
self.alpha_2 = alpha_2
self.lambda_1 = lambda_1
self.lambda_2 = lambda_2
self.compute_score = compute_score
self.threshold_lambda = threshold_lambda
self.copy_X = copy_X
self.verbose = verbose
def fit(self, X, y):
"""Fit the ARDRegression model according to the given training data
and parameters.
Iterative procedure to maximize the evidence
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Training vector, where n_samples in the number of samples and
n_features is the number of features.
y : array, shape = [n_samples]
Target values (integers)
Returns
-------
self : returns an instance of self.
"""
X, y = check_arrays(X, y, sparse_format='dense',
dtype=np.float)
n_samples, n_features = X.shape
coef_ = np.zeros(n_features)
X, y, X_mean, y_mean, X_std = self._center_data(
X, y, self.fit_intercept, self.normalize, self.copy_X)
### Launch the convergence loop
keep_lambda = np.ones(n_features, dtype=bool)
lambda_1 = self.lambda_1
lambda_2 = self.lambda_2
alpha_1 = self.alpha_1
alpha_2 = self.alpha_2
verbose = self.verbose
### Initialization of the values of the parameters
alpha_ = 1. / np.var(y)
lambda_ = np.ones(n_features)
self.scores_ = list()
coef_old_ = None
### Iterative procedure of ARDRegression
for iter_ in range(self.n_iter):
### Compute mu and sigma (using Woodbury matrix identity)
sigma_ = pinvh(np.eye(n_samples) / alpha_ +
np.dot(X[:, keep_lambda] *
np.reshape(1. / lambda_[keep_lambda], [1, -1]),
X[:, keep_lambda].T))
sigma_ = np.dot(sigma_, X[:, keep_lambda]
* np.reshape(1. / lambda_[keep_lambda], [1, -1]))
sigma_ = - np.dot(np.reshape(1. / lambda_[keep_lambda], [-1, 1])
* X[:, keep_lambda].T, sigma_)
sigma_.flat[::(sigma_.shape[1] + 1)] += 1. / lambda_[keep_lambda]
coef_[keep_lambda] = alpha_ * np.dot(
sigma_, np.dot(X[:, keep_lambda].T, y))
### Update alpha and lambda
rmse_ = np.sum((y - np.dot(X, coef_)) ** 2)
gamma_ = 1. - lambda_[keep_lambda] * np.diag(sigma_)
lambda_[keep_lambda] = ((gamma_ + 2. * lambda_1)
/ ((coef_[keep_lambda]) ** 2
+ 2. * lambda_2))
alpha_ = ((n_samples - gamma_.sum() + 2. * alpha_1)
/ (rmse_ + 2. * alpha_2))
### Prune the weights with a precision over a threshold
keep_lambda = lambda_ < self.threshold_lambda
coef_[~keep_lambda] = 0
### Compute the objective function
if self.compute_score:
s = (lambda_1 * np.log(lambda_) - lambda_2 * lambda_).sum()
s += alpha_1 * log(alpha_) - alpha_2 * alpha_
s += 0.5 * (fast_logdet(sigma_) + n_samples * log(alpha_)
+ np.sum(np.log(lambda_)))
s -= 0.5 * (alpha_ * rmse_ + (lambda_ * coef_ ** 2).sum())
self.scores_.append(s)
### Check for convergence
if iter_ > 0 and np.sum(np.abs(coef_old_ - coef_)) < self.tol:
if verbose:
print("Converged after %s iterations" % iter_)
break
coef_old_ = np.copy(coef_)
self.coef_ = coef_
self.alpha_ = alpha_
self.sigma_ = sigma_
self._set_intercept(X_mean, y_mean, X_std)
return self<|fim▁end|> |
Examples
--------
>>> from sklearn import linear_model |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Maked by Mr. Have fun! Version 0.2
# Shadow Weapon Coupons contributed by BiTi for the Official L2J Datapack Project
# Visit http://forum.l2jdp.com for more details
import sys
from com.l2scoria.gameserver.model.quest import State
from com.l2scoria.gameserver.model.quest import QuestState
from com.l2scoria.gameserver.model.quest.jython import QuestJython as JQuest
qn = "404_PathToWizard"
MAP_OF_LUSTER = 1280
KEY_OF_FLAME = 1281
FLAME_EARING = 1282
BROKEN_BRONZE_MIRROR = 1283
WIND_FEATHER = 1284
WIND_BANGEL = 1285
RAMAS_DIARY = 1286
SPARKLE_PEBBLE = 1287
WATER_NECKLACE = 1288
RUST_GOLD_COIN = 1289
RED_SOIL = 1290
EARTH_RING = 1291
BEAD_OF_SEASON = 1292
class Quest (JQuest) :
def __init__(self,id,name,descr): JQuest.__init__(self,id,name,descr)
def onEvent (self,event,st) :
htmltext = event
player = st.getPlayer()
if event == "1" :
st.set("id","0")
if player.getClassId().getId() == 0x0a :
if player.getLevel() >= 19 :
if st.getQuestItemsCount(BEAD_OF_SEASON) :
htmltext = "30391-03.htm"
else:
htmltext = "30391-08.htm"
st.set("cond","1")
st.setState(STARTED)
st.playSound("ItemSound.quest_accept")
else:
htmltext = "30391-02.htm"
else:
if player.getClassId().getId() == 0x0b :
htmltext = "30391-02a.htm"
else:
htmltext = "30391-01.htm"
elif event == "30410_1" :
if st.getQuestItemsCount(WIND_FEATHER) == 0 :
htmltext = "30410-03.htm"
st.giveItems(WIND_FEATHER,1)
st.set("cond","6")
return htmltext
def onTalk (self,npc,player):
htmltext = "<html><body>You are either not carrying out your quest or don't meet the criteria.</body></html>"
st = player.getQuestState(qn)
if not st : return htmltext
npcId = npc.getNpcId()
id = st.getState()
if npcId != 30391 and id != STARTED : return htmltext
if id == CREATED :
st.setState(STARTING)
st.set("cond","0")
st.set("onlyone","0")
st.set("id","0")
if npcId == 30391 and st.getInt("cond")==0 :
#Talking to Parina before completing this quest
if st.getInt("cond")<15 :
htmltext = "30391-04.htm"
return htmltext
else:
htmltext = "30391-04.htm"
elif npcId == 30391 and st.getInt("cond")!=0 and (st.getQuestItemsCount(FLAME_EARING)==0 or st.getQuestItemsCount(WIND_BANGEL)==0 or st.getQuestItemsCount(WATER_NECKLACE)==0 or st.getQuestItemsCount(EARTH_RING)==0) :
htmltext = "30391-05.htm"
elif npcId == 30411 and st.getInt("cond")!=0 and st.getQuestItemsCount(MAP_OF_LUSTER)==0 and st.getQuestItemsCount(FLAME_EARING)==0 :
#Talking to the Flame Salamander for the first time
#gains us the MAP_OF_LUSTER
#and flags cond = 2
if st.getQuestItemsCount(MAP_OF_LUSTER) == 0 :
st.giveItems(MAP_OF_LUSTER,1)
htmltext = "30411-01.htm"
st.set("cond","2")
elif npcId == 30411 and st.getInt("cond")!=0 and st.getQuestItemsCount(MAP_OF_LUSTER)!=0 and st.getQuestItemsCount(KEY_OF_FLAME)==0 :
#Talking to the Flame Salamander more than once
#without the KEY_OF_FLAME
#But with the MAP_OF_LUSTER
#results in the following text
htmltext = "30411-02.htm"
elif npcId == 30411 and st.getInt("cond")!=0 and st.getQuestItemsCount(MAP_OF_LUSTER)!=0 and st.getQuestItemsCount(KEY_OF_FLAME)!=0 :
#Talking to the Flame Salamander when Cond != 0
#while we have a KEY_OF_FLAME from the ratmen and the MAP_OF_LUSTER
#Remove both Items and give a FLAME_EARING
#Set the cond flag to 4 to signify we have completed the first part
st.takeItems(KEY_OF_FLAME,st.getQuestItemsCount(KEY_OF_FLAME))
st.takeItems(MAP_OF_LUSTER,st.getQuestItemsCount(MAP_OF_LUSTER))
if st.getQuestItemsCount(FLAME_EARING) == 0 :
st.giveItems(FLAME_EARING,1)
htmltext = "30411-03.htm"
st.set("cond","4")
elif npcId == 30411 and st.getInt("cond")!=0 and st.getQuestItemsCount(FLAME_EARING)!=0 :
#Talking to the Flame Salamander
#after finishing the Fire component results
#in the following text
htmltext = "30411-04.htm"
elif npcId == 30412 and st.getInt("cond")!=0 and st.getQuestItemsCount(FLAME_EARING)!=0 and st.getQuestItemsCount(BROKEN_BRONZE_MIRROR)==0 and st.getQuestItemsCount(WIND_BANGEL)==0 :
#Talking to the Wind Sylph for the first time
#With a FLAME_EARING (fire component complete)
#Gives us a BROKEN_BRONZE_MIRROR
#and sets cond = 5
if st.getQuestItemsCount(BROKEN_BRONZE_MIRROR) == 0 :
st.giveItems(BROKEN_BRONZE_MIRROR,1)
htmltext = "30412-01.htm"
st.set("cond","5")
elif npcId == 30412 and st.getInt("cond")!=0 and st.getQuestItemsCount(BROKEN_BRONZE_MIRROR)!=0 and st.getQuestItemsCount(WIND_FEATHER)==0 :
#Talking to the Wind Sylph for a second time
#results in the following text
htmltext = "30412-02.htm"<|fim▁hole|> #while having a BROKEN_BRONZE_MIRROR and a WIND_FEATHER
#Removes both items
#Gives a WIND_BANGEL
#and sets cond = 7
st.takeItems(WIND_FEATHER,st.getQuestItemsCount(WIND_FEATHER))
st.takeItems(BROKEN_BRONZE_MIRROR,st.getQuestItemsCount(BROKEN_BRONZE_MIRROR))
if st.getQuestItemsCount(WIND_BANGEL) == 0 :
st.giveItems(WIND_BANGEL,1)
htmltext = "30412-03.htm"
st.set("cond","7")
elif npcId == 30412 and st.getInt("cond")!=0 and st.getQuestItemsCount(WIND_BANGEL)!=0 :
#Talking to the Wind Sylph after we get the WIND_BANGLE
#results in the following text
htmltext = "30412-04.htm"
elif npcId == 30410 and st.getInt("cond")!=0 and st.getQuestItemsCount(BROKEN_BRONZE_MIRROR)!=0 and st.getQuestItemsCount(WIND_FEATHER)==0 :
#Talking to the Lizardman of the Wastelands for the first time
#begins this conversation
htmltext = "30410-01.htm"
elif npcId == 30410 and st.getInt("cond")!=0 and st.getQuestItemsCount(BROKEN_BRONZE_MIRROR)!=0 and st.getQuestItemsCount(WIND_FEATHER)!=0 :
#Talking to the Lizardman of the Wastelands after obtaining
#the WIND_FEATHER
htmltext = "30410-04.htm"
elif npcId == 30413 and st.getInt("cond")!=0 and st.getQuestItemsCount(WIND_BANGEL)!=0 and st.getQuestItemsCount(RAMAS_DIARY)==0 and st.getQuestItemsCount(WATER_NECKLACE)==0 :
#Talking to the Water Undine for the first time
#gives RAMAS_DIARY
#and sets cond = 8
if st.getQuestItemsCount(RAMAS_DIARY) == 0 :
st.giveItems(RAMAS_DIARY,1)
htmltext = "30413-01.htm"
st.set("cond","8")
elif npcId == 30413 and st.getInt("cond")!=0 and st.getQuestItemsCount(RAMAS_DIARY)!=0 and st.getQuestItemsCount(SPARKLE_PEBBLE)<2 :
#Talking to the Water Undine for a second time
#without 2 SPARKLE_PEBBLE
htmltext = "30413-02.htm"
elif npcId == 30413 and st.getInt("cond")!=0 and st.getQuestItemsCount(RAMAS_DIARY)!=0 and st.getQuestItemsCount(SPARKLE_PEBBLE)>=2 :
#Talking to the Water Undine with the 2 SPARKLE_PEBBLE
#removes both items
#and gives WATER_NECKLACE
#sets cond = 10
st.takeItems(SPARKLE_PEBBLE,st.getQuestItemsCount(SPARKLE_PEBBLE))
st.takeItems(RAMAS_DIARY,st.getQuestItemsCount(RAMAS_DIARY))
if st.getQuestItemsCount(WATER_NECKLACE) == 0 :
st.giveItems(WATER_NECKLACE,1)
htmltext = "30413-03.htm"
st.set("cond","10")
elif npcId == 30413 and st.getInt("cond")!=0 and st.getQuestItemsCount(WATER_NECKLACE)!=0 :
#Talking to the Water Undine after completing its task
htmltext = "30413-04.htm"
elif npcId == 30409 and st.getInt("cond")!=0 and st.getQuestItemsCount(WATER_NECKLACE)!=0 and st.getQuestItemsCount(RUST_GOLD_COIN)==0 and st.getQuestItemsCount(EARTH_RING)==0 :
#Talking to the Earth Snake for the first time
if st.getQuestItemsCount(RUST_GOLD_COIN) == 0 :
st.giveItems(RUST_GOLD_COIN,1)
htmltext = "30409-01.htm"
st.set("cond","11")
elif npcId == 30409 and st.getInt("cond")!=0 and st.getQuestItemsCount(RUST_GOLD_COIN)!=0 and st.getQuestItemsCount(RED_SOIL)==0 :
#Talking to the Earth Snake for a second time
#without RED_SOIL
htmltext = "30409-02.htm"
elif npcId == 30409 and st.getInt("cond")!=0 and st.getQuestItemsCount(RUST_GOLD_COIN)!=0 and st.getQuestItemsCount(RED_SOIL)!=0 :
#Talking to the Earth Snake after collecting the RED_SOIL
#Gives EARTH_RING
#and sets cond = 13
st.takeItems(RED_SOIL,st.getQuestItemsCount(RED_SOIL))
st.takeItems(RUST_GOLD_COIN,st.getQuestItemsCount(RUST_GOLD_COIN))
if st.getQuestItemsCount(EARTH_RING) == 0 :
st.giveItems(EARTH_RING,1)
htmltext = "30409-03.htm"
st.set("cond","13")
elif npcId == 30409 and st.getInt("cond")!=0 and st.getQuestItemsCount(EARTH_RING)!=0 :
#Talking to the Earth Snake after completing his task
htmltext = "30409-03.htm"
elif npcId == 30391 and st.getInt("cond")!=0 and st.getQuestItemsCount(FLAME_EARING)!=0 and st.getQuestItemsCount(WIND_BANGEL)!=0 and st.getQuestItemsCount(WATER_NECKLACE)!=0 and st.getQuestItemsCount(EARTH_RING)!=0 :
#Talking to Parina after gathering all 4 tokens
#Gains BEAD_OF_SEASON
#Resets cond so these NPC's will no longer speak to you
#and Sets the quest as completed
st.takeItems(FLAME_EARING,st.getQuestItemsCount(FLAME_EARING))
st.takeItems(WIND_BANGEL,st.getQuestItemsCount(WIND_BANGEL))
st.takeItems(WATER_NECKLACE,st.getQuestItemsCount(WATER_NECKLACE))
st.takeItems(EARTH_RING,st.getQuestItemsCount(EARTH_RING))
st.set("cond","0")
st.setState(COMPLETED)
st.playSound("ItemSound.quest_finish")
if st.getQuestItemsCount(BEAD_OF_SEASON) == 0 :
st.giveItems(BEAD_OF_SEASON,1)
htmltext = "30391-06.htm"
return htmltext
def onKill(self,npc,player,isPet):
st = player.getQuestState(qn)
if not st : return
if st.getState() != STARTED : return
npcId = npc.getNpcId()
if npcId == 20359 : #Ratman Warrior, as of C3.
st.set("id","0")
#Only get a KEY_OF_FLAME if we are on the quest for the Fire Salamander
if st.getInt("cond") == 2 :
st.giveItems(KEY_OF_FLAME,1)
st.playSound("ItemSound.quest_middle")
#Increase the Cond so we can only get one key
st.set("cond","3")
elif npcId == 27030 : #water seer
st.set("id","0")
#Only get a SPARKLE_PEBBLE if we are on the quest for the Water Undine
if st.getInt("cond") == 8 and st.getQuestItemsCount(SPARKLE_PEBBLE) < 2:
st.giveItems(SPARKLE_PEBBLE,1)
if st.getQuestItemsCount(SPARKLE_PEBBLE) == 2 :
st.playSound("ItemSound.quest_middle")
st.set("cond","9")
else:
st.playSound("ItemSound.quest_itemget")
elif npcId == 20021 : #Red Bear
st.set("id","0")
#Only get a RED_SOIL if we are on the quest for the Earth Snake
if st.getInt("cond") == 11 :
st.giveItems(RED_SOIL,1)
st.playSound("ItemSound.quest_middle")
st.set("cond","12")
return
QUEST = Quest(404,qn,"Path To Wizard")
CREATED = State('Start', QUEST)
STARTING = State('Starting', QUEST)
STARTED = State('Started', QUEST)
COMPLETED = State('Completed', QUEST)
QUEST.setInitialState(CREATED)
QUEST.addStartNpc(30391)
QUEST.addTalkId(30391)
QUEST.addTalkId(30409)
QUEST.addTalkId(30410)
QUEST.addTalkId(30411)
QUEST.addTalkId(30412)
QUEST.addTalkId(30413)
QUEST.addKillId(20021)
QUEST.addKillId(20359)
QUEST.addKillId(27030)
STARTED.addQuestDrop(20359,KEY_OF_FLAME,1)
STARTED.addQuestDrop(30411,MAP_OF_LUSTER,1)
STARTED.addQuestDrop(30410,WIND_FEATHER,1)
STARTED.addQuestDrop(30412,BROKEN_BRONZE_MIRROR,1)
STARTED.addQuestDrop(27030,SPARKLE_PEBBLE,1)
STARTED.addQuestDrop(30413,RAMAS_DIARY,1)
STARTED.addQuestDrop(20021,RED_SOIL,1)
STARTED.addQuestDrop(30409,RUST_GOLD_COIN,1)
STARTED.addQuestDrop(30411,FLAME_EARING,1)
STARTED.addQuestDrop(30412,WIND_BANGEL,1)
STARTED.addQuestDrop(30413,WATER_NECKLACE,1)
STARTED.addQuestDrop(30409,EARTH_RING,1)<|fim▁end|> | elif npcId == 30412 and st.getInt("cond")!=0 and st.getQuestItemsCount(BROKEN_BRONZE_MIRROR)!=0 and st.getQuestItemsCount(WIND_FEATHER)!=0 :
#Talking to the Wind Sylph with cond != 0 |
<|file_name|>generic.py<|end_file_name|><|fim▁begin|># (void)walker CPU architecture support
# Copyright (C) 2013 David Holm <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by<|fim▁hole|># This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from ...framework.platform import Architecture
from ...framework.platform import Cpu
from ...framework.platform import Register
from ...framework.platform import register_cpu
@register_cpu
class GenericCpu(Cpu):
def __init__(self, cpu_factory, registers):
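# Wrap every raw register name in a Register object while keeping the group layout intact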
for group, register_list in registers.iteritems():
registers[group] = [Register(x) for x in register_list]
super(GenericCpu, self).__init__(cpu_factory, registers)
@classmethod
def architecture(cls):
return Architecture.Generic
def stack_pointer(self):
return self.register('sp')
def program_counter(self):
return self.register('pc')<|fim▁end|> | # the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
# |
<|file_name|>virt_pool.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Maciej Delmanowski <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: virt_pool
author: "Maciej Delmanowski (@drybjed)"
version_added: "2.0"
short_description: Manage libvirt storage pools
description:
- Manage I(libvirt) storage pools.
options:
name:
required: false
aliases: [ "pool" ]
description:
- name of the storage pool being managed. Note that pool must be previously
defined with xml.
state:
required: false
choices: [ "active", "inactive", "present", "absent", "undefined", "deleted" ]
description:
- specify which state you want a storage pool to be in.
If 'active', pool will be started.
If 'present', ensure that pool is present but do not change its
state; if it's missing, you need to specify xml argument.
If 'inactive', pool will be stopped.
If 'undefined' or 'absent', pool will be removed from I(libvirt) configuration.
If 'deleted', pool contents will be deleted and then pool undefined.
command:
required: false
choices: [ "define", "build", "create", "start", "stop", "destroy",
"delete", "undefine", "get_xml", "list_pools", "facts",
"info", "status" ]
description:
- in addition to state management, various non-idempotent commands are available.
See examples.
autostart:
required: false
type: bool
description:
- Specify if a given storage pool should be started automatically on system boot.
uri:
required: false
default: "qemu:///system"
description:
- I(libvirt) connection uri.
xml:
required: false
description:
- XML document used with the define command.
mode:
required: false
choices: [ 'new', 'repair', 'resize', 'no_overwrite', 'overwrite', 'normal', 'zeroed' ]
description:
- Pass additional parameters to 'build' or 'delete' commands.
requirements:
- "python >= 2.6"
- "python-libvirt"
- "python-lxml"
'''
EXAMPLES = '''
# Define a new storage pool
- virt_pool:
command: define
name: vms
xml: '{{ lookup("template", "pool/dir.xml.j2") }}'
# Build a storage pool if it does not exist
- virt_pool:
command: build
name: vms
# Start a storage pool
- virt_pool:
command: create
name: vms
# List available pools
- virt_pool:
command: list_pools
# Get XML data of a specified pool
- virt_pool:
command: get_xml
name: vms
# Stop a storage pool
- virt_pool:
command: destroy
name: vms
# Delete a storage pool (destroys contents)
- virt_pool:
command: delete
name: vms
# Undefine a storage pool
- virt_pool:
command: undefine
name: vms
# Gather facts about storage pools
# Facts will be available as 'ansible_libvirt_pools'
- virt_pool:
command: facts
# Gather information about pools managed by 'libvirt' remotely using uri
- virt_pool:
command: info
uri: '{{ item }}'
with_items: '{{ libvirt_uris }}'
register: storage_pools
# Ensure that a pool is active (needs to be defined and built first)
- virt_pool:
state: active
name: vms
# Ensure that a pool is inactive
- virt_pool:
state: inactive
name: vms
# Ensure that a given pool will be started at boot
- virt_pool:
autostart: yes
name: vms
# Disable autostart for a given pool
- virt_pool:
autostart: no
name: vms
'''
try:
import libvirt
except ImportError:
HAS_VIRT = False
else:
HAS_VIRT = True
try:
from lxml import etree
except ImportError:
HAS_XML = False
else:
HAS_XML = True
from ansible.module_utils.basic import AnsibleModule
VIRT_FAILED = 1
VIRT_SUCCESS = 0
VIRT_UNAVAILABLE = 2
ALL_COMMANDS = []
ENTRY_COMMANDS = ['create', 'status', 'start', 'stop', 'build', 'delete',
'undefine', 'destroy', 'get_xml', 'define', 'refresh']
HOST_COMMANDS = ['list_pools', 'facts', 'info']
ALL_COMMANDS.extend(ENTRY_COMMANDS)
ALL_COMMANDS.extend(HOST_COMMANDS)
ENTRY_STATE_ACTIVE_MAP = {
0: "inactive",
1: "active"
}
ENTRY_STATE_AUTOSTART_MAP = {
0: "no",
1: "yes"
}
ENTRY_STATE_PERSISTENT_MAP = {
0: "no",
1: "yes"
}
ENTRY_STATE_INFO_MAP = {
0: "inactive",
1: "building",
2: "running",
3: "degraded",
4: "inaccessible"
}
ENTRY_BUILD_FLAGS_MAP = {
"new": 0,
"repair": 1,
"resize": 2,
"no_overwrite": 4,
"overwrite": 8
}
ENTRY_DELETE_FLAGS_MAP = {
"normal": 0,
"zeroed": 1
}
ALL_MODES = []
ALL_MODES.extend(ENTRY_BUILD_FLAGS_MAP.keys())
ALL_MODES.extend(ENTRY_DELETE_FLAGS_MAP.keys())
class EntryNotFound(Exception):
pass
class LibvirtConnection(object):
def __init__(self, uri, module):
self.module = module
conn = libvirt.open(uri)
if not conn:
raise Exception("hypervisor connection failure")
self.conn = conn
def find_entry(self, entryid):
# entryid = -1 returns a list of everything
results = []
# Get active entries
for name in self.conn.listStoragePools():
entry = self.conn.storagePoolLookupByName(name)
results.append(entry)
# Get inactive entries
for name in self.conn.listDefinedStoragePools():
entry = self.conn.storagePoolLookupByName(name)
results.append(entry)
if entryid == -1:
return results
for entry in results:
if entry.name() == entryid:
return entry
raise EntryNotFound("storage pool %s not found" % entryid)
def create(self, entryid):
if not self.module.check_mode:
return self.find_entry(entryid).create()
else:
try:
state = self.find_entry(entryid).isActive()
except Exception:
return self.module.exit_json(changed=True)
if not state:
return self.module.exit_json(changed=True)
def destroy(self, entryid):
if not self.module.check_mode:
return self.find_entry(entryid).destroy()
else:
if self.find_entry(entryid).isActive():
return self.module.exit_json(changed=True)
def undefine(self, entryid):
if not self.module.check_mode:
return self.find_entry(entryid).undefine()
else:
if not self.find_entry(entryid):
return self.module.exit_json(changed=True)
def get_status2(self, entry):
state = entry.isActive()
return ENTRY_STATE_ACTIVE_MAP.get(state, "unknown")
def get_status(self, entryid):
if not self.module.check_mode:
state = self.find_entry(entryid).isActive()
return ENTRY_STATE_ACTIVE_MAP.get(state, "unknown")
else:
try:
state = self.find_entry(entryid).isActive()
return ENTRY_STATE_ACTIVE_MAP.get(state, "unknown")
except Exception:
return ENTRY_STATE_ACTIVE_MAP.get("inactive", "unknown")
def get_uuid(self, entryid):
return self.find_entry(entryid).UUIDString()
def get_xml(self, entryid):
return self.find_entry(entryid).XMLDesc(0)
def get_info(self, entryid):
return self.find_entry(entryid).info()
def get_volume_count(self, entryid):
return self.find_entry(entryid).numOfVolumes()
def get_volume_names(self, entryid):
return self.find_entry(entryid).listVolumes()
def get_devices(self, entryid):
xml = etree.fromstring(self.find_entry(entryid).XMLDesc(0))
if xml.xpath('/pool/source/device'):
result = []
for device in xml.xpath('/pool/source/device'):
result.append(device.get('path'))
try:
return result
except Exception:
raise ValueError('No devices specified')
def get_format(self, entryid):
xml = etree.fromstring(self.find_entry(entryid).XMLDesc(0))
try:
result = xml.xpath('/pool/source/format')[0].get('type')
except Exception:
raise ValueError('Format not specified')
return result
def get_host(self, entryid):
xml = etree.fromstring(self.find_entry(entryid).XMLDesc(0))
try:
result = xml.xpath('/pool/source/host')[0].get('name')
except Exception:
raise ValueError('Host not specified')
return result
def get_source_path(self, entryid):
xml = etree.fromstring(self.find_entry(entryid).XMLDesc(0))
try:
result = xml.xpath('/pool/source/dir')[0].get('path')
except Exception:
raise ValueError('Source path not specified')
return result
def get_path(self, entryid):
xml = etree.fromstring(self.find_entry(entryid).XMLDesc(0))
return xml.xpath('/pool/target/path')[0].text
def get_type(self, entryid):
xml = etree.fromstring(self.find_entry(entryid).XMLDesc(0))
return xml.get('type')
def build(self, entryid, flags):
if not self.module.check_mode:
return self.find_entry(entryid).build(flags)<|fim▁hole|> try:
state = self.find_entry(entryid)
except Exception:
return self.module.exit_json(changed=True)
if not state:
return self.module.exit_json(changed=True)
def delete(self, entryid, flags):
if not self.module.check_mode:
return self.find_entry(entryid).delete(flags)
else:
try:
state = self.find_entry(entryid)
except Exception:
return self.module.exit_json(changed=True)
if state:
return self.module.exit_json(changed=True)
def get_autostart(self, entryid):
state = self.find_entry(entryid).autostart()
return ENTRY_STATE_AUTOSTART_MAP.get(state, "unknown")
def get_autostart2(self, entryid):
if not self.module.check_mode:
return self.find_entry(entryid).autostart()
else:
try:
return self.find_entry(entryid).autostart()
except Exception:
return self.module.exit_json(changed=True)
def set_autostart(self, entryid, val):
if not self.module.check_mode:
return self.find_entry(entryid).setAutostart(val)
else:
try:
state = self.find_entry(entryid).autostart()
except Exception:
return self.module.exit_json(changed=True)
if bool(state) != val:
return self.module.exit_json(changed=True)
def refresh(self, entryid):
return self.find_entry(entryid).refresh()
def get_persistent(self, entryid):
state = self.find_entry(entryid).isPersistent()
return ENTRY_STATE_PERSISTENT_MAP.get(state, "unknown")
def define_from_xml(self, entryid, xml):
if not self.module.check_mode:
return self.conn.storagePoolDefineXML(xml)
else:
try:
self.find_entry(entryid)
except Exception:
return self.module.exit_json(changed=True)
class VirtStoragePool(object):
def __init__(self, uri, module):
self.module = module
self.uri = uri
self.conn = LibvirtConnection(self.uri, self.module)
def get_pool(self, entryid):
return self.conn.find_entry(entryid)
def list_pools(self, state=None):
results = []
for entry in self.conn.find_entry(-1):
if state:
if state == self.conn.get_status2(entry):
results.append(entry.name())
else:
results.append(entry.name())
return results
def state(self):
results = []
for entry in self.list_pools():
state_blurb = self.conn.get_status(entry)
results.append("%s %s" % (entry, state_blurb))
return results
def autostart(self, entryid):
return self.conn.set_autostart(entryid, True)
def get_autostart(self, entryid):
return self.conn.get_autostart2(entryid)
def set_autostart(self, entryid, state):
return self.conn.set_autostart(entryid, state)
def create(self, entryid):
return self.conn.create(entryid)
def start(self, entryid):
return self.conn.create(entryid)
def stop(self, entryid):
return self.conn.destroy(entryid)
def destroy(self, entryid):
return self.conn.destroy(entryid)
def undefine(self, entryid):
return self.conn.undefine(entryid)
def status(self, entryid):
return self.conn.get_status(entryid)
def get_xml(self, entryid):
return self.conn.get_xml(entryid)
def define(self, entryid, xml):
return self.conn.define_from_xml(entryid, xml)
def build(self, entryid, flags):
return self.conn.build(entryid, ENTRY_BUILD_FLAGS_MAP.get(flags, 0))
def delete(self, entryid, flags):
return self.conn.delete(entryid, ENTRY_DELETE_FLAGS_MAP.get(flags, 0))
def refresh(self, entryid):
return self.conn.refresh(entryid)
def info(self):
return self.facts(facts_mode='info')
def facts(self, facts_mode='facts'):
results = dict()
for entry in self.list_pools():
results[entry] = dict()
if self.conn.find_entry(entry):
data = self.conn.get_info(entry)
# libvirt returns maxMem, memory, and cpuTime as long()'s, which
# xmlrpclib tries to convert to regular int's during serialization.
# This throws exceptions, so convert them to strings here and
# assume the other end of the xmlrpc connection can figure things
# out or doesn't care.
results[entry] = {
"status": ENTRY_STATE_INFO_MAP.get(data[0], "unknown"),
"size_total": str(data[1]),
"size_used": str(data[2]),
"size_available": str(data[3]),
}
results[entry]["autostart"] = self.conn.get_autostart(entry)
results[entry]["persistent"] = self.conn.get_persistent(entry)
results[entry]["state"] = self.conn.get_status(entry)
results[entry]["path"] = self.conn.get_path(entry)
results[entry]["type"] = self.conn.get_type(entry)
results[entry]["uuid"] = self.conn.get_uuid(entry)
if self.conn.find_entry(entry).isActive():
results[entry]["volume_count"] = self.conn.get_volume_count(entry)
results[entry]["volumes"] = list()
for volume in self.conn.get_volume_names(entry):
results[entry]["volumes"].append(volume)
else:
results[entry]["volume_count"] = -1
try:
results[entry]["host"] = self.conn.get_host(entry)
except ValueError:
pass
try:
results[entry]["source_path"] = self.conn.get_source_path(entry)
except ValueError:
pass
try:
results[entry]["format"] = self.conn.get_format(entry)
except ValueError:
pass
try:
devices = self.conn.get_devices(entry)
results[entry]["devices"] = devices
except ValueError:
pass
else:
results[entry]["state"] = self.conn.get_status(entry)
facts = dict()
if facts_mode == 'facts':
facts["ansible_facts"] = dict()
facts["ansible_facts"]["ansible_libvirt_pools"] = results
elif facts_mode == 'info':
facts['pools'] = results
return facts
def core(module):
state = module.params.get('state', None)
name = module.params.get('name', None)
command = module.params.get('command', None)
uri = module.params.get('uri', None)
xml = module.params.get('xml', None)
autostart = module.params.get('autostart', None)
mode = module.params.get('mode', None)
v = VirtStoragePool(uri, module)
res = {}
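# Dispatch order below: state handling first, then explicit commands, then autostart changes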
if state and command == 'list_pools':
res = v.list_pools(state=state)
if not isinstance(res, dict):
res = {command: res}
return VIRT_SUCCESS, res
if state:
if not name:
module.fail_json(msg="state change requires a specified name")
res['changed'] = False
if state in ['active']:
if v.status(name) != 'active':
res['changed'] = True
res['msg'] = v.start(name)
elif state in ['present']:
try:
v.get_pool(name)
except EntryNotFound:
if not xml:
module.fail_json(msg="storage pool '" + name + "' not present, but xml not specified")
v.define(name, xml)
res = {'changed': True, 'created': name}
elif state in ['inactive']:
entries = v.list_pools()
if name in entries:
if v.status(name) != 'inactive':
res['changed'] = True
res['msg'] = v.destroy(name)
elif state in ['undefined', 'absent']:
entries = v.list_pools()
if name in entries:
if v.status(name) != 'inactive':
v.destroy(name)
res['changed'] = True
res['msg'] = v.undefine(name)
elif state in ['deleted']:
entries = v.list_pools()
if name in entries:
if v.status(name) != 'inactive':
v.destroy(name)
v.delete(name, mode)
res['changed'] = True
res['msg'] = v.undefine(name)
else:
module.fail_json(msg="unexpected state")
return VIRT_SUCCESS, res
if command:
if command in ENTRY_COMMANDS:
if not name:
module.fail_json(msg="%s requires 1 argument: name" % command)
if command == 'define':
if not xml:
module.fail_json(msg="define requires xml argument")
try:
v.get_pool(name)
except EntryNotFound:
v.define(name, xml)
res = {'changed': True, 'created': name}
return VIRT_SUCCESS, res
elif command == 'build':
res = v.build(name, mode)
if not isinstance(res, dict):
res = {'changed': True, command: res}
return VIRT_SUCCESS, res
elif command == 'delete':
res = v.delete(name, mode)
if not isinstance(res, dict):
res = {'changed': True, command: res}
return VIRT_SUCCESS, res
res = getattr(v, command)(name)
if not isinstance(res, dict):
res = {command: res}
return VIRT_SUCCESS, res
elif hasattr(v, command):
res = getattr(v, command)()
if not isinstance(res, dict):
res = {command: res}
return VIRT_SUCCESS, res
else:
module.fail_json(msg="Command %s not recognized" % command)
if autostart is not None:
if not name:
module.fail_json(msg="state change requires a specified name")
res['changed'] = False
if autostart:
if not v.get_autostart(name):
res['changed'] = True
res['msg'] = v.set_autostart(name, True)
else:
if v.get_autostart(name):
res['changed'] = True
res['msg'] = v.set_autostart(name, False)
return VIRT_SUCCESS, res
module.fail_json(msg="expected state or command parameter to be specified")
def main():
module = AnsibleModule(
argument_spec=dict(
name=dict(aliases=['pool']),
state=dict(choices=['active', 'inactive', 'present', 'absent', 'undefined', 'deleted']),
command=dict(choices=ALL_COMMANDS),
uri=dict(default='qemu:///system'),
xml=dict(),
autostart=dict(type='bool'),
mode=dict(choices=ALL_MODES),
),
supports_check_mode=True
)
if not HAS_VIRT:
module.fail_json(
msg='The `libvirt` module is not importable. Check the requirements.'
)
if not HAS_XML:
module.fail_json(
msg='The `lxml` module is not importable. Check the requirements.'
)
rc = VIRT_SUCCESS
try:
rc, result = core(module)
except Exception as e:
module.fail_json(msg=str(e))
if rc != 0: # something went wrong emit the msg
module.fail_json(rc=rc, msg=result)
else:
module.exit_json(**result)
if __name__ == '__main__':
main()<|fim▁end|> | else: |
<|file_name|>postProcessor.py<|end_file_name|><|fim▁begin|># Author: Nic Wolfe <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import glob
import os
import re
import shlex
import subprocess
import stat
import sickbeard
from sickbeard import db
from sickbeard import classes
from sickbeard import common
from sickbeard import exceptions
from sickbeard import helpers
from sickbeard import history
from sickbeard import logger
from sickbeard import notifiers
from sickbeard import show_name_helpers
from sickbeard import scene_exceptions
from sickbeard import encodingKludge as ek
from sickbeard.exceptions import ex
from sickbeard.name_parser.parser import NameParser, InvalidNameException
from lib.tvdb_api import tvdb_api, tvdb_exceptions
class PostProcessor(object):
"""
A class which will process a media file according to the post processing settings in the config.
"""
EXISTS_LARGER = 1
EXISTS_SAME = 2
EXISTS_SMALLER = 3
DOESNT_EXIST = 4
IGNORED_FILESTRINGS = [ "/.AppleDouble/", ".DS_Store" ]
NZB_NAME = 1
FOLDER_NAME = 2
FILE_NAME = 3
def __init__(self, file_path, nzb_name = None):
"""
Creates a new post processor with the given file path and optionally an NZB name.
file_path: The path to the file to be processed
nzb_name: The name of the NZB which resulted in this file being downloaded (optional)
"""
# absolute path to the folder that is being processed
self.folder_path = ek.ek(os.path.dirname, ek.ek(os.path.abspath, file_path))
# full path to file
self.file_path = file_path
# file name only
self.file_name = ek.ek(os.path.basename, file_path)
# the name of the folder only
self.folder_name = ek.ek(os.path.basename, self.folder_path)
# name of the NZB that resulted in this folder
self.nzb_name = nzb_name
self.in_history = False
self.release_group = None
self.is_proper = False
self.good_results = {self.NZB_NAME: False,
self.FOLDER_NAME: False,
self.FILE_NAME: False}
self.log = ''
def _log(self, message, level=logger.MESSAGE):
"""
A wrapper for the internal logger which also keeps track of messages and saves them to a string for later.
message: The string to log (unicode)
level: The log level to use (optional)
"""
logger.log(message, level)
self.log += message + '\n'
def _checkForExistingFile(self, existing_file):
"""
Checks if a file exists already and if it does whether it's bigger or smaller than
the file we are post processing
existing_file: The file to compare to
Returns:
DOESNT_EXIST if the file doesn't exist
EXISTS_LARGER if the file exists and is larger than the file we are post processing
EXISTS_SMALLER if the file exists and is smaller than the file we are post processing
EXISTS_SAME if the file exists and is the same size as the file we are post processing
"""
if not existing_file:
self._log(u"There is no existing file so there's no worries about replacing it", logger.DEBUG)
return PostProcessor.DOESNT_EXIST
# if the new file exists, return the appropriate code depending on the size
if ek.ek(os.path.isfile, existing_file):
# see if it's bigger than our old file
if ek.ek(os.path.getsize, existing_file) > ek.ek(os.path.getsize, self.file_path):
self._log(u"File "+existing_file+" is larger than "+self.file_path, logger.DEBUG)
return PostProcessor.EXISTS_LARGER
elif ek.ek(os.path.getsize, existing_file) == ek.ek(os.path.getsize, self.file_path):
self._log(u"File "+existing_file+" is the same size as "+self.file_path, logger.DEBUG)
return PostProcessor.EXISTS_SAME
else:
self._log(u"File "+existing_file+" is smaller than "+self.file_path, logger.DEBUG)
return PostProcessor.EXISTS_SMALLER
else:
self._log(u"File "+existing_file+" doesn't exist so there's no worries about replacing it", logger.DEBUG)
return PostProcessor.DOESNT_EXIST
def _list_associated_files(self, file_path, subtitles_only=False):
"""
For a given file path searches for files with the same name but different extension and returns their absolute paths
file_path: The file to check for associated files
Returns: A list containing all files which are associated to the given file
"""
if not file_path:
return []
file_path_list = []
base_name = file_path.rpartition('.')[0]+'.'
# don't strip it all and use cwd by accident
if not base_name:
return []
# don't confuse glob with chars we didn't mean to use
base_name = re.sub(r'[\[\]\*\?]', r'[\g<0>]', base_name)
for associated_file_path in ek.ek(glob.glob, base_name+'*'):
# only add associated to list
if associated_file_path == file_path:
continue
# only list it if the only non-shared part is the extension or if it is a subtitle
if '.' in associated_file_path[len(base_name):] and not associated_file_path[len(associated_file_path)-3:] in common.subtitleExtensions:
continue
if subtitles_only and not associated_file_path[len(associated_file_path)-3:] in common.subtitleExtensions:
continue
file_path_list.append(associated_file_path)
return file_path_list
def _delete(self, file_path, associated_files=False):
"""
Deletes the file and optionally all associated files.
file_path: The file to delete
associated_files: True to delete all files which differ only by extension, False to leave them
"""
if not file_path:
return
# figure out which files we want to delete
file_list = [file_path]
if associated_files:
file_list = file_list + self._list_associated_files(file_path)
if not file_list:
self._log(u"There were no files associated with " + file_path + ", not deleting anything", logger.DEBUG)
return
# delete the file and any other files which we want to delete
for cur_file in file_list:
self._log(u"Deleting file " + cur_file, logger.DEBUG)
if ek.ek(os.path.isfile, cur_file):
#check first the read-only attribute
file_attribute = ek.ek(os.stat, cur_file)[0]
if (not file_attribute & stat.S_IWRITE):
# File is read-only, so make it writeable
self._log('Read only mode on file ' + cur_file + ' Will try to make it writeable', logger.DEBUG)
try:
ek.ek(os.chmod,cur_file,stat.S_IWRITE)
except:
self._log(u'Cannot change permissions of ' + cur_file, logger.WARNING)
ek.ek(os.remove, cur_file)
# do the library update for synoindex
notifiers.synoindex_notifier.deleteFile(cur_file)
def _combined_file_operation (self, file_path, new_path, new_base_name, associated_files=False, action=None, subtitles=False):
"""
Performs a generic operation (move or copy) on a file. Can rename the file as well as change its location,
and optionally move associated files too.
file_path: The full path of the media file to act on
new_path: Destination path where we want to move/copy the file to
new_base_name: The base filename (no extension) to use during the copy. Use None to keep the same name.
associated_files: Boolean, whether we should copy similarly-named files too
action: function that takes an old path and new path and does an operation with them (move/copy)
"""
if not action:
self._log(u"Must provide an action for the combined file operation", logger.ERROR)
return
file_list = [file_path]
if associated_files:
file_list = file_list + self._list_associated_files(file_path)
elif subtitles:
file_list = file_list + self._list_associated_files(file_path, True)
if not file_list:
self._log(u"There were no files associated with " + file_path + ", not moving anything", logger.DEBUG)
return
# deal with all files
for cur_file_path in file_list:
cur_file_name = ek.ek(os.path.basename, cur_file_path)
# get the extension
cur_extension = ek.ek(os.path.splitext, cur_file_path)[1][1:]
# check if file have subtitles language
if cur_extension in common.subtitleExtensions:
cur_lang = ek.ek(os.path.splitext, ek.ek(os.path.splitext, cur_file_path)[0])[1][1:]
if cur_lang in sickbeard.SUBTITLES_LANGUAGES:
cur_extension = cur_lang + '.' + cur_extension
# replace .nfo with .nfo-orig to avoid conflicts
if cur_extension == 'nfo':
cur_extension = 'nfo-orig'
# If new base name then convert name
if new_base_name:
new_file_name = new_base_name + '.' + cur_extension
# if we're not renaming we still want to change extensions sometimes
else:
new_file_name = helpers.replaceExtension(cur_file_name, cur_extension)
if sickbeard.SUBTITLES_DIR and cur_extension in common.subtitleExtensions:
subs_new_path = ek.ek(os.path.join, new_path, sickbeard.SUBTITLES_DIR)
dir_exists = helpers.makeDir(subs_new_path)
if not dir_exists:
logger.log(u"Unable to create subtitles folder "+subs_new_path, logger.ERROR)
else:
helpers.chmodAsParent(subs_new_path)
new_file_path = ek.ek(os.path.join, subs_new_path, new_file_name)
else:
new_file_path = ek.ek(os.path.join, new_path, new_file_name)
action(cur_file_path, new_file_path)
def _move(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False):
"""
file_path: The full path of the media file to move
new_path: Destination path where we want to move the file to
new_base_name: The base filename (no extension) to use during the move. Use None to keep the same name.
associated_files: Boolean, whether we should move similarly-named files too
"""
def _int_move(cur_file_path, new_file_path):
self._log(u"Moving file from "+cur_file_path+" to "+new_file_path, logger.DEBUG)
try:
helpers.moveFile(cur_file_path, new_file_path)
helpers.chmodAsParent(new_file_path)
except (IOError, OSError), e:
self._log("Unable to move file "+cur_file_path+" to "+new_file_path+": "+ex(e), logger.ERROR)
raise e
self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_move, subtitles=subtitles)
def _copy(self, file_path, new_path, new_base_name, associated_files=False, subtitles=False):
"""
file_path: The full path of the media file to copy
new_path: Destination path where we want to copy the file to
new_base_name: The base filename (no extension) to use during the copy. Use None to keep the same name.
associated_files: Boolean, whether we should copy similarly-named files too
"""
def _int_copy (cur_file_path, new_file_path):
self._log(u"Copying file from "+cur_file_path+" to "+new_file_path, logger.DEBUG)
try:
helpers.copyFile(cur_file_path, new_file_path)
helpers.chmodAsParent(new_file_path)
except (IOError, OSError), e:
logger.log("Unable to copy file "+cur_file_path+" to "+new_file_path+": "+ex(e), logger.ERROR)
raise e
self._combined_file_operation(file_path, new_path, new_base_name, associated_files, action=_int_copy, subtitles=subtitles)
def _history_lookup(self):
"""
Look up the NZB name in the history and see if it contains a record for self.nzb_name
Returns a (tvdb_id, season, []) tuple. The first two may be None if none were found.
"""
to_return = (None, None, [])
# if we don't have either of these then there's nothing to use to search the history for anyway
if not self.nzb_name and not self.folder_name:
self.in_history = False
return to_return
# make a list of possible names to use in the search
names = []
if self.nzb_name:
names.append(self.nzb_name)
if '.' in self.nzb_name:
names.append(self.nzb_name.rpartition(".")[0])
if self.folder_name:
names.append(self.folder_name)
myDB = db.DBConnection()
# search the database for a possible match and return immediately if we find one
for curName in names:
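            # dots, dashes and spaces are replaced with "_", the SQL LIKE
            # single-character wildcard, so the match ignores separator differences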
sql_results = myDB.select("SELECT * FROM history WHERE resource LIKE ?", [re.sub("[\.\-\ ]", "_", curName)])
if len(sql_results) == 0:
continue
tvdb_id = int(sql_results[0]["showid"])
season = int(sql_results[0]["season"])
self.in_history = True
to_return = (tvdb_id, season, [])
self._log("Found result in history: "+str(to_return), logger.DEBUG)
if curName == self.nzb_name:
self.good_results[self.NZB_NAME] = True
elif curName == self.folder_name:
self.good_results[self.FOLDER_NAME] = True
elif curName == self.file_name:
self.good_results[self.FILE_NAME] = True
return to_return
self.in_history = False
return to_return
def _analyze_name(self, name, file=True):
"""
Takes a name and tries to figure out a show, season, and episode from it.
name: A string which we want to analyze to determine show info from (unicode)
Returns a (tvdb_id, season, [episodes]) tuple. The first two may be None and episodes may be []
if none were found.
"""
logger.log(u"Analyzing name "+repr(name))
to_return = (None, None, [])
if not name:
return to_return
# parse the name to break it into show name, season, and episode
np = NameParser(file)
parse_result = np.parse(name)
self._log("Parsed "+name+" into "+str(parse_result).decode('utf-8'), logger.DEBUG)
if parse_result.air_by_date:
season = -1
episodes = [parse_result.air_date]
else:
season = parse_result.season_number
episodes = parse_result.episode_numbers
to_return = (None, season, episodes)
# do a scene reverse-lookup to get a list of all possible names
name_list = show_name_helpers.sceneToNormalShowNames(parse_result.series_name)
if not name_list:
return (None, season, episodes)
def _finalize(parse_result):
self.release_group = parse_result.release_group
# remember whether it's a proper
if parse_result.extra_info:
self.is_proper = re.search('(^|[\. _-])(proper|repack)([\. _-]|$)', parse_result.extra_info, re.I) != None
# if the result is complete then remember that for later
if parse_result.series_name and parse_result.season_number != None and parse_result.episode_numbers and parse_result.release_group:
test_name = os.path.basename(name)
if test_name == self.nzb_name:
self.good_results[self.NZB_NAME] = True
elif test_name == self.folder_name:
self.good_results[self.FOLDER_NAME] = True
elif test_name == self.file_name:
self.good_results[self.FILE_NAME] = True
else:
logger.log(u"Nothing was good, found "+repr(test_name)+" and wanted either "+repr(self.nzb_name)+", "+repr(self.folder_name)+", or "+repr(self.file_name))
else:
logger.log("Parse result not suficent(all folowing have to be set). will not save release name", logger.DEBUG)
logger.log("Parse result(series_name): " + str(parse_result.series_name), logger.DEBUG)
logger.log("Parse result(season_number): " + str(parse_result.season_number), logger.DEBUG)
logger.log("Parse result(episode_numbers): " + str(parse_result.episode_numbers), logger.DEBUG)
logger.log("Parse result(release_group): " + str(parse_result.release_group), logger.DEBUG)
# for each possible interpretation of that scene name
for cur_name in name_list:
self._log(u"Checking scene exceptions for a match on "+cur_name, logger.DEBUG)
scene_id = scene_exceptions.get_scene_exception_by_name(cur_name)
if scene_id:
self._log(u"Scene exception lookup got tvdb id "+str(scene_id)+u", using that", logger.DEBUG)
_finalize(parse_result)
return (scene_id, season, episodes)
# see if we can find the name directly in the DB, if so use it
for cur_name in name_list:
self._log(u"Looking up "+cur_name+u" in the DB", logger.DEBUG)
db_result = helpers.searchDBForShow(cur_name)
if db_result:
self._log(u"Lookup successful, using tvdb id "+str(db_result[0]), logger.DEBUG)
_finalize(parse_result)
return (int(db_result[0]), season, episodes)<|fim▁hole|> t = tvdb_api.Tvdb(custom_ui=classes.ShowListUI, **sickbeard.TVDB_API_PARMS)
self._log(u"Looking up name "+cur_name+u" on TVDB", logger.DEBUG)
showObj = t[cur_name]
except (tvdb_exceptions.tvdb_exception):
# if none found, search on all languages
try:
# There's gotta be a better way of doing this but we don't wanna
# change the language value elsewhere
ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
ltvdb_api_parms['search_all_languages'] = True
t = tvdb_api.Tvdb(custom_ui=classes.ShowListUI, **ltvdb_api_parms)
self._log(u"Looking up name "+cur_name+u" in all languages on TVDB", logger.DEBUG)
showObj = t[cur_name]
except (tvdb_exceptions.tvdb_exception, IOError):
pass
continue
except (IOError):
continue
self._log(u"Lookup successful, using tvdb id "+str(showObj["id"]), logger.DEBUG)
_finalize(parse_result)
return (int(showObj["id"]), season, episodes)
_finalize(parse_result)
return to_return
def _find_info(self):
"""
For a given file try to find the showid, season, and episode.
"""
tvdb_id = season = None
episodes = []
# try to look up the nzb in history
attempt_list = [self._history_lookup,
# try to analyze the nzb name
lambda: self._analyze_name(self.nzb_name),
# try to analyze the file name
lambda: self._analyze_name(self.file_name),
# try to analyze the dir name
lambda: self._analyze_name(self.folder_name),
# try to analyze the file+dir names together
lambda: self._analyze_name(self.file_path),
# try to analyze the dir + file name together as one name
lambda: self._analyze_name(self.folder_name + u' ' + self.file_name)
]
# attempt every possible method to get our info
for cur_attempt in attempt_list:
try:
(cur_tvdb_id, cur_season, cur_episodes) = cur_attempt()
except InvalidNameException, e:
logger.log(u"Unable to parse, skipping: "+ex(e), logger.DEBUG)
continue
# if we already did a successful history lookup then keep that tvdb_id value
if cur_tvdb_id and not (self.in_history and tvdb_id):
tvdb_id = cur_tvdb_id
if cur_season != None:
season = cur_season
if cur_episodes:
episodes = cur_episodes
# for air-by-date shows we need to look up the season/episode from tvdb
if season == -1 and tvdb_id and episodes:
self._log(u"Looks like this is an air-by-date show, attempting to convert the date to season/episode", logger.DEBUG)
# try to get language set for this show
tvdb_lang = None
try:
showObj = helpers.findCertainShow(sickbeard.showList, tvdb_id)
if(showObj != None):
tvdb_lang = showObj.lang
except exceptions.MultipleShowObjectsException:
raise #TODO: later I'll just log this, for now I want to know about it ASAP
try:
# There's gotta be a better way of doing this but we don't wanna
# change the language value elsewhere
ltvdb_api_parms = sickbeard.TVDB_API_PARMS.copy()
if tvdb_lang and not tvdb_lang == 'en':
ltvdb_api_parms['language'] = tvdb_lang
t = tvdb_api.Tvdb(**ltvdb_api_parms)
epObj = t[tvdb_id].airedOn(episodes[0])[0]
season = int(epObj["seasonnumber"])
episodes = [int(epObj["episodenumber"])]
self._log(u"Got season " + str(season) + " episodes " + str(episodes), logger.DEBUG)
except tvdb_exceptions.tvdb_episodenotfound, e:
self._log(u"Unable to find episode with date " + str(episodes[0]) + u" for show " + str(tvdb_id) + u", skipping", logger.DEBUG)
# we don't want to leave dates in the episode list if we couldn't convert them to real episode numbers
episodes = []
continue
except tvdb_exceptions.tvdb_error, e:
logger.log(u"Unable to contact TVDB: " + ex(e), logger.WARNING)
episodes = []
continue
# if there's no season then we can hopefully just use 1 automatically
elif season == None and tvdb_id:
myDB = db.DBConnection()
numseasonsSQlResult = myDB.select("SELECT COUNT(DISTINCT season) as numseasons FROM tv_episodes WHERE showid = ? and season != 0", [tvdb_id])
if int(numseasonsSQlResult[0][0]) == 1 and season == None:
self._log(u"Don't have a season number, but this show appears to only have 1 season, setting seasonnumber to 1...", logger.DEBUG)
season = 1
if tvdb_id and season != None and episodes:
return (tvdb_id, season, episodes)
return (tvdb_id, season, episodes)
def _get_ep_obj(self, tvdb_id, season, episodes):
"""
Retrieve the TVEpisode object requested.
tvdb_id: The TVDBID of the show (int)
season: The season of the episode (int)
episodes: A list of episodes to find (list of ints)
If the episode(s) can be found then a TVEpisode object with the correct related eps will
be instantiated and returned. If the episode can't be found then None will be returned.
"""
show_obj = None
self._log(u"Loading show object for tvdb_id "+str(tvdb_id), logger.DEBUG)
# find the show in the showlist
try:
show_obj = helpers.findCertainShow(sickbeard.showList, tvdb_id)
except exceptions.MultipleShowObjectsException:
raise #TODO: later I'll just log this, for now I want to know about it ASAP
# if we can't find the show then there's nothing we can really do
if not show_obj:
self._log(u"This show isn't in your list, you need to add it to SB before post-processing an episode", logger.ERROR)
raise exceptions.PostProcessingFailed()
root_ep = None
for cur_episode in episodes:
episode = int(cur_episode)
self._log(u"Retrieving episode object for " + str(season) + "x" + str(episode), logger.DEBUG)
# now that we've figured out which episode this file is just load it manually
try:
curEp = show_obj.getEpisode(season, episode)
except exceptions.EpisodeNotFoundException, e:
self._log(u"Unable to create episode: "+ex(e), logger.DEBUG)
raise exceptions.PostProcessingFailed()
# associate all the episodes together under a single root episode
if root_ep == None:
root_ep = curEp
root_ep.relatedEps = []
elif curEp not in root_ep.relatedEps:
root_ep.relatedEps.append(curEp)
return root_ep
def _get_quality(self, ep_obj):
"""
Determines the quality of the file that is being post processed, first by checking if it is directly
available in the TVEpisode's status or otherwise by parsing through the data available.
ep_obj: The TVEpisode object related to the file we are post processing
Returns: A quality value found in common.Quality
"""
ep_quality = common.Quality.UNKNOWN
# if there is a quality available in the status then we don't need to bother guessing from the filename
if ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER:
oldStatus, ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) #@UnusedVariable
if ep_quality != common.Quality.UNKNOWN:
self._log(u"The old status had a quality in it, using that: "+common.Quality.qualityStrings[ep_quality], logger.DEBUG)
return ep_quality
# nzb name is the most reliable if it exists, followed by folder name and lastly file name
name_list = [self.nzb_name, self.folder_name, self.file_name]
# search all possible names for our new quality, in case the file or dir doesn't have it
for cur_name in name_list:
# some stuff might be None at this point still
if not cur_name:
continue
ep_quality = common.Quality.nameQuality(cur_name)
self._log(u"Looking up quality for name "+cur_name+u", got "+common.Quality.qualityStrings[ep_quality], logger.DEBUG)
# if we find a good one then use it
if ep_quality != common.Quality.UNKNOWN:
logger.log(cur_name+u" looks like it has quality "+common.Quality.qualityStrings[ep_quality]+", using that", logger.DEBUG)
return ep_quality
# if we didn't get a quality from one of the names above, try assuming from each of the names
ep_quality = common.Quality.assumeQuality(self.file_name)
self._log(u"Guessing quality for name "+self.file_name+u", got "+common.Quality.qualityStrings[ep_quality], logger.DEBUG)
if ep_quality != common.Quality.UNKNOWN:
logger.log(self.file_name+u" looks like it has quality "+common.Quality.qualityStrings[ep_quality]+", using that", logger.DEBUG)
return ep_quality
return ep_quality
def _run_extra_scripts(self, ep_obj):
"""
Executes any extra scripts defined in the config.
ep_obj: The object to use when calling the extra script
"""
for curScriptName in sickbeard.EXTRA_SCRIPTS:
# generate a safe command line string to execute the script and provide all the parameters
script_cmd = shlex.split(curScriptName) + [ep_obj.location, self.file_path, str(ep_obj.show.tvdbid), str(ep_obj.season), str(ep_obj.episode), str(ep_obj.airdate)]
# use subprocess to run the command and capture output
self._log(u"Executing command "+str(script_cmd))
self._log(u"Absolute path to script: "+ek.ek(os.path.abspath, script_cmd[0]), logger.DEBUG)
try:
p = subprocess.Popen(script_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
out, err = p.communicate() #@UnusedVariable
self._log(u"Script result: "+str(out), logger.DEBUG)
except OSError, e:
self._log(u"Unable to run extra_script: "+ex(e))
def _is_priority(self, ep_obj, new_ep_quality):
"""
Determines if the episode is a priority download or not (if it is expected). Episodes which are expected
(snatched) or larger than the existing episode are priority, others are not.
ep_obj: The TVEpisode object in question
new_ep_quality: The quality of the episode that is being processed
Returns: True if the episode is priority, False otherwise.
"""
# if SB downloaded this on purpose then this is a priority download
if self.in_history or ep_obj.status in common.Quality.SNATCHED + common.Quality.SNATCHED_PROPER:
self._log(u"SB snatched this episode so I'm marking it as priority", logger.DEBUG)
return True
        # figure out what quality the existing episode is
        old_ep_status, old_ep_quality = common.Quality.splitCompositeStatus(ep_obj.status) #@UnusedVariable
        # if the user downloaded it manually and it's higher quality than the existing episode then it's priority
        if new_ep_quality > old_ep_quality and new_ep_quality != common.Quality.UNKNOWN:
            self._log(u"This was manually downloaded but it appears to be better quality than what we have so I'm marking it as priority", logger.DEBUG)
            return True
        # if the user downloaded it manually and it appears to be a PROPER/REPACK then it's priority
        if self.is_proper and new_ep_quality >= old_ep_quality:
self._log(u"This was manually downloaded but it appears to be a proper so I'm marking it as priority", logger.DEBUG)
return True
return False
def process(self):
"""
Post-process a given file
"""
self._log(u"Processing " + self.file_path + " (" + str(self.nzb_name) + ")")
if ek.ek(os.path.isdir, self.file_path):
self._log(u"File " + self.file_path + " seems to be a directory")
return False
for ignore_file in self.IGNORED_FILESTRINGS:
if ignore_file in self.file_path:
self._log(u"File " + self.file_path + " is ignored type, skipping")
return False
# reset per-file stuff
self.in_history = False
# try to find the file info
(tvdb_id, season, episodes) = self._find_info()
# if we don't have it then give up
if not tvdb_id or season == None or not episodes:
return False
# retrieve/create the corresponding TVEpisode objects
ep_obj = self._get_ep_obj(tvdb_id, season, episodes)
# get the quality of the episode we're processing
new_ep_quality = self._get_quality(ep_obj)
logger.log(u"Quality of the episode we're processing: " + str(new_ep_quality), logger.DEBUG)
# see if this is a priority download (is it snatched, in history, or PROPER)
priority_download = self._is_priority(ep_obj, new_ep_quality)
self._log(u"Is ep a priority download: " + str(priority_download), logger.DEBUG)
# set the status of the episodes
for curEp in [ep_obj] + ep_obj.relatedEps:
curEp.status = common.Quality.compositeStatus(common.SNATCHED, new_ep_quality)
# check for an existing file
existing_file_status = self._checkForExistingFile(ep_obj.location)
# if it's not priority then we don't want to replace smaller files in case it was a mistake
if not priority_download:
# if there's an existing file that we don't want to replace stop here
if existing_file_status in (PostProcessor.EXISTS_LARGER, PostProcessor.EXISTS_SAME):
self._log(u"File exists and we are not going to replace it because it's not smaller, quitting post-processing", logger.DEBUG)
return False
elif existing_file_status == PostProcessor.EXISTS_SMALLER:
self._log(u"File exists and is smaller than the new file so I'm going to replace it", logger.DEBUG)
elif existing_file_status != PostProcessor.DOESNT_EXIST:
self._log(u"Unknown existing file status. This should never happen, please log this as a bug.", logger.ERROR)
return False
# if the file is priority then we're going to replace it even if it exists
else:
self._log(u"This download is marked a priority download so I'm going to replace an existing file if I find one", logger.DEBUG)
# delete the existing file (and company)
for cur_ep in [ep_obj] + ep_obj.relatedEps:
try:
self._delete(cur_ep.location, associated_files=True)
# clean up any left over folders
if cur_ep.location:
helpers.delete_empty_folders(ek.ek(os.path.dirname, cur_ep.location), keep_dir=ep_obj.show._location)
except (OSError, IOError):
raise exceptions.PostProcessingFailed("Unable to delete the existing files")
# if the show directory doesn't exist then make it if allowed
if not ek.ek(os.path.isdir, ep_obj.show._location) and sickbeard.CREATE_MISSING_SHOW_DIRS:
self._log(u"Show directory doesn't exist, creating it", logger.DEBUG)
try:
ek.ek(os.mkdir, ep_obj.show._location)
# do the library update for synoindex
notifiers.synoindex_notifier.addFolder(ep_obj.show._location)
except (OSError, IOError):
raise exceptions.PostProcessingFailed("Unable to create the show directory: " + ep_obj.show._location)
# get metadata for the show (but not episode because it hasn't been fully processed)
ep_obj.show.writeMetadata(True)
# update the ep info before we rename so the quality & release name go into the name properly
for cur_ep in [ep_obj] + ep_obj.relatedEps:
with cur_ep.lock:
cur_release_name = None
# use the best possible representation of the release name
if self.good_results[self.NZB_NAME]:
cur_release_name = self.nzb_name
if cur_release_name.lower().endswith('.nzb'):
cur_release_name = cur_release_name.rpartition('.')[0]
elif self.good_results[self.FOLDER_NAME]:
cur_release_name = self.folder_name
elif self.good_results[self.FILE_NAME]:
cur_release_name = self.file_name
# take the extension off the filename, it's not needed
if '.' in self.file_name:
cur_release_name = self.file_name.rpartition('.')[0]
if cur_release_name:
self._log("Found release name " + cur_release_name, logger.DEBUG)
cur_ep.release_name = cur_release_name
else:
logger.log("good results: " + repr(self.good_results), logger.DEBUG)
cur_ep.status = common.Quality.compositeStatus(common.DOWNLOADED, new_ep_quality)
cur_ep.subtitles = []
cur_ep.subtitles_searchcount = 0
cur_ep.subtitles_lastsearch = '0001-01-01 00:00:00'
cur_ep.is_proper = self.is_proper
cur_ep.saveToDB()
# find the destination folder
try:
proper_path = ep_obj.proper_path()
proper_absolute_path = ek.ek(os.path.join, ep_obj.show.location, proper_path)
dest_path = ek.ek(os.path.dirname, proper_absolute_path)
except exceptions.ShowDirNotFoundException:
raise exceptions.PostProcessingFailed(u"Unable to post-process an episode if the show dir doesn't exist, quitting")
self._log(u"Destination folder for this episode: " + dest_path, logger.DEBUG)
# create any folders we need
helpers.make_dirs(dest_path)
# download subtitles
if sickbeard.USE_SUBTITLES and ep_obj.show.subtitles:
cur_ep.location = self.file_path
cur_ep.downloadSubtitles(force=True)
# figure out the base name of the resulting episode file
if sickbeard.RENAME_EPISODES:
orig_extension = self.file_name.rpartition('.')[-1]
new_base_name = ek.ek(os.path.basename, proper_path)
new_file_name = new_base_name + '.' + orig_extension
else:
# if we're not renaming then there's no new base name, we'll just use the existing name
new_base_name = None
new_file_name = self.file_name
try:
# move the episode and associated files to the show dir
if sickbeard.KEEP_PROCESSED_DIR:
self._copy(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
else:
self._move(self.file_path, dest_path, new_base_name, sickbeard.MOVE_ASSOCIATED_FILES, sickbeard.USE_SUBTITLES and ep_obj.show.subtitles)
except (OSError, IOError):
raise exceptions.PostProcessingFailed("Unable to move the files to their new home")
# put the new location in the database
for cur_ep in [ep_obj] + ep_obj.relatedEps:
with cur_ep.lock:
cur_ep.location = ek.ek(os.path.join, dest_path, new_file_name)
cur_ep.saveToDB()
# log it to history
history.logDownload(ep_obj, self.file_path, new_ep_quality, self.release_group)
# send notifications
notifiers.notify_download(ep_obj._format_pattern('%SN - %Sx%0E - %EN - %QN'))
# generate nfo/tbn
ep_obj.createMetaFiles()
ep_obj.saveToDB()
# do the library update for XBMC
notifiers.xbmc_notifier.update_library(ep_obj.show.name)
# do the library update for Plex
notifiers.plex_notifier.update_library()
# do the library update for NMJ
# nmj_notifier kicks off its library update when the notify_download is issued (inside notifiers)
# do the library update for Synology Indexer
notifiers.synoindex_notifier.addFile(ep_obj.location)
# do the library update for pyTivo
notifiers.pytivo_notifier.update_library(ep_obj)
# do the library update for Trakt
notifiers.trakt_notifier.update_library(ep_obj)
self._run_extra_scripts(ep_obj)
return True<|fim▁end|> |
# see if we can find the name with a TVDB lookup
for cur_name in name_list:
try: |
<|file_name|>template.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import re
class Templates:
TOKENS = re.compile('([A-Za-z]+|[^ ])')
SIMPLE = {
'l': '_n.l.ptb()',
'r': '_n.r.ptb()',
'<': 'addr(_n)',
'>': 'addl(_n)',
}
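    # compile() turns a template string such as "(NP l r)" into a Python lambda:
    # '(' label ... ')' and '[' ... ']' become PTB(...) node constructors, while
    # the single-character tokens expand to the expressions in SIMPLE above.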
def compile(self, template):
python = self.parse(self.TOKENS.findall(template))
return eval("lambda _n: %s" % python)
def parse(self, tokens):
t = tokens.pop(0)
if t in '([':
if t == '(':
label = "'%s'" % tokens.pop(0)
args = self.parse_args(tokens, ')')
            elif t == '[':
label = 'None'
args = self.parse_args(tokens, ']')
return 'PTB(_n, %s, %s)' % (label, ', '.join(args))
elif t in self.SIMPLE:
return self.SIMPLE[t]
else:
raise SyntaxError, "unknown token '%s'" % t
def parse_args(self, tokens, delimiter):
args = []
while tokens:
if tokens[0] == delimiter:<|fim▁hole|>
templates = Templates()
t = templates.compile("<")<|fim▁end|> | tokens.pop(0)
return args
args.append(self.parse(tokens))
raise SyntaxError, "missing closing '%s'" % delimiter |
<|file_name|>error.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>//
// Caribon is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published
// by the Free Software Foundation, either version 2.1 of the License, or
// (at your option) any later version.
//
// Caribon is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with Caribon. If not, see <http://www.gnu.org/licenses/>.
use std::error;
use std::result;
use std::fmt;
#[derive(Debug)]
/// Caribon error type (just a String currently)
pub struct Error {
pub content: String,
}
impl Error {
pub fn new(s: &str) -> Error {
Error { content: s.to_owned() }
}
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(&self.content)
}
}
impl error::Error for Error {
fn description(&self) -> &str {
&self.content
}
}
/// Caribon Result, used by some functions
pub type Result<T> = result::Result<T, Error>;<|fim▁end|> | // Copyright (C) 2015 Élisabeth HENRY.
//
// This file is part of Caribon. |
<|file_name|>metadados.js<|end_file_name|><|fim▁begin|>'use strict';
module.exports = {
controller: function (args) {
this.config = _.merge({
salvar: _.noop,
publicar: _.noop,
descartar: _.noop,
visualizar: _.noop,
editar: _.noop
}, args);
},
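    // Each button component below is only rendered when the corresponding
    // callback was actually supplied, i.e. it is no longer the _.noop default
    // set up by the controller above.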
view: function (ctrl) {
var salvarView = '';
if (ctrl.config.salvar !== _.noop) {
salvarView = m.component(require('cabecalho/salvar-button'), {
salvar: ctrl.config.salvar,
salvandoServico: ctrl.config.salvandoServico,
caiuSessao: ctrl.config.caiuSessao,
orgaoId: ctrl.config.orgaoId
});
}
var visualizarView = '';
if (ctrl.config.visualizar !== _.noop) {
visualizarView = m.component(require('cabecalho/visualizar-button'), {
visualizar: ctrl.config.visualizar,
salvandoServico: ctrl.config.salvandoServico,
caiuSessao: ctrl.config.caiuSessao
});
}
var publicarView = '';
if (ctrl.config.publicar !== _.noop) {
publicarView = m.component(require('cabecalho/publicar-view'), {
publicar: ctrl.config.publicar,
descartar: ctrl.config.descartar,
metadados: ctrl.config.cabecalho.metadados(),
salvandoServico: ctrl.config.salvandoServico,
caiuSessao: ctrl.config.caiuSessao,
orgaoId: ctrl.config.orgaoId
});
}
var editarView = '';
if (ctrl.config.editar !== _.noop) {
editarView = m.component(require('cabecalho/editar-button'), {
editar: ctrl.config.editar
});
}
<|fim▁hole|> m.component(require('componentes/status-conexao'), {
salvandoServico: ctrl.config.salvandoServico,
caiuSessao: ctrl.config.caiuSessao
}),
salvarView,
visualizarView,
publicarView,
editarView,
]);
}
};<|fim▁end|> | return m('#metadados', [ |
<|file_name|>spawn.js<|end_file_name|><|fim▁begin|>'use strict'
const { spawn } = require('@malept/cross-spawn-promise')
const which = require('which')
function updateExecutableMissingException (err, hasLogger) {
if (hasLogger && err.code === 'ENOENT' && err.syscall === 'spawn mono') {
let installer
let pkg
if (process.platform === 'darwin') {
installer = 'brew'
pkg = 'mono'
} else if (which.sync('dnf', { nothrow: true })) {
installer = 'dnf'
pkg = 'mono-core'
} else { // assume apt-based Linux distro
installer = 'apt'
pkg = 'mono-runtime'
}
err.message = `Your system is missing the ${pkg} package. Try, e.g. '${installer} install ${pkg}'`
}
}
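// On every platform except Windows the target executable is run through the
// mono runtime, so the original command becomes the first argument and `mono`
// takes its place as the spawned executable.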
module.exports = async function (cmd, args, logger) {
if (process.platform !== 'win32') {
args.unshift(cmd)
cmd = 'mono'
}
return spawn(cmd, args, {
logger,
updateErrorCallback: updateExecutableMissingException<|fim▁hole|><|fim▁end|> | })
} |
<|file_name|>bcm_host.py<|end_file_name|><|fim▁begin|># vim: set et sw=4 sts=4 fileencoding=utf-8:
#
# Python header conversion
# Copyright (c) 2013,2014 Dave Hughes <[email protected]>
#
# Original headers
# Copyright (c) 2012, Broadcom Europe Ltd
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import (
unicode_literals,
print_function,
division,
absolute_import,
)
# Make Py2's str equivalent to Py3's
str = type('')
import ctypes as ct
import warnings
_lib = ct.CDLL('libbcm_host.so')
# bcm_host.h #################################################################
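# bcm_host_init() must be called before any other VideoCore call is made;
# bcm_host_deinit() releases the host interface again when the program is done.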
bcm_host_init = _lib.bcm_host_init
bcm_host_init.argtypes = []
bcm_host_init.restype = None
bcm_host_deinit = _lib.bcm_host_deinit
bcm_host_deinit.argtypes = []
bcm_host_deinit.restype = None
<|fim▁hole|><|fim▁end|> | graphics_get_display_size = _lib.graphics_get_display_size
graphics_get_display_size.argtypes = [ct.c_uint16, ct.POINTER(ct.c_uint32), ct.POINTER(ct.c_uint32)]
graphics_get_display_size.restype = ct.c_int32 |
<|file_name|>borrowck-anon-fields-variant.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at<|fim▁hole|>// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Tests that we are able to distinguish when loans borrow different
// anonymous fields of an enum variant vs the same anonymous field.
enum Foo {
X, Y(uint, uint)
}
fn distinct_variant() {
let mut y = Foo::Y(1, 2);
let a = match y {
Foo::Y(ref mut a, _) => a,
Foo::X => panic!()
};
let b = match y {
Foo::Y(_, ref mut b) => b,
Foo::X => panic!()
};
*a += 1;
*b += 1;
}
fn same_variant() {
let mut y = Foo::Y(1, 2);
let a = match y {
Foo::Y(ref mut a, _) => a,
Foo::X => panic!()
};
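    // Unlike distinct_variant() above, both matches here borrow the *same*
    // anonymous field (index 0) of Foo::Y, so the second mutable borrow below
    // conflicts with `a`, which is still in use afterwards.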
let b = match y {
Foo::Y(ref mut b, _) => b, //~ ERROR cannot borrow
Foo::X => panic!()
};
*a += 1;
*b += 1;
}
fn main() {
}<|fim▁end|> | |
<|file_name|>PeristentBibTexEntry.java<|end_file_name|><|fim▁begin|>/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package de.comci.bigbib;
import com.mongodb.BasicDBObject;
import com.mongodb.DBObject;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import org.bson.types.ObjectId;
import org.codehaus.jackson.annotate.JsonProperty;
import org.jbibtex.BibTeXEntry;
import org.jbibtex.Key;
import org.jbibtex.StringValue;
import org.jbibtex.Value;
/**
*
* @author Sebastian
*/
@XmlRootElement()
@XmlAccessorType(XmlAccessType.NONE)
public class PeristentBibTexEntry extends BibTeXEntry {
private ObjectId id;
public PeristentBibTexEntry(Key type, Key key) {
super(type, key);
}
static Map<String, Key> keyMapping = new HashMap<String, Key>();
static {
keyMapping.put("address", BibTeXEntry.KEY_ADDRESS);
keyMapping.put("annote", BibTeXEntry.KEY_ANNOTE);
keyMapping.put("author", BibTeXEntry.KEY_AUTHOR);
keyMapping.put("booktitle", BibTeXEntry.KEY_BOOKTITLE);
keyMapping.put("chapter", BibTeXEntry.KEY_CHAPTER);
keyMapping.put("crossref", BibTeXEntry.KEY_CROSSREF);
keyMapping.put("doi", BibTeXEntry.KEY_DOI);
keyMapping.put("edition", BibTeXEntry.KEY_EDITION);
keyMapping.put("editor", BibTeXEntry.KEY_EDITOR);
keyMapping.put("eprint", BibTeXEntry.KEY_EPRINT);
keyMapping.put("howpublished", BibTeXEntry.KEY_HOWPUBLISHED);
keyMapping.put("institution", BibTeXEntry.KEY_INSTITUTION);
keyMapping.put("journal", BibTeXEntry.KEY_JOURNAL);
keyMapping.put("key", BibTeXEntry.KEY_KEY);
keyMapping.put("month", BibTeXEntry.KEY_MONTH);
keyMapping.put("note", BibTeXEntry.KEY_NOTE);
keyMapping.put("number", BibTeXEntry.KEY_NUMBER);
keyMapping.put("organization", BibTeXEntry.KEY_ORGANIZATION);
keyMapping.put("pages", BibTeXEntry.KEY_PAGES);
keyMapping.put("publisher", BibTeXEntry.KEY_PUBLISHER);
keyMapping.put("school", BibTeXEntry.KEY_SCHOOL);
keyMapping.put("series", BibTeXEntry.KEY_SERIES);
keyMapping.put("title", BibTeXEntry.KEY_TITLE);
keyMapping.put("type", BibTeXEntry.KEY_TYPE);
keyMapping.put("url", BibTeXEntry.KEY_URL);
keyMapping.put("volume", BibTeXEntry.KEY_VOLUME);
keyMapping.put("year", BibTeXEntry.KEY_YEAR);
}
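    /**
     * Rebuilds a BibTeX entry from its MongoDB document: "type" and "key" become
     * the entry keys, "_id" is kept for later lookups, and every attribute of the
     * nested "fields" object is mapped back onto the matching BibTeX field (using
     * a plain Key when the name is not one of the standard fields above).
     */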
public PeristentBibTexEntry(DBObject persistentObject) {
super(
new Key((String) persistentObject.get("type")),
new Key((String) persistentObject.get("key"))
);
BasicDBObject fields = (BasicDBObject) persistentObject.get("fields");
id = (ObjectId) persistentObject.get("_id");
for (String key : fields.keySet()) {
if (keyMapping.containsKey(key)) {
this.addField(keyMapping.get(key), new StringValue(fields.getString(key), StringValue.Style.BRACED));
} else {
this.addField(new Key(key), new StringValue(fields.getString(key), StringValue.Style.BRACED));
}
}
}
@JsonProperty("id")
@XmlElement(name="id")
public String getStringId() {
return id.toString();
}
@JsonProperty("key")
@XmlElement(name="key")
public String getStringKey() {
return super.getKey().getValue();
}
<|fim▁hole|> public String getStringType() {
return super.getType().getValue();
}
@JsonProperty("fields")
@XmlElement(name="fields")
public Map<String, String> getStringFields() {
Map<String, String> fields = new HashMap<String, String>();
for (Entry<Key,Value> e : getFields().entrySet()) {
if (e.getKey() != null && e.getValue() != null) {
fields.put(e.getKey().getValue(), e.getValue().toUserString());
}
}
return fields;
}
@Override
public String toString() {
return String.format("[%s:%s] %s: %s (%s)",
this.getType(),
this.getKey(),
this.getField(KEY_AUTHOR).toUserString(),
this.getField(KEY_TITLE).toUserString(),
this.getField(KEY_YEAR).toUserString());
}
}<|fim▁end|> | @JsonProperty("type")
@XmlElement(name="type") |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![feature(plugin, asm)]
#![no_std]
#![plugin(macro_zinc)]
extern crate zinc;
use zinc::hal::mem_init::{init_data, init_stack};
#[zinc_main]
fn run() {
init_data();<|fim▁hole|>}<|fim▁end|> | init_stack();
unsafe { asm!("nop") } |
<|file_name|>openssl_certificate.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2016-2017, Yanis Guenane <[email protected]>
# Copyright: (c) 2017, Markus Teufelberger <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: openssl_certificate
version_added: "2.4"
short_description: Generate and/or check OpenSSL certificates
description:
- This module allows one to (re)generate OpenSSL certificates.
    - It implements a notion of provider (i.e. C(selfsigned), C(ownca), C(acme), C(assertonly))
for your certificate.
- The C(assertonly) provider is intended for use cases where one is only interested in
checking properties of a supplied certificate.
    - The C(ownca) provider is intended for generating OpenSSL certificates signed with your own
CA (Certificate Authority) certificate (self-signed certificate).
- Many properties that can be specified in this module are for validation of an
existing or newly generated certificate. The proper place to specify them, if you
want to receive a certificate with these properties is a CSR (Certificate Signing Request).
- "Please note that the module regenerates existing certificate if it doesn't match the module's
options, or if it seems to be corrupt. If you are concerned that this could overwrite
your existing certificate, consider using the I(backup) option."
- It uses the pyOpenSSL or cryptography python library to interact with OpenSSL.
- If both the cryptography and PyOpenSSL libraries are available (and meet the minimum version requirements)
cryptography will be preferred as a backend over PyOpenSSL (unless the backend is forced with C(select_crypto_backend))
requirements:
- PyOpenSSL >= 0.15 or cryptography >= 1.6 (if using C(selfsigned) or C(assertonly) provider)
- acme-tiny (if using the C(acme) provider)
author:
- Yanis Guenane (@Spredzy)
- Markus Teufelberger (@MarkusTeufelberger)
options:
state:
description:
- Whether the certificate should exist or not, taking action if the state is different from what is stated.
type: str
default: present
choices: [ absent, present ]
path:
description:
- Remote absolute path where the generated certificate file should be created or is already located.
type: path
required: true
provider:
description:
- Name of the provider to use to generate/retrieve the OpenSSL certificate.
- The C(assertonly) provider will not generate files and fail if the certificate file is missing.
type: str
required: true
choices: [ acme, assertonly, ownca, selfsigned ]
force:
description:
- Generate the certificate, even if it already exists.
type: bool
default: no
csr_path:
description:
- Path to the Certificate Signing Request (CSR) used to generate this certificate.
- This is not required in C(assertonly) mode.
type: path
privatekey_path:
description:
- Path to the private key to use when signing the certificate.
type: path
privatekey_passphrase:
description:
- The passphrase for the I(privatekey_path).
- This is required if the private key is password protected.
type: str
selfsigned_version:
description:
- Version of the C(selfsigned) certificate.
- Nowadays it should almost always be C(3).
- This is only used by the C(selfsigned) provider.
type: int
default: 3
version_added: "2.5"
selfsigned_digest:
description:
- Digest algorithm to be used when self-signing the certificate.
- This is only used by the C(selfsigned) provider.
type: str
default: sha256
selfsigned_not_before:
description:
- The point in time the certificate is valid from.
- Time can be specified either as relative time or as absolute timestamp.
- Time will always be interpreted as UTC.
- Valid format is C([+-]timespec | ASN.1 TIME) where timespec can be an integer
+ C([w | d | h | m | s]) (e.g. C(+32w1d2h).
- Note that if using relative time this module is NOT idempotent.
- If this value is not specified, the certificate will start being valid from now.
- This is only used by the C(selfsigned) provider.
type: str
default: +0s
aliases: [ selfsigned_notBefore ]
selfsigned_not_after:
description:
- The point in time at which the certificate stops being valid.
- Time can be specified either as relative time or as absolute timestamp.
- Time will always be interpreted as UTC.
- Valid format is C([+-]timespec | ASN.1 TIME) where timespec can be an integer
+ C([w | d | h | m | s]) (e.g. C(+32w1d2h).
- Note that if using relative time this module is NOT idempotent.
- If this value is not specified, the certificate will stop being valid 10 years from now.
- This is only used by the C(selfsigned) provider.
type: str
default: +3650d
aliases: [ selfsigned_notAfter ]
ownca_path:
description:
- Remote absolute path of the CA (Certificate Authority) certificate.
- This is only used by the C(ownca) provider.
type: path
version_added: "2.7"
ownca_privatekey_path:
description:
- Path to the CA (Certificate Authority) private key to use when signing the certificate.
- This is only used by the C(ownca) provider.
type: path
version_added: "2.7"
ownca_privatekey_passphrase:
description:
- The passphrase for the I(ownca_privatekey_path).
- This is only used by the C(ownca) provider.
type: str
version_added: "2.7"
ownca_digest:
description:
- The digest algorithm to be used for the C(ownca) certificate.
- This is only used by the C(ownca) provider.
type: str
default: sha256
version_added: "2.7"
ownca_version:
description:
- The version of the C(ownca) certificate.
- Nowadays it should almost always be C(3).
- This is only used by the C(ownca) provider.
type: int
default: 3
version_added: "2.7"
ownca_not_before:
description:
- The point in time the certificate is valid from.
- Time can be specified either as relative time or as absolute timestamp.
- Time will always be interpreted as UTC.
- Valid format is C([+-]timespec | ASN.1 TIME) where timespec can be an integer
+ C([w | d | h | m | s]) (e.g. C(+32w1d2h).
- Note that if using relative time this module is NOT idempotent.
- If this value is not specified, the certificate will start being valid from now.
- This is only used by the C(ownca) provider.
type: str
default: +0s
version_added: "2.7"
ownca_not_after:
description:
- The point in time at which the certificate stops being valid.
- Time can be specified either as relative time or as absolute timestamp.
- Time will always be interpreted as UTC.
- Valid format is C([+-]timespec | ASN.1 TIME) where timespec can be an integer
+ C([w | d | h | m | s]) (e.g. C(+32w1d2h).
- Note that if using relative time this module is NOT idempotent.
- If this value is not specified, the certificate will stop being valid 10 years from now.
- This is only used by the C(ownca) provider.
type: str
default: +3650d
version_added: "2.7"
acme_accountkey_path:
description:
- The path to the accountkey for the C(acme) provider.
- This is only used by the C(acme) provider.
type: path
acme_challenge_path:
description:
- The path to the ACME challenge directory that is served on U(http://<HOST>:80/.well-known/acme-challenge/)
- This is only used by the C(acme) provider.
type: path
acme_chain:
description:
- Include the intermediate certificate to the generated certificate
- This is only used by the C(acme) provider.
- Note that this is only available for older versions of C(acme-tiny).
New versions include the chain automatically, and setting I(acme_chain) to C(yes) results in an error.
type: bool
default: no
version_added: "2.5"
signature_algorithms:
description:
- A list of algorithms that you would accept the certificate to be signed with
(e.g. ['sha256WithRSAEncryption', 'sha512WithRSAEncryption']).
- This is only used by the C(assertonly) provider.
type: list
issuer:
description:
- The key/value pairs that must be present in the issuer name field of the certificate.
- If you need to specify more than one value with the same key, use a list as value.
- This is only used by the C(assertonly) provider.
type: dict
issuer_strict:
description:
- If set to C(yes), the I(issuer) field must contain only these values.
- This is only used by the C(assertonly) provider.
type: bool
default: no
version_added: "2.5"
subject:
description:
- The key/value pairs that must be present in the subject name field of the certificate.
- If you need to specify more than one value with the same key, use a list as value.
- This is only used by the C(assertonly) provider.
type: dict
subject_strict:
description:
- If set to C(yes), the I(subject) field must contain only these values.
- This is only used by the C(assertonly) provider.
type: bool
default: no
version_added: "2.5"
has_expired:
description:
- Checks if the certificate is expired/not expired at the time the module is executed.
- This is only used by the C(assertonly) provider.
type: bool
default: no
version:
description:
- The version of the certificate.
- Nowadays it should almost always be 3.
- This is only used by the C(assertonly) provider.
type: int
valid_at:
description:
- The certificate must be valid at this point in time.
- The timestamp is formatted as an ASN.1 TIME.
- This is only used by the C(assertonly) provider.
type: str
invalid_at:
description:
- The certificate must be invalid at this point in time.
- The timestamp is formatted as an ASN.1 TIME.
- This is only used by the C(assertonly) provider.
type: str
not_before:
description:
- The certificate must start to become valid at this point in time.
- The timestamp is formatted as an ASN.1 TIME.
- This is only used by the C(assertonly) provider.
type: str
aliases: [ notBefore ]
not_after:
description:
- The certificate must expire at this point in time.
- The timestamp is formatted as an ASN.1 TIME.
- This is only used by the C(assertonly) provider.
type: str
aliases: [ notAfter ]
valid_in:
description:
- The certificate must still be valid at this relative time offset from now.
- Valid format is C([+-]timespec | number_of_seconds) where timespec can be an integer
              + C([w | d | h | m | s]) (e.g. C(+32w1d2h)).
- Note that if using this parameter, this module is NOT idempotent.
- This is only used by the C(assertonly) provider.
type: str
key_usage:
description:
- The I(key_usage) extension field must contain all these values.
- This is only used by the C(assertonly) provider.
type: list
aliases: [ keyUsage ]
key_usage_strict:
description:
- If set to C(yes), the I(key_usage) extension field must contain only these values.
- This is only used by the C(assertonly) provider.
type: bool
default: no
aliases: [ keyUsage_strict ]
extended_key_usage:<|fim▁hole|> - This is only used by the C(assertonly) provider.
type: list
aliases: [ extendedKeyUsage ]
extended_key_usage_strict:
description:
- If set to C(yes), the I(extended_key_usage) extension field must contain only these values.
- This is only used by the C(assertonly) provider.
type: bool
default: no
aliases: [ extendedKeyUsage_strict ]
subject_alt_name:
description:
- The I(subject_alt_name) extension field must contain these values.
- This is only used by the C(assertonly) provider.
type: list
aliases: [ subjectAltName ]
subject_alt_name_strict:
description:
- If set to C(yes), the I(subject_alt_name) extension field must contain only these values.
- This is only used by the C(assertonly) provider.
type: bool
default: no
aliases: [ subjectAltName_strict ]
select_crypto_backend:
description:
- Determines which crypto backend to use.
- The default choice is C(auto), which tries to use C(cryptography) if available, and falls back to C(pyopenssl).
- If set to C(pyopenssl), will try to use the L(pyOpenSSL,https://pypi.org/project/pyOpenSSL/) library.
- If set to C(cryptography), will try to use the L(cryptography,https://cryptography.io/) library.
type: str
default: auto
choices: [ auto, cryptography, pyopenssl ]
version_added: "2.8"
backup:
description:
- Create a backup file including a timestamp so you can get the original
certificate back if you overwrote it with a new one by accident.
- This is not used by the C(assertonly) provider.
type: bool
default: no
version_added: "2.8"
extends_documentation_fragment: files
notes:
- All ASN.1 TIME values should be specified following the YYYYMMDDHHMMSSZ pattern.
    - Dates specified should be UTC. Minutes and seconds are mandatory.
    - For security reasons, when you use the C(ownca) provider, you should NOT run M(openssl_certificate) on
a target machine, but on a dedicated CA machine. It is recommended not to store the CA private key
on the target machine. Once signed, the certificate can be moved to the target machine.
seealso:
- module: openssl_csr
- module: openssl_dhparam
- module: openssl_pkcs12
- module: openssl_privatekey
- module: openssl_publickey
'''
EXAMPLES = r'''
- name: Generate a Self Signed OpenSSL certificate
openssl_certificate:
path: /etc/ssl/crt/ansible.com.crt
privatekey_path: /etc/ssl/private/ansible.com.pem
csr_path: /etc/ssl/csr/ansible.com.csr
provider: selfsigned
- name: Generate an OpenSSL certificate signed with your own CA certificate
openssl_certificate:
path: /etc/ssl/crt/ansible.com.crt
csr_path: /etc/ssl/csr/ansible.com.csr
ownca_path: /etc/ssl/crt/ansible_CA.crt
ownca_privatekey_path: /etc/ssl/private/ansible_CA.pem
provider: ownca
- name: Generate a Let's Encrypt Certificate
openssl_certificate:
path: /etc/ssl/crt/ansible.com.crt
csr_path: /etc/ssl/csr/ansible.com.csr
provider: acme
acme_accountkey_path: /etc/ssl/private/ansible.com.pem
acme_challenge_path: /etc/ssl/challenges/ansible.com/
- name: Force (re-)generate a new Let's Encrypt Certificate
openssl_certificate:
path: /etc/ssl/crt/ansible.com.crt
csr_path: /etc/ssl/csr/ansible.com.csr
provider: acme
acme_accountkey_path: /etc/ssl/private/ansible.com.pem
acme_challenge_path: /etc/ssl/challenges/ansible.com/
force: yes
# Examples for some checks one could use the assertonly provider for:
# How to use the assertonly provider to implement and trigger your own custom certificate generation workflow:
- name: Check if a certificate is currently still valid, ignoring failures
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
has_expired: no
ignore_errors: yes
register: validity_check
- name: Run custom task(s) to get a new, valid certificate in case the initial check failed
command: superspecialSSL recreate /etc/ssl/crt/example.com.crt
when: validity_check.failed
- name: Check the new certificate again for validity with the same parameters, this time failing the play if it is still invalid
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
has_expired: no
when: validity_check.failed
# Some other checks that assertonly could be used for:
- name: Verify that an existing certificate was issued by the Let's Encrypt CA and is currently still valid
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
issuer:
O: Let's Encrypt
has_expired: no
- name: Ensure that a certificate uses a modern signature algorithm (no SHA1, MD5 or DSA)
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
signature_algorithms:
- sha224WithRSAEncryption
- sha256WithRSAEncryption
- sha384WithRSAEncryption
- sha512WithRSAEncryption
- sha224WithECDSAEncryption
- sha256WithECDSAEncryption
- sha384WithECDSAEncryption
- sha512WithECDSAEncryption
- name: Ensure that the existing certificate belongs to the specified private key
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
privatekey_path: /etc/ssl/private/example.com.pem
provider: assertonly
- name: Ensure that the existing certificate is still valid at the winter solstice 2017
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
valid_at: 20171221162800Z
- name: Ensure that the existing certificate is still valid 2 weeks (1209600 seconds) from now
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
valid_in: 1209600
- name: Ensure that the existing certificate is only used for digital signatures and encrypting other keys
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
key_usage:
- digitalSignature
- keyEncipherment
key_usage_strict: true
- name: Ensure that the existing certificate can be used for client authentication
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
extended_key_usage:
- clientAuth
- name: Ensure that the existing certificate can only be used for client authentication and time stamping
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
extended_key_usage:
- clientAuth
- 1.3.6.1.5.5.7.3.8
extended_key_usage_strict: true
- name: Ensure that the existing certificate has a certain domain in its subjectAltName
openssl_certificate:
path: /etc/ssl/crt/example.com.crt
provider: assertonly
subject_alt_name:
- www.example.com
- test.example.com
'''
RETURN = r'''
filename:
description: Path to the generated Certificate
returned: changed or success
type: str
sample: /etc/ssl/crt/www.ansible.com.crt
backup_file:
description: Name of backup file created.
returned: changed and if I(backup) is C(yes)
type: str
sample: /path/to/www.ansible.com.crt.2019-03-09@11:22~
'''
from random import randint
import abc
import datetime
import os
import traceback
from distutils.version import LooseVersion
from ansible.module_utils import crypto as crypto_utils
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils._text import to_native, to_bytes, to_text
from ansible.module_utils.compat import ipaddress as compat_ipaddress
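# Minimum backend library versions supported by this module; used below when auto-selecting a crypto backend.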
MINIMAL_CRYPTOGRAPHY_VERSION = '1.6'
MINIMAL_PYOPENSSL_VERSION = '0.15'
PYOPENSSL_IMP_ERR = None
try:
import OpenSSL
from OpenSSL import crypto
PYOPENSSL_VERSION = LooseVersion(OpenSSL.__version__)
except ImportError:
PYOPENSSL_IMP_ERR = traceback.format_exc()
PYOPENSSL_FOUND = False
else:
PYOPENSSL_FOUND = True
CRYPTOGRAPHY_IMP_ERR = None
try:
import cryptography
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import Encoding
from cryptography.x509 import NameAttribute, Name
CRYPTOGRAPHY_VERSION = LooseVersion(cryptography.__version__)
except ImportError:
CRYPTOGRAPHY_IMP_ERR = traceback.format_exc()
CRYPTOGRAPHY_FOUND = False
else:
CRYPTOGRAPHY_FOUND = True
class CertificateError(crypto_utils.OpenSSLObjectError):
pass
class Certificate(crypto_utils.OpenSSLObject):
def __init__(self, module, backend):
super(Certificate, self).__init__(
module.params['path'],
module.params['state'],
module.params['force'],
module.check_mode
)
self.provider = module.params['provider']
self.privatekey_path = module.params['privatekey_path']
self.privatekey_passphrase = module.params['privatekey_passphrase']
self.csr_path = module.params['csr_path']
self.cert = None
self.privatekey = None
self.csr = None
self.backend = backend
self.module = module
self.backup = module.params['backup']
self.backup_file = None
def get_relative_time_option(self, input_string, input_name):
"""Return an ASN1 formatted string if a relative timespec
or an ASN1 formatted string is provided."""
result = input_string
if result.startswith("+") or result.startswith("-"):
result_datetime = crypto_utils.convert_relative_to_datetime(
result)
if self.backend == 'pyopenssl':
return result_datetime.strftime("%Y%m%d%H%M%SZ")
elif self.backend == 'cryptography':
return result_datetime
if result is None:
raise CertificateError(
'The timespec "%s" for %s is not valid' %
                (input_string, input_name))
if self.backend == 'cryptography':
for date_fmt in ['%Y%m%d%H%M%SZ', '%Y%m%d%H%MZ', '%Y%m%d%H%M%S%z', '%Y%m%d%H%M%z']:
try:
result = datetime.datetime.strptime(input_string, date_fmt)
break
except ValueError:
pass
if not isinstance(result, datetime.datetime):
raise CertificateError(
'The time spec "%s" for %s is invalid' %
(input_string, input_name)
)
return result
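    # Check that the certificate's public key matches the configured private key
    # (TLS context check with pyopenssl, public number comparison with cryptography).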
def _validate_privatekey(self):
if self.backend == 'pyopenssl':
ctx = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_2_METHOD)
ctx.use_privatekey(self.privatekey)
ctx.use_certificate(self.cert)
try:
ctx.check_privatekey()
return True
except OpenSSL.SSL.Error:
return False
elif self.backend == 'cryptography':
return self.cert.public_key().public_numbers() == self.privatekey.public_key().public_numbers()
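    # Check that the certificate still matches the CSR: signature, subject and extensions must agree.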
def _validate_csr(self):
if self.backend == 'pyopenssl':
# Verify that CSR is signed by certificate's private key
try:
self.csr.verify(self.cert.get_pubkey())
except OpenSSL.crypto.Error:
return False
# Check subject
if self.csr.get_subject() != self.cert.get_subject():
return False
# Check extensions
csr_extensions = self.csr.get_extensions()
cert_extension_count = self.cert.get_extension_count()
if len(csr_extensions) != cert_extension_count:
return False
for extension_number in range(0, cert_extension_count):
cert_extension = self.cert.get_extension(extension_number)
csr_extension = filter(lambda extension: extension.get_short_name() == cert_extension.get_short_name(), csr_extensions)
if cert_extension.get_data() != list(csr_extension)[0].get_data():
return False
return True
elif self.backend == 'cryptography':
# Verify that CSR is signed by certificate's private key
if not self.csr.is_signature_valid:
return False
if self.csr.public_key().public_numbers() != self.cert.public_key().public_numbers():
return False
# Check subject
if self.csr.subject != self.cert.subject:
return False
# Check extensions
cert_exts = self.cert.extensions
csr_exts = self.csr.extensions
if len(cert_exts) != len(csr_exts):
return False
for cert_ext in cert_exts:
try:
csr_ext = csr_exts.get_extension_for_oid(cert_ext.oid)
if cert_ext != csr_ext:
return False
except cryptography.x509.ExtensionNotFound as dummy:
return False
return True
def remove(self, module):
if self.backup:
self.backup_file = module.backup_local(self.path)
super(Certificate, self).remove(module)
def check(self, module, perms_required=True):
"""Ensure the resource is in its desired state."""
state_and_perms = super(Certificate, self).check(module, perms_required)
if not state_and_perms:
return False
try:
self.cert = crypto_utils.load_certificate(self.path, backend=self.backend)
except Exception as dummy:
return False
if self.privatekey_path:
try:
self.privatekey = crypto_utils.load_privatekey(
self.privatekey_path,
self.privatekey_passphrase,
backend=self.backend
)
except crypto_utils.OpenSSLBadPassphraseError as exc:
raise CertificateError(exc)
if not self._validate_privatekey():
return False
if self.csr_path:
self.csr = crypto_utils.load_certificate_request(self.csr_path, backend=self.backend)
if not self._validate_csr():
return False
return True
class CertificateAbsent(Certificate):
def __init__(self, module):
super(CertificateAbsent, self).__init__(module, 'cryptography') # backend doesn't matter
def generate(self, module):
pass
def dump(self, check_mode=False):
# Use only for absent
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path
}
if self.backup_file:
result['backup_file'] = self.backup_file
return result
class SelfSignedCertificateCryptography(Certificate):
"""Generate the self-signed certificate, using the cryptography backend"""
def __init__(self, module):
super(SelfSignedCertificateCryptography, self).__init__(module, 'cryptography')
self.notBefore = self.get_relative_time_option(module.params['selfsigned_not_before'], 'selfsigned_not_before')
self.notAfter = self.get_relative_time_option(module.params['selfsigned_not_after'], 'selfsigned_not_after')
self.digest = crypto_utils.select_message_digest(module.params['selfsigned_digest'])
self.version = module.params['selfsigned_version']
self.serial_number = x509.random_serial_number()
self.csr = crypto_utils.load_certificate_request(self.csr_path, backend=self.backend)
self._module = module
try:
self.privatekey = crypto_utils.load_privatekey(
self.privatekey_path, self.privatekey_passphrase, backend=self.backend
)
except crypto_utils.OpenSSLBadPassphraseError as exc:
module.fail_json(msg=to_native(exc))
if self.digest is None:
raise CertificateError(
'The digest %s is not supported with the cryptography backend' % module.params['selfsigned_digest']
)
def generate(self, module):
if not os.path.exists(self.privatekey_path):
raise CertificateError(
'The private key %s does not exist' % self.privatekey_path
)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file %s does not exist' % self.csr_path
)
if not self.check(module, perms_required=False) or self.force:
try:
cert_builder = x509.CertificateBuilder()
cert_builder = cert_builder.subject_name(self.csr.subject)
cert_builder = cert_builder.issuer_name(self.csr.subject)
cert_builder = cert_builder.serial_number(self.serial_number)
cert_builder = cert_builder.not_valid_before(self.notBefore)
cert_builder = cert_builder.not_valid_after(self.notAfter)
cert_builder = cert_builder.public_key(self.privatekey.public_key())
for extension in self.csr.extensions:
cert_builder = cert_builder.add_extension(extension.value, critical=extension.critical)
except ValueError as e:
raise CertificateError(str(e))
certificate = cert_builder.sign(
private_key=self.privatekey, algorithm=self.digest,
backend=default_backend()
)
self.cert = certificate
if self.backup:
self.backup_file = module.backup_local(self.path)
crypto_utils.write_file(module, certificate.public_bytes(Encoding.PEM))
self.changed = True
else:
self.cert = crypto_utils.load_certificate(self.path, backend=self.backend)
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
self.changed = True
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path
}
if self.backup_file:
result['backup_file'] = self.backup_file
if check_mode:
result.update({
'notBefore': self.notBefore.strftime("%Y%m%d%H%M%SZ"),
'notAfter': self.notAfter.strftime("%Y%m%d%H%M%SZ"),
'serial_number': self.serial_number,
})
else:
result.update({
'notBefore': self.cert.not_valid_before.strftime("%Y%m%d%H%M%SZ"),
'notAfter': self.cert.not_valid_after.strftime("%Y%m%d%H%M%SZ"),
'serial_number': self.cert.serial_number,
})
return result
class SelfSignedCertificate(Certificate):
"""Generate the self-signed certificate."""
def __init__(self, module):
super(SelfSignedCertificate, self).__init__(module, 'pyopenssl')
self.notBefore = self.get_relative_time_option(module.params['selfsigned_not_before'], 'selfsigned_not_before')
self.notAfter = self.get_relative_time_option(module.params['selfsigned_not_after'], 'selfsigned_not_after')
self.digest = module.params['selfsigned_digest']
self.version = module.params['selfsigned_version']
self.serial_number = randint(1000, 99999)
self.csr = crypto_utils.load_certificate_request(self.csr_path)
try:
self.privatekey = crypto_utils.load_privatekey(
self.privatekey_path, self.privatekey_passphrase
)
except crypto_utils.OpenSSLBadPassphraseError as exc:
module.fail_json(msg=str(exc))
def generate(self, module):
if not os.path.exists(self.privatekey_path):
raise CertificateError(
'The private key %s does not exist' % self.privatekey_path
)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file %s does not exist' % self.csr_path
)
if not self.check(module, perms_required=False) or self.force:
cert = crypto.X509()
cert.set_serial_number(self.serial_number)
cert.set_notBefore(to_bytes(self.notBefore))
cert.set_notAfter(to_bytes(self.notAfter))
cert.set_subject(self.csr.get_subject())
cert.set_issuer(self.csr.get_subject())
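            # pyOpenSSL stores the X.509 version zero-based, so a v3 certificate is written as version 2.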
cert.set_version(self.version - 1)
cert.set_pubkey(self.csr.get_pubkey())
cert.add_extensions(self.csr.get_extensions())
cert.sign(self.privatekey, self.digest)
self.cert = cert
if self.backup:
self.backup_file = module.backup_local(self.path)
crypto_utils.write_file(module, crypto.dump_certificate(crypto.FILETYPE_PEM, self.cert))
self.changed = True
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
self.changed = True
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path
}
if self.backup_file:
result['backup_file'] = self.backup_file
if check_mode:
result.update({
'notBefore': self.notBefore,
'notAfter': self.notAfter,
'serial_number': self.serial_number,
})
else:
result.update({
'notBefore': self.cert.get_notBefore(),
'notAfter': self.cert.get_notAfter(),
'serial_number': self.cert.get_serial_number(),
})
return result
class OwnCACertificateCryptography(Certificate):
"""Generate the own CA certificate. Using the cryptography backend"""
def __init__(self, module):
super(OwnCACertificateCryptography, self).__init__(module, 'cryptography')
self.notBefore = self.get_relative_time_option(module.params['ownca_not_before'], 'ownca_not_before')
self.notAfter = self.get_relative_time_option(module.params['ownca_not_after'], 'ownca_not_after')
self.digest = crypto_utils.select_message_digest(module.params['ownca_digest'])
self.version = module.params['ownca_version']
self.serial_number = x509.random_serial_number()
self.ca_cert_path = module.params['ownca_path']
self.ca_privatekey_path = module.params['ownca_privatekey_path']
self.ca_privatekey_passphrase = module.params['ownca_privatekey_passphrase']
self.csr = crypto_utils.load_certificate_request(self.csr_path, backend=self.backend)
self.ca_cert = crypto_utils.load_certificate(self.ca_cert_path, backend=self.backend)
try:
self.ca_private_key = crypto_utils.load_privatekey(
self.ca_privatekey_path, self.ca_privatekey_passphrase, backend=self.backend
)
except crypto_utils.OpenSSLBadPassphraseError as exc:
module.fail_json(msg=str(exc))
def generate(self, module):
if not os.path.exists(self.ca_cert_path):
raise CertificateError(
'The CA certificate %s does not exist' % self.ca_cert_path
)
if not os.path.exists(self.ca_privatekey_path):
raise CertificateError(
'The CA private key %s does not exist' % self.ca_privatekey_path
)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file %s does not exist' % self.csr_path
)
if not self.check(module, perms_required=False) or self.force:
cert_builder = x509.CertificateBuilder()
cert_builder = cert_builder.subject_name(self.csr.subject)
cert_builder = cert_builder.issuer_name(self.ca_cert.subject)
cert_builder = cert_builder.serial_number(self.serial_number)
cert_builder = cert_builder.not_valid_before(self.notBefore)
cert_builder = cert_builder.not_valid_after(self.notAfter)
cert_builder = cert_builder.public_key(self.csr.public_key())
for extension in self.csr.extensions:
cert_builder = cert_builder.add_extension(extension.value, critical=extension.critical)
certificate = cert_builder.sign(
private_key=self.ca_private_key, algorithm=self.digest,
backend=default_backend()
)
self.cert = certificate
if self.backup:
self.backup_file = module.backup_local(self.path)
crypto_utils.write_file(module, certificate.public_bytes(Encoding.PEM))
self.changed = True
else:
self.cert = crypto_utils.load_certificate(self.path, backend=self.backend)
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
self.changed = True
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path,
'ca_cert': self.ca_cert_path,
'ca_privatekey': self.ca_privatekey_path
}
if self.backup_file:
result['backup_file'] = self.backup_file
if check_mode:
result.update({
'notBefore': self.notBefore.strftime("%Y%m%d%H%M%SZ"),
'notAfter': self.notAfter.strftime("%Y%m%d%H%M%SZ"),
'serial_number': self.serial_number,
})
else:
result.update({
'notBefore': self.cert.not_valid_before.strftime("%Y%m%d%H%M%SZ"),
'notAfter': self.cert.not_valid_after.strftime("%Y%m%d%H%M%SZ"),
'serial_number': self.cert.serial_number,
})
return result
class OwnCACertificate(Certificate):
"""Generate the own CA certificate."""
def __init__(self, module):
super(OwnCACertificate, self).__init__(module, 'pyopenssl')
self.notBefore = self.get_relative_time_option(module.params['ownca_not_before'], 'ownca_not_before')
self.notAfter = self.get_relative_time_option(module.params['ownca_not_after'], 'ownca_not_after')
self.digest = module.params['ownca_digest']
self.version = module.params['ownca_version']
self.serial_number = randint(1000, 99999)
self.ca_cert_path = module.params['ownca_path']
self.ca_privatekey_path = module.params['ownca_privatekey_path']
self.ca_privatekey_passphrase = module.params['ownca_privatekey_passphrase']
self.csr = crypto_utils.load_certificate_request(self.csr_path)
self.ca_cert = crypto_utils.load_certificate(self.ca_cert_path)
try:
self.ca_privatekey = crypto_utils.load_privatekey(
self.ca_privatekey_path, self.ca_privatekey_passphrase
)
except crypto_utils.OpenSSLBadPassphraseError as exc:
module.fail_json(msg=str(exc))
def generate(self, module):
if not os.path.exists(self.ca_cert_path):
raise CertificateError(
'The CA certificate %s does not exist' % self.ca_cert_path
)
if not os.path.exists(self.ca_privatekey_path):
raise CertificateError(
'The CA private key %s does not exist' % self.ca_privatekey_path
)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file %s does not exist' % self.csr_path
)
if not self.check(module, perms_required=False) or self.force:
cert = crypto.X509()
cert.set_serial_number(self.serial_number)
cert.set_notBefore(to_bytes(self.notBefore))
cert.set_notAfter(to_bytes(self.notAfter))
cert.set_subject(self.csr.get_subject())
cert.set_issuer(self.ca_cert.get_subject())
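            # pyOpenSSL stores the X.509 version zero-based, so a v3 certificate is written as version 2.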
cert.set_version(self.version - 1)
cert.set_pubkey(self.csr.get_pubkey())
cert.add_extensions(self.csr.get_extensions())
cert.sign(self.ca_privatekey, self.digest)
self.cert = cert
if self.backup:
self.backup_file = module.backup_local(self.path)
crypto_utils.write_file(module, crypto.dump_certificate(crypto.FILETYPE_PEM, self.cert))
self.changed = True
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
self.changed = True
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path,
'ca_cert': self.ca_cert_path,
'ca_privatekey': self.ca_privatekey_path
}
if self.backup_file:
result['backup_file'] = self.backup_file
if check_mode:
result.update({
'notBefore': self.notBefore,
'notAfter': self.notAfter,
'serial_number': self.serial_number,
})
else:
result.update({
'notBefore': self.cert.get_notBefore(),
'notAfter': self.cert.get_notAfter(),
'serial_number': self.cert.get_serial_number(),
})
return result
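# Comparison helpers for the assertonly provider: with equality=False they check that every expected
# item is present (subset check), with equality=True they require an exact match.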
def compare_sets(subset, superset, equality=False):
if equality:
return set(subset) == set(superset)
else:
return all(x in superset for x in subset)
def compare_dicts(subset, superset, equality=False):
if equality:
return subset == superset
else:
return all(superset.get(x) == v for x, v in subset.items())
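# Sentinel returned by the _validate_* helpers when the certificate lacks the requested extension entirely.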
NO_EXTENSION = 'no extension'
class AssertOnlyCertificateBase(Certificate):
def __init__(self, module, backend):
super(AssertOnlyCertificateBase, self).__init__(module, backend)
self.signature_algorithms = module.params['signature_algorithms']
if module.params['subject']:
self.subject = crypto_utils.parse_name_field(module.params['subject'])
else:
self.subject = []
self.subject_strict = module.params['subject_strict']
if module.params['issuer']:
self.issuer = crypto_utils.parse_name_field(module.params['issuer'])
else:
self.issuer = []
self.issuer_strict = module.params['issuer_strict']
self.has_expired = module.params['has_expired']
self.version = module.params['version']
self.key_usage = module.params['key_usage']
self.key_usage_strict = module.params['key_usage_strict']
self.extended_key_usage = module.params['extended_key_usage']
self.extended_key_usage_strict = module.params['extended_key_usage_strict']
self.subject_alt_name = module.params['subject_alt_name']
self.subject_alt_name_strict = module.params['subject_alt_name_strict']
self.not_before = module.params['not_before']
self.not_after = module.params['not_after']
self.valid_at = module.params['valid_at']
self.invalid_at = module.params['invalid_at']
self.valid_in = module.params['valid_in']
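        # A bare integer passed as valid_in is interpreted as a number of seconds from now.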
if self.valid_in and not self.valid_in.startswith("+") and not self.valid_in.startswith("-"):
try:
int(self.valid_in)
except ValueError:
module.fail_json(msg='The supplied value for "valid_in" (%s) is not an integer or a valid timespec' % self.valid_in)
self.valid_in = "+" + self.valid_in + "s"
# Load objects
self.cert = crypto_utils.load_certificate(self.path, backend=self.backend)
if self.privatekey_path is not None:
try:
self.privatekey = crypto_utils.load_privatekey(
self.privatekey_path,
self.privatekey_passphrase,
backend=self.backend
)
except crypto_utils.OpenSSLBadPassphraseError as exc:
raise CertificateError(exc)
if self.csr_path is not None:
self.csr = crypto_utils.load_certificate_request(self.csr_path, backend=self.backend)
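    # Backend-specific checks driven by assertonly(): the private key/CSR checks return booleans,
    # the remaining checks return mismatch details (or NO_EXTENSION) only when they fail.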
@abc.abstractmethod
def _validate_privatekey(self):
pass
@abc.abstractmethod
def _validate_csr_signature(self):
pass
@abc.abstractmethod
def _validate_csr_subject(self):
pass
@abc.abstractmethod
def _validate_csr_extensions(self):
pass
@abc.abstractmethod
def _validate_signature_algorithms(self):
pass
@abc.abstractmethod
def _validate_subject(self):
pass
@abc.abstractmethod
def _validate_issuer(self):
pass
@abc.abstractmethod
def _validate_has_expired(self):
pass
@abc.abstractmethod
def _validate_version(self):
pass
@abc.abstractmethod
def _validate_key_usage(self):
pass
@abc.abstractmethod
def _validate_extended_key_usage(self):
pass
@abc.abstractmethod
def _validate_subject_alt_name(self):
pass
@abc.abstractmethod
def _validate_not_before(self):
pass
@abc.abstractmethod
def _validate_not_after(self):
pass
@abc.abstractmethod
def _validate_valid_at(self):
pass
@abc.abstractmethod
def _validate_invalid_at(self):
pass
@abc.abstractmethod
def _validate_valid_in(self):
pass
def assertonly(self, module):
messages = []
if self.privatekey_path is not None:
if not self._validate_privatekey():
messages.append(
'Certificate %s and private key %s do not match' %
(self.path, self.privatekey_path)
)
if self.csr_path is not None:
if not self._validate_csr_signature():
messages.append(
'Certificate %s and CSR %s do not match: private key mismatch' %
(self.path, self.csr_path)
)
if not self._validate_csr_subject():
messages.append(
'Certificate %s and CSR %s do not match: subject mismatch' %
(self.path, self.csr_path)
)
if not self._validate_csr_extensions():
messages.append(
'Certificate %s and CSR %s do not match: extensions mismatch' %
(self.path, self.csr_path)
)
if self.signature_algorithms is not None:
wrong_alg = self._validate_signature_algorithms()
if wrong_alg:
messages.append(
'Invalid signature algorithm (got %s, expected one of %s)' %
(wrong_alg, self.signature_algorithms)
)
if self.subject is not None:
failure = self._validate_subject()
if failure:
dummy, cert_subject = failure
messages.append(
'Invalid subject component (got %s, expected all of %s to be present)' %
(cert_subject, self.subject)
)
if self.issuer is not None:
failure = self._validate_issuer()
if failure:
dummy, cert_issuer = failure
messages.append(
'Invalid issuer component (got %s, expected all of %s to be present)' % (cert_issuer, self.issuer)
)
if self.has_expired is not None:
cert_expired = self._validate_has_expired()
if cert_expired != self.has_expired:
messages.append(
'Certificate expiration check failed (certificate expiration is %s, expected %s)' %
(cert_expired, self.has_expired)
)
if self.version is not None:
cert_version = self._validate_version()
if cert_version != self.version:
messages.append(
'Invalid certificate version number (got %s, expected %s)' %
(cert_version, self.version)
)
if self.key_usage is not None:
failure = self._validate_key_usage()
if failure == NO_EXTENSION:
messages.append('Found no keyUsage extension')
elif failure:
dummy, cert_key_usage = failure
messages.append(
'Invalid keyUsage components (got %s, expected all of %s to be present)' %
(cert_key_usage, self.key_usage)
)
if self.extended_key_usage is not None:
failure = self._validate_extended_key_usage()
if failure == NO_EXTENSION:
messages.append('Found no extendedKeyUsage extension')
elif failure:
dummy, ext_cert_key_usage = failure
messages.append(
'Invalid extendedKeyUsage component (got %s, expected all of %s to be present)' % (ext_cert_key_usage, self.extended_key_usage)
)
if self.subject_alt_name is not None:
failure = self._validate_subject_alt_name()
if failure == NO_EXTENSION:
messages.append('Found no subjectAltName extension')
elif failure:
dummy, cert_san = failure
messages.append(
'Invalid subjectAltName component (got %s, expected all of %s to be present)' %
(cert_san, self.subject_alt_name)
)
if self.not_before is not None:
cert_not_valid_before = self._validate_not_before()
if cert_not_valid_before != self.get_relative_time_option(self.not_before, 'not_before'):
messages.append(
'Invalid not_before component (got %s, expected %s to be present)' %
(cert_not_valid_before, self.not_before)
)
if self.not_after is not None:
cert_not_valid_after = self._validate_not_after()
if cert_not_valid_after != self.get_relative_time_option(self.not_after, 'not_after'):
messages.append(
'Invalid not_after component (got %s, expected %s to be present)' %
(cert_not_valid_after, self.not_after)
)
if self.valid_at is not None:
not_before, valid_at, not_after = self._validate_valid_at()
if not (not_before <= valid_at <= not_after):
messages.append(
'Certificate is not valid for the specified date (%s) - not_before: %s - not_after: %s' %
(self.valid_at, not_before, not_after)
)
if self.invalid_at is not None:
not_before, invalid_at, not_after = self._validate_invalid_at()
if (invalid_at <= not_before) or (invalid_at >= not_after):
messages.append(
'Certificate is not invalid for the specified date (%s) - not_before: %s - not_after: %s' %
(self.invalid_at, not_before, not_after)
)
if self.valid_in is not None:
not_before, valid_in, not_after = self._validate_valid_in()
if not not_before <= valid_in <= not_after:
messages.append(
'Certificate is not valid in %s from now (that would be %s) - not_before: %s - not_after: %s' %
(self.valid_in, valid_in, not_before, not_after)
)
return messages
def generate(self, module):
"""Don't generate anything - only assert"""
messages = self.assertonly(module)
if messages:
module.fail_json(msg=' | '.join(messages))
def check(self, module, perms_required=False):
"""Ensure the resource is in its desired state."""
messages = self.assertonly(module)
return len(messages) == 0
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'csr': self.csr_path,
}
return result
class AssertOnlyCertificateCryptography(AssertOnlyCertificateBase):
"""Validate the supplied cert, using the cryptography backend"""
def __init__(self, module):
super(AssertOnlyCertificateCryptography, self).__init__(module, 'cryptography')
def _validate_privatekey(self):
return self.cert.public_key().public_numbers() == self.privatekey.public_key().public_numbers()
def _validate_csr_signature(self):
if not self.csr.is_signature_valid:
return False
return self.csr.public_key().public_numbers() == self.cert.public_key().public_numbers()
def _validate_csr_subject(self):
return self.csr.subject == self.cert.subject
def _validate_csr_extensions(self):
cert_exts = self.cert.extensions
csr_exts = self.csr.extensions
if len(cert_exts) != len(csr_exts):
return False
for cert_ext in cert_exts:
try:
csr_ext = csr_exts.get_extension_for_oid(cert_ext.oid)
if cert_ext != csr_ext:
return False
except cryptography.x509.ExtensionNotFound as dummy:
return False
return True
def _validate_signature_algorithms(self):
if self.cert.signature_algorithm_oid._name not in self.signature_algorithms:
return self.cert.signature_algorithm_oid._name
def _validate_subject(self):
expected_subject = Name([NameAttribute(oid=crypto_utils.cryptography_name_to_oid(sub[0]), value=to_text(sub[1]))
for sub in self.subject])
cert_subject = self.cert.subject
if not compare_sets(expected_subject, cert_subject, self.subject_strict):
return expected_subject, cert_subject
def _validate_issuer(self):
expected_issuer = Name([NameAttribute(oid=crypto_utils.cryptography_name_to_oid(iss[0]), value=to_text(iss[1]))
for iss in self.issuer])
cert_issuer = self.cert.issuer
if not compare_sets(expected_issuer, cert_issuer, self.issuer_strict):
return self.issuer, cert_issuer
def _validate_has_expired(self):
cert_not_after = self.cert.not_valid_after
cert_expired = cert_not_after < datetime.datetime.utcnow()
return cert_expired
def _validate_version(self):
if self.cert.version == x509.Version.v1:
return 1
if self.cert.version == x509.Version.v3:
return 3
return "unknown"
def _validate_key_usage(self):
try:
current_key_usage = self.cert.extensions.get_extension_for_class(x509.KeyUsage).value
test_key_usage = dict(
digital_signature=current_key_usage.digital_signature,
content_commitment=current_key_usage.content_commitment,
key_encipherment=current_key_usage.key_encipherment,
data_encipherment=current_key_usage.data_encipherment,
key_agreement=current_key_usage.key_agreement,
key_cert_sign=current_key_usage.key_cert_sign,
crl_sign=current_key_usage.crl_sign,
encipher_only=False,
decipher_only=False
)
if test_key_usage['key_agreement']:
test_key_usage.update(dict(
encipher_only=current_key_usage.encipher_only,
decipher_only=current_key_usage.decipher_only
))
key_usages = crypto_utils.cryptography_parse_key_usage_params(self.key_usage)
if not compare_dicts(key_usages, test_key_usage, self.key_usage_strict):
                return self.key_usage, [x for x in test_key_usage if test_key_usage[x] is True]
except cryptography.x509.ExtensionNotFound:
# This is only bad if the user specified a non-empty list
if self.key_usage:
return NO_EXTENSION
def _validate_extended_key_usage(self):
try:
current_ext_keyusage = self.cert.extensions.get_extension_for_class(x509.ExtendedKeyUsage).value
usages = [crypto_utils.cryptography_name_to_oid(usage) for usage in self.extended_key_usage]
expected_ext_keyusage = x509.ExtendedKeyUsage(usages)
if not compare_sets(expected_ext_keyusage, current_ext_keyusage, self.extended_key_usage_strict):
return [eku.value for eku in expected_ext_keyusage], [eku.value for eku in current_ext_keyusage]
except cryptography.x509.ExtensionNotFound:
# This is only bad if the user specified a non-empty list
if self.extended_key_usage:
return NO_EXTENSION
def _validate_subject_alt_name(self):
try:
current_san = self.cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
expected_san = [crypto_utils.cryptography_get_name(san) for san in self.subject_alt_name]
if not compare_sets(expected_san, current_san, self.subject_alt_name_strict):
return self.subject_alt_name, current_san
except cryptography.x509.ExtensionNotFound:
# This is only bad if the user specified a non-empty list
if self.subject_alt_name:
return NO_EXTENSION
def _validate_not_before(self):
return self.cert.not_valid_before
def _validate_not_after(self):
return self.cert.not_valid_after
def _validate_valid_at(self):
rt = self.get_relative_time_option(self.valid_at, 'valid_at')
return self.cert.not_valid_before, rt, self.cert.not_valid_after
def _validate_invalid_at(self):
        rt = self.get_relative_time_option(self.invalid_at, 'invalid_at')
return self.cert.not_valid_before, rt, self.cert.not_valid_after
def _validate_valid_in(self):
valid_in_date = self.get_relative_time_option(self.valid_in, "valid_in")
return self.cert.not_valid_before, valid_in_date, self.cert.not_valid_after
class AssertOnlyCertificate(AssertOnlyCertificateBase):
"""validate the supplied certificate."""
def __init__(self, module):
super(AssertOnlyCertificate, self).__init__(module, 'pyopenssl')
# Ensure inputs are properly sanitized before comparison.
for param in ['signature_algorithms', 'key_usage', 'extended_key_usage',
'subject_alt_name', 'subject', 'issuer', 'not_before',
'not_after', 'valid_at', 'invalid_at']:
attr = getattr(self, param)
if isinstance(attr, list) and attr:
if isinstance(attr[0], str):
setattr(self, param, [to_bytes(item) for item in attr])
elif isinstance(attr[0], tuple):
setattr(self, param, [(to_bytes(item[0]), to_bytes(item[1])) for item in attr])
elif isinstance(attr, tuple):
setattr(self, param, dict((to_bytes(k), to_bytes(v)) for (k, v) in attr.items()))
elif isinstance(attr, dict):
setattr(self, param, dict((to_bytes(k), to_bytes(v)) for (k, v) in attr.items()))
elif isinstance(attr, str):
setattr(self, param, to_bytes(attr))
def _validate_privatekey(self):
ctx = OpenSSL.SSL.Context(OpenSSL.SSL.TLSv1_2_METHOD)
ctx.use_privatekey(self.privatekey)
ctx.use_certificate(self.cert)
try:
ctx.check_privatekey()
return True
except OpenSSL.SSL.Error:
return False
def _validate_csr_signature(self):
try:
self.csr.verify(self.cert.get_pubkey())
except OpenSSL.crypto.Error:
            return False
        return True
def _validate_csr_subject(self):
if self.csr.get_subject() != self.cert.get_subject():
            return False
        return True
def _validate_csr_extensions(self):
csr_extensions = self.csr.get_extensions()
cert_extension_count = self.cert.get_extension_count()
if len(csr_extensions) != cert_extension_count:
return False
for extension_number in range(0, cert_extension_count):
cert_extension = self.cert.get_extension(extension_number)
csr_extension = filter(lambda extension: extension.get_short_name() == cert_extension.get_short_name(), csr_extensions)
if cert_extension.get_data() != list(csr_extension)[0].get_data():
return False
return True
def _validate_signature_algorithms(self):
if self.cert.get_signature_algorithm() not in self.signature_algorithms:
return self.cert.get_signature_algorithm()
def _validate_subject(self):
expected_subject = [(OpenSSL._util.lib.OBJ_txt2nid(sub[0]), sub[1]) for sub in self.subject]
cert_subject = self.cert.get_subject().get_components()
current_subject = [(OpenSSL._util.lib.OBJ_txt2nid(sub[0]), sub[1]) for sub in cert_subject]
if not compare_sets(expected_subject, current_subject, self.subject_strict):
return expected_subject, current_subject
def _validate_issuer(self):
expected_issuer = [(OpenSSL._util.lib.OBJ_txt2nid(iss[0]), iss[1]) for iss in self.issuer]
cert_issuer = self.cert.get_issuer().get_components()
current_issuer = [(OpenSSL._util.lib.OBJ_txt2nid(iss[0]), iss[1]) for iss in cert_issuer]
if not compare_sets(expected_issuer, current_issuer, self.issuer_strict):
return self.issuer, cert_issuer
def _validate_has_expired(self):
# The following 3 lines are the same as the current PyOpenSSL code for cert.has_expired().
        # Older versions of PyOpenSSL have a buggy implementation;
        # to avoid issues with those, the code from a more recent release is used here.
time_string = to_native(self.cert.get_notAfter())
not_after = datetime.datetime.strptime(time_string, "%Y%m%d%H%M%SZ")
cert_expired = not_after < datetime.datetime.utcnow()
return cert_expired
def _validate_version(self):
# Version numbers in certs are off by one:
# v1: 0, v2: 1, v3: 2 ...
return self.cert.get_version() + 1
def _validate_key_usage(self):
found = False
for extension_idx in range(0, self.cert.get_extension_count()):
extension = self.cert.get_extension(extension_idx)
if extension.get_short_name() == b'keyUsage':
found = True
key_usage = [OpenSSL._util.lib.OBJ_txt2nid(key_usage) for key_usage in self.key_usage]
current_ku = [OpenSSL._util.lib.OBJ_txt2nid(usage.strip()) for usage in
to_bytes(extension, errors='surrogate_or_strict').split(b',')]
if not compare_sets(key_usage, current_ku, self.key_usage_strict):
return self.key_usage, str(extension).split(', ')
if not found:
# This is only bad if the user specified a non-empty list
if self.key_usage:
return NO_EXTENSION
def _validate_extended_key_usage(self):
found = False
for extension_idx in range(0, self.cert.get_extension_count()):
extension = self.cert.get_extension(extension_idx)
if extension.get_short_name() == b'extendedKeyUsage':
found = True
extKeyUsage = [OpenSSL._util.lib.OBJ_txt2nid(keyUsage) for keyUsage in self.extended_key_usage]
current_xku = [OpenSSL._util.lib.OBJ_txt2nid(usage.strip()) for usage in
to_bytes(extension, errors='surrogate_or_strict').split(b',')]
if not compare_sets(extKeyUsage, current_xku, self.extended_key_usage_strict):
return self.extended_key_usage, str(extension).split(', ')
if not found:
# This is only bad if the user specified a non-empty list
if self.extended_key_usage:
return NO_EXTENSION
def _normalize_san(self, san):
# Apparently OpenSSL returns 'IP address' not 'IP' as specifier when converting the subjectAltName to string
# although it won't accept this specifier when generating the CSR. (https://github.com/openssl/openssl/issues/4004)
if san.startswith('IP Address:'):
san = 'IP:' + san[len('IP Address:'):]
if san.startswith('IP:'):
ip = compat_ipaddress.ip_address(san[3:])
san = 'IP:{0}'.format(ip.compressed)
return san
def _validate_subject_alt_name(self):
found = False
for extension_idx in range(0, self.cert.get_extension_count()):
extension = self.cert.get_extension(extension_idx)
if extension.get_short_name() == b'subjectAltName':
found = True
l_altnames = [self._normalize_san(altname.strip()) for altname in
to_text(extension, errors='surrogate_or_strict').split(', ')]
sans = [self._normalize_san(to_text(san, errors='surrogate_or_strict')) for san in self.subject_alt_name]
if not compare_sets(sans, l_altnames, self.subject_alt_name_strict):
return self.subject_alt_name, l_altnames
if not found:
# This is only bad if the user specified a non-empty list
if self.subject_alt_name:
return NO_EXTENSION
def _validate_not_before(self):
return self.cert.get_notBefore()
def _validate_not_after(self):
return self.cert.get_notAfter()
def _validate_valid_at(self):
return self.cert.get_notBefore(), self.valid_at, self.cert.get_notAfter()
def _validate_invalid_at(self):
        return self.cert.get_notBefore(), self.invalid_at, self.cert.get_notAfter()
def _validate_valid_in(self):
valid_in_asn1 = self.get_relative_time_option(self.valid_in, "valid_in")
valid_in_date = to_bytes(valid_in_asn1, errors='surrogate_or_strict')
return self.cert.get_notBefore(), valid_in_date, self.cert.get_notAfter()
class AcmeCertificate(Certificate):
"""Retrieve a certificate using the ACME protocol."""
# Since there's no real use of the backend,
# other than the 'self.check' function, we just pass the backend to the constructor
def __init__(self, module, backend):
super(AcmeCertificate, self).__init__(module, backend)
self.accountkey_path = module.params['acme_accountkey_path']
self.challenge_path = module.params['acme_challenge_path']
self.use_chain = module.params['acme_chain']
def generate(self, module):
if not os.path.exists(self.privatekey_path):
raise CertificateError(
'The private key %s does not exist' % self.privatekey_path
)
if not os.path.exists(self.csr_path):
raise CertificateError(
'The certificate signing request file %s does not exist' % self.csr_path
)
if not os.path.exists(self.accountkey_path):
raise CertificateError(
'The account key %s does not exist' % self.accountkey_path
)
if not os.path.exists(self.challenge_path):
raise CertificateError(
'The challenge path %s does not exist' % self.challenge_path
)
if not self.check(module, perms_required=False) or self.force:
acme_tiny_path = self.module.get_bin_path('acme-tiny', required=True)
command = [acme_tiny_path]
if self.use_chain:
command.append('--chain')
command.extend(['--account-key', self.accountkey_path])
command.extend(['--csr', self.csr_path])
command.extend(['--acme-dir', self.challenge_path])
try:
crt = module.run_command(command, check_rc=True)[1]
if self.backup:
self.backup_file = module.backup_local(self.path)
crypto_utils.write_file(module, to_bytes(crt))
self.changed = True
except OSError as exc:
raise CertificateError(exc)
file_args = module.load_file_common_arguments(module.params)
if module.set_fs_attributes_if_different(file_args, False):
self.changed = True
def dump(self, check_mode=False):
result = {
'changed': self.changed,
'filename': self.path,
'privatekey': self.privatekey_path,
'accountkey': self.accountkey_path,
'csr': self.csr_path,
}
if self.backup_file:
result['backup_file'] = self.backup_file
return result
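# Entry point: build the argument spec, select a crypto backend and dispatch to the matching provider class.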
def main():
module = AnsibleModule(
argument_spec=dict(
state=dict(type='str', default='present', choices=['present', 'absent']),
path=dict(type='path', required=True),
provider=dict(type='str', choices=['acme', 'assertonly', 'ownca', 'selfsigned']),
force=dict(type='bool', default=False,),
csr_path=dict(type='path'),
backup=dict(type='bool', default=False),
select_crypto_backend=dict(type='str', default='auto', choices=['auto', 'cryptography', 'pyopenssl']),
# General properties of a certificate
privatekey_path=dict(type='path'),
privatekey_passphrase=dict(type='str', no_log=True),
# provider: assertonly
signature_algorithms=dict(type='list', elements='str'),
subject=dict(type='dict'),
subject_strict=dict(type='bool', default=False),
issuer=dict(type='dict'),
issuer_strict=dict(type='bool', default=False),
has_expired=dict(type='bool', default=False),
version=dict(type='int'),
key_usage=dict(type='list', elements='str', aliases=['keyUsage']),
key_usage_strict=dict(type='bool', default=False, aliases=['keyUsage_strict']),
extended_key_usage=dict(type='list', elements='str', aliases=['extendedKeyUsage']),
extended_key_usage_strict=dict(type='bool', default=False, aliases=['extendedKeyUsage_strict']),
subject_alt_name=dict(type='list', elements='str', aliases=['subjectAltName']),
subject_alt_name_strict=dict(type='bool', default=False, aliases=['subjectAltName_strict']),
not_before=dict(type='str', aliases=['notBefore']),
not_after=dict(type='str', aliases=['notAfter']),
valid_at=dict(type='str'),
invalid_at=dict(type='str'),
valid_in=dict(type='str'),
# provider: selfsigned
selfsigned_version=dict(type='int', default=3),
selfsigned_digest=dict(type='str', default='sha256'),
selfsigned_not_before=dict(type='str', default='+0s', aliases=['selfsigned_notBefore']),
selfsigned_not_after=dict(type='str', default='+3650d', aliases=['selfsigned_notAfter']),
# provider: ownca
ownca_path=dict(type='path'),
ownca_privatekey_path=dict(type='path'),
ownca_privatekey_passphrase=dict(type='str', no_log=True),
ownca_digest=dict(type='str', default='sha256'),
ownca_version=dict(type='int', default=3),
ownca_not_before=dict(type='str', default='+0s'),
ownca_not_after=dict(type='str', default='+3650d'),
# provider: acme
acme_accountkey_path=dict(type='path'),
acme_challenge_path=dict(type='path'),
acme_chain=dict(type='bool', default=False),
),
supports_check_mode=True,
add_file_common_args=True,
)
try:
if module.params['state'] == 'absent':
certificate = CertificateAbsent(module)
else:
if module.params['provider'] != 'assertonly' and module.params['csr_path'] is None:
module.fail_json(msg='csr_path is required when provider is not assertonly')
base_dir = os.path.dirname(module.params['path']) or '.'
if not os.path.isdir(base_dir):
module.fail_json(
name=base_dir,
msg='The directory %s does not exist or the file is not a directory' % base_dir
)
provider = module.params['provider']
backend = module.params['select_crypto_backend']
if backend == 'auto':
# Detect what backend we can use
can_use_cryptography = CRYPTOGRAPHY_FOUND and CRYPTOGRAPHY_VERSION >= LooseVersion(MINIMAL_CRYPTOGRAPHY_VERSION)
can_use_pyopenssl = PYOPENSSL_FOUND and PYOPENSSL_VERSION >= LooseVersion(MINIMAL_PYOPENSSL_VERSION)
# If cryptography is available we'll use it
if can_use_cryptography:
backend = 'cryptography'
elif can_use_pyopenssl:
backend = 'pyopenssl'
if module.params['selfsigned_version'] == 2 or module.params['ownca_version'] == 2:
module.warn('crypto backend forced to pyopenssl. The cryptography library does not support v2 certificates')
backend = 'pyopenssl'
# Fail if no backend has been found
if backend == 'auto':
module.fail_json(msg=("Can't detect any of the required Python libraries "
"cryptography (>= {0}) or PyOpenSSL (>= {1})").format(
MINIMAL_CRYPTOGRAPHY_VERSION,
MINIMAL_PYOPENSSL_VERSION))
if backend == 'pyopenssl':
if not PYOPENSSL_FOUND:
module.fail_json(msg=missing_required_lib('pyOpenSSL >= {0}'.format(MINIMAL_PYOPENSSL_VERSION)),
exception=PYOPENSSL_IMP_ERR)
if module.params['provider'] in ['selfsigned', 'ownca', 'assertonly']:
try:
getattr(crypto.X509Req, 'get_extensions')
except AttributeError:
module.fail_json(msg='You need to have PyOpenSSL>=0.15')
if provider == 'selfsigned':
certificate = SelfSignedCertificate(module)
elif provider == 'acme':
certificate = AcmeCertificate(module, 'pyopenssl')
elif provider == 'ownca':
certificate = OwnCACertificate(module)
else:
certificate = AssertOnlyCertificate(module)
elif backend == 'cryptography':
if not CRYPTOGRAPHY_FOUND:
module.fail_json(msg=missing_required_lib('cryptography >= {0}'.format(MINIMAL_CRYPTOGRAPHY_VERSION)),
exception=CRYPTOGRAPHY_IMP_ERR)
if module.params['selfsigned_version'] == 2 or module.params['ownca_version'] == 2:
module.fail_json(msg='The cryptography backend does not support v2 certificates, '
'use select_crypto_backend=pyopenssl for v2 certificates')
if provider == 'selfsigned':
certificate = SelfSignedCertificateCryptography(module)
elif provider == 'acme':
certificate = AcmeCertificate(module, 'cryptography')
elif provider == 'ownca':
certificate = OwnCACertificateCryptography(module)
else:
certificate = AssertOnlyCertificateCryptography(module)
if module.params['state'] == 'present':
if module.check_mode:
result = certificate.dump(check_mode=True)
result['changed'] = module.params['force'] or not certificate.check(module)
module.exit_json(**result)
certificate.generate(module)
else:
if module.check_mode:
result = certificate.dump(check_mode=True)
result['changed'] = os.path.exists(module.params['path'])
module.exit_json(**result)
certificate.remove(module)
result = certificate.dump()
module.exit_json(**result)
except crypto_utils.OpenSSLObjectError as exc:
module.fail_json(msg=to_native(exc))
if __name__ == "__main__":
main()<|fim▁end|> | description:
- The I(extended_key_usage) extension field must contain all these values. |
<|file_name|>bitcoin_it.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="it" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About MyBroCoin</source>
<translation>Info su MyBroCoin</translation>
</message>
<message>
<location line="+39"/>
<source><b>MyBroCoin</b> version</source>
<translation>Versione di <b>MyBroCoin</b></translation>
</message>
<message>
<location line="+57"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation>
Questo è un software sperimentale.
Distribuito sotto la licenza software MIT/X11, vedi il file COPYING incluso oppure su http://www.opensource.org/licenses/mit-license.php.
Questo prodotto include software sviluppato dal progetto OpenSSL per l'uso del Toolkit OpenSSL (http://www.openssl.org/), software crittografico scritto da Eric Young ([email protected]) e software UPnP scritto da Thomas Bernard.</translation>
</message>
<message>
<location filename="../aboutdialog.cpp" line="+14"/>
<source>Copyright</source>
<translation>Copyright</translation>
</message>
<message>
<location line="+0"/>
<source>The MyBroCoin developers</source>
<translation>Sviluppatori di MyBroCoin</translation>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>Rubrica</translation>
</message>
<message>
<location line="+19"/>
<source>Double-click to edit address or label</source>
<translation>Fai doppio click per modificare o cancellare l'etichetta</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Crea un nuovo indirizzo</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Copia l'indirizzo attualmente selezionato nella clipboard</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation>&Nuovo indirizzo</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+63"/>
<source>These are your MyBroCoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation>Questi sono i tuoi indirizzi MyBroCoin per ricevere pagamenti. Potrai darne uno diverso ad ognuno per tenere così traccia di chi ti sta pagando.</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>&Copy Address</source>
<translation>&Copia l'indirizzo</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation>Mostra il codice &QR</translation>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a MyBroCoin address</source>
<translation>Firma un messaggio per dimostrare di possedere questo indirizzo</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>Firma il &messaggio</translation>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation>Cancella l'indirizzo attualmente selezionato dalla lista</translation>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation>Esporta i dati nella tabella corrente su un file</translation>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation>&Esporta...</translation>
</message>
<message>
<location line="-44"/>
<source>Verify a message to ensure it was signed with a specified MyBroCoin address</source>
<translation>Verifica un messaggio per accertarsi che sia firmato con un indirizzo MyBroCoin specifico</translation>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation>&Verifica Messaggio</translation>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Cancella</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="-5"/>
<source>These are your MyBroCoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Copy &Label</source>
<translation>Copia &l'etichetta</translation>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation>&Modifica</translation>
</message>
<message>
<location line="+1"/>
<source>Send &Coins</source>
<translation>Invia &MyBroCoin</translation>
</message>
<message>
<location line="+260"/>
<source>Export Address Book Data</source>
<translation>Esporta gli indirizzi della rubrica</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Testo CSV (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation>Errore nell'esportazione</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Impossibile scrivere sul file %1.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Etichetta</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Indirizzo</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(nessuna etichetta)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation>Finestra passphrase</translation>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Inserisci la passphrase</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Nuova passphrase</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Ripeti la passphrase</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Inserisci la nuova passphrase per il portamonete.<br/>Per favore usa una passphrase di <b>10 o più caratteri casuali</b>, o <b>otto o più parole</b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Cifra il portamonete</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Quest'operazione necessita della passphrase per sbloccare il portamonete.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Sblocca il portamonete</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Quest'operazione necessita della passphrase per decifrare il portamonete.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Decifra il portamonete</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Cambia la passphrase</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Inserisci la vecchia e la nuova passphrase per il portamonete.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Conferma la cifratura del portamonete</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR LITECOINS</b>!</source>
<translation>Attenzione: se si cifra il portamonete e si perde la frase d'ordine, <b>SI PERDERANNO TUTTI I PROPRI LITECOIN</b>!</translation>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Si è sicuri di voler cifrare il portamonete?</translation>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>IMPORTANTE: qualsiasi backup del portafoglio effettuato precedentemente dovrebbe essere sostituito con il file del portafoglio criptato appena generato. Per ragioni di sicurezza, i backup precedenti del file del portafoglio non criptato diventeranno inservibili non appena si inizi ad usare il nuovo portafoglio criptato.</translation>
</message>
<message>
<location line="+100"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation>Attenzione: tasto Blocco maiuscole attivo.</translation>
</message>
<message>
<location line="-130"/>
<location line="+58"/>
<source>Wallet encrypted</source>
<translation>Portamonete cifrato</translation>
</message>
<message>
<location line="-56"/>
<source>MyBroCoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your mariobroscoins from being stolen by malware infecting your computer.</source>
<translation>MyBroCoin verrà ora chiuso per finire il processo di crittazione. Ricorda che criptare il tuo portamonete non può fornire una protezione totale contro furti causati da malware che dovessero infettare il tuo computer.</translation>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+42"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Cifratura del portamonete fallita</translation>
</message>
<message>
<location line="-54"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Cifratura del portamonete fallita a causa di un errore interno. Il portamonete non è stato cifrato.</translation>
</message>
<message>
<location line="+7"/>
<location line="+48"/>
<source>The supplied passphrases do not match.</source>
<translation>Le passphrase inserite non corrispondono.</translation>
</message>
<message>
<location line="-37"/>
<source>Wallet unlock failed</source>
<translation>Sblocco del portamonete fallito</translation>
</message>
<message>
<location line="+1"/>
<location line="+11"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>La passphrase inserita per la decifrazione del portamonete è errata.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Decifrazione del portamonete fallita</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>Passphrase del portamonete modificata con successo.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+233"/>
<source>Sign &message...</source>
<translation>Firma il &messaggio...</translation>
</message>
<message>
<location line="+280"/>
<source>Synchronizing with network...</source>
<translation>Sto sincronizzando con la rete...</translation>
</message>
<message>
<location line="-349"/>
<source>&Overview</source>
<translation>&Sintesi</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Mostra lo stato generale del portamonete</translation>
</message>
<message>
<location line="+20"/>
<source>&Transactions</source>
<translation>&Transazioni</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Cerca nelle transazioni</translation>
</message>
<message>
<location line="+7"/>
<source>Edit the list of stored addresses and labels</source>
<translation>Modifica la lista degli indirizzi salvati e delle etichette</translation>
</message>
<message>
<location line="-14"/>
<source>Show the list of addresses for receiving payments</source>
<translation>Mostra la lista di indirizzi su cui ricevere pagamenti</translation>
</message>
<message>
<location line="+31"/>
<source>E&xit</source>
<translation>&Esci</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Chiudi applicazione</translation>
</message>
<message>
<location line="+4"/>
<source>Show information about MyBroCoin</source>
<translation>Mostra informazioni su MyBroCoin</translation>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>Informazioni su &Qt</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Mostra informazioni su Qt</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Opzioni...</translation>
</message>
<message>
<location line="+6"/>
<source>&Encrypt Wallet...</source>
<translation>&Cifra il portamonete...</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>&Backup Portamonete...</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>&Cambia la passphrase...</translation>
</message>
<message>
<location line="+285"/>
<source>Importing blocks from disk...</source>
<translation>Importazione dei blocchi dal disco...</translation>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation>Re-indicizzazione blocchi su disco...</translation>
</message>
<message>
<location line="-347"/>
<source>Send coins to a MyBroCoin address</source>
<translation>Invia monete ad un indirizzo mariobroscoin</translation>
</message>
<message>
<location line="+49"/>
<source>Modify configuration options for MyBroCoin</source>
<translation>Modifica configurazione opzioni per mariobroscoin</translation>
</message>
<message>
<location line="+9"/>
<source>Backup wallet to another location</source>
<translation>Backup del portamonete in un'altra posizione</translation>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Cambia la passphrase per la cifratura del portamonete</translation>
</message>
<message>
<location line="+6"/>
<source>&Debug window</source>
<translation>Finestra &Debug</translation>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation>Apri la console di debugging e diagnostica</translation>
</message>
<message>
<location line="-4"/>
<source>&Verify message...</source>
<translation>&Verifica messaggio...</translation>
</message>
<message>
<location line="-165"/>
<location line="+530"/>
<source>MyBroCoin</source>
<translation>MyBroCoin</translation>
</message>
<message>
<location line="-530"/>
<source>Wallet</source>
<translation>Portamonete</translation>
</message>
<message>
<location line="+101"/>
<source>&Send</source>
<translation>&Spedisci</translation>
</message>
<message>
<location line="+7"/>
<source>&Receive</source>
<translation>&Ricevi</translation>
</message>
<message>
<location line="+14"/>
<source>&Addresses</source>
<translation>&Indirizzi</translation>
</message>
<message>
<location line="+22"/>
<source>&About MyBroCoin</source>
<translation>&Info su MyBroCoin</translation>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation>&Mostra/Nascondi</translation>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation>Mostra o nascondi la Finestra principale</translation>
</message>
<message>
<location line="+3"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Crittografa le chiavi private che appartengono al tuo portafoglio</translation>
</message>
<message>
<location line="+7"/>
<source>Sign messages with your MyBroCoin addresses to prove you own them</source>
<translation>Firma i messaggi con il tuo indirizzo MyBroCoin per dimostrare di possederli</translation>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified MyBroCoin addresses</source>
<translation>Verifica i messaggi per accertarsi che siano stati firmati con gli indirizzi MyBroCoin specificati</translation>
</message>
<message>
<location line="+28"/>
<source>&File</source>
<translation>&File</translation>
</message>
<message>
<location line="+7"/>
<source>&Settings</source>
<translation>&Impostazioni</translation>
</message>
<message>
<location line="+6"/>
<source>&Help</source>
<translation>&Aiuto</translation>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation>Barra degli strumenti "Tabs"</translation>
</message>
<message>
<location line="+17"/>
<location line="+10"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location line="+47"/>
<source>MyBroCoin client</source>
<translation>MyBroCoin client</translation>
</message>
<message numerus="yes">
<location line="+141"/>
<source>%n active connection(s) to MyBroCoin network</source>
<translation><numerusform>%n connessione attiva alla rete MyBroCoin</numerusform><numerusform>%n connessioni attive alla rete MyBroCoin</numerusform></translation>
</message>
<message>
<location line="+22"/>
<source>No block source available...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
<translation>Processati %1 di %2 (circa) blocchi della cronologia transazioni.</translation>
</message>
<message>
<location line="+4"/>
<source>Processed %1 blocks of transaction history.</source>
<translation>Processati %1 blocchi della cronologia transazioni.</translation>
</message>
<message numerus="yes">
<location line="+20"/>
<source>%n hour(s)</source>
<translation><numerusform>%n ora</numerusform><numerusform>%n ore</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation><numerusform>%n giorno</numerusform><numerusform>%n giorni</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n week(s)</source>
<translation><numerusform>%n settimana</numerusform><numerusform>%n settimane</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>%1 behind</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Last received block was generated %1 ago.</source>
<translation>L'ultimo blocco ricevuto è stato generato %1 fa.</translation>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Error</source>
<translation>Errore</translation>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation>Attenzione</translation>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation>Informazione</translation>
</message>
<message>
<location line="+70"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation>Questa transazione è superiore al limite di dimensione. È comunque possibile inviarla con una commissione di %1, che va ai nodi che processano la tua transazione e contribuisce a sostenere la rete. Vuoi pagare la commissione?</translation>
</message>
<message>
<location line="-140"/>
<source>Up to date</source>
<translation>Aggiornato</translation>
</message>
<message>
<location line="+31"/>
<source>Catching up...</source>
<translation>In aggiornamento...</translation>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation>Conferma compenso transazione</translation>
</message>
<message>
<location line="+8"/>
<source>Sent transaction</source>
<translation>Transazione inviata</translation>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation>Transazione ricevuta</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Data: %1
Quantità: %2
Tipo: %3
Indirizzo: %4
</translation>
</message>
<message>
<location line="+33"/>
<location line="+23"/>
<source>URI handling</source>
<translation>Gestione URI</translation>
</message>
<message>
<location line="-23"/>
<location line="+23"/>
<source>URI can not be parsed! This can be caused by an invalid MyBroCoin address or malformed URI parameters.</source>
<translation>Impossibile interpretare l'URI! Ciò può essere causato da un indirizzo MyBroCoin invalido o da parametri URI non corretti.</translation>
</message>
<message>
<location line="+17"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Il portamonete è <b>cifrato</b> e attualmente <b>sbloccato</b></translation>
</message>
<message><|fim▁hole|> </message>
<message>
<location filename="../bitcoin.cpp" line="+111"/>
<source>A fatal error occurred. MyBroCoin can no longer continue safely and will quit.</source>
<translation>Riscontrato un errore irreversibile. MyBroCoin non può più continuare in sicurezza e verrà terminato.</translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+104"/>
<source>Network Alert</source>
<translation>Avviso di rete</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Modifica l'indirizzo</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Etichetta</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation>L'etichetta associata a questo indirizzo nella rubrica</translation>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Indirizzo</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation>L'indirizzo associato a questa voce della rubrica. Si può modificare solo negli indirizzi di spedizione.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation>Nuovo indirizzo di ricezione</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Nuovo indirizzo d'invio</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Modifica indirizzo di ricezione</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Modifica indirizzo d'invio</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>L'indirizzo inserito "%1" è già in rubrica.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid MyBroCoin address.</source>
<translation>L'indirizzo inserito "%1" non è un indirizzo mariobroscoin valido.</translation>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Impossibile sbloccare il portamonete.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>Generazione della nuova chiave non riuscita.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<location line="+12"/>
<source>MyBroCoin-Qt</source>
<translation>MyBroCoin-Qt</translation>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation>versione</translation>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation>Utilizzo:</translation>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation>opzioni riga di comando</translation>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation>UI opzioni</translation>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>Imposta lingua, ad esempio "it_IT" (predefinita: lingua di sistema)</translation>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation>Parti in icona</translation>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation>Mostra finestra di presentazione all'avvio (default: 1)</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Opzioni</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation>&Principale</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation>Paga la &commissione</translation>
</message>
<message>
<location line="+31"/>
<source>Automatically start MyBroCoin after logging in to the system.</source>
<translation>Avvia automaticamente MyBroCoin dopo l'accesso al sistema.</translation>
</message>
<message>
<location line="+3"/>
<source>&Start MyBroCoin on system login</source>
<translation>&Fai partire MyBroCoin all'avvio del sistema</translation>
</message>
<message>
<location line="+35"/>
<source>Reset all client options to default.</source>
<translation>Ripristina tutte le opzioni del client alle predefinite.</translation>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation>&Ripristina Opzioni</translation>
</message>
<message>
<location line="+13"/>
<source>&Network</source>
<translation>Rete</translation>
</message>
<message>
<location line="+6"/>
<source>Automatically open the MyBroCoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Apri automaticamente la porta del client MyBroCoin sul router. Questo funziona solo se il router supporta UPnP ed è abilitato.</translation>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation>Mappa le porte tramite l'&UPnP</translation>
</message>
<message>
<location line="+7"/>
<source>Connect to the MyBroCoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation>Connettiti alla rete MyBroCoin attraverso un proxy SOCKS (ad esempio quando ci si collega via Tor).</translation>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation>&Collegati tramite SOCKS proxy:</translation>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation>&IP del proxy:</translation>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation>Indirizzo IP del proxy (ad esempio 127.0.0.1)</translation>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation>&Porta:</translation>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Porta del proxy (es. 9050)</translation>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation>SOCKS &Version:</translation>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation>Versione SOCKS del proxy (es. 5)</translation>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation>&Finestra</translation>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Mostra solo un'icona nel tray quando si minimizza la finestra</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimizza sul tray invece che sulla barra delle applicazioni</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Riduci ad icona, invece di uscire dall'applicazione quando la finestra viene chiusa. Quando questa opzione è attivata, l'applicazione verrà chiusa solo dopo aver selezionato Esci nel menu.</translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>M&inimizza alla chiusura</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>&Mostra</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation>&Lingua Interfaccia Utente:</translation>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting MyBroCoin.</source>
<translation>La lingua dell'interfaccia utente può essere impostata qui. L'impostazione avrà effetto dopo il riavvio di MyBroCoin.</translation>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>&Unità di misura degli importi in:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Scegli l'unità di suddivisione di default per l'interfaccia e per l'invio di monete</translation>
</message>
<message>
<location line="+9"/>
<source>Whether to show MyBroCoin addresses in the transaction list or not.</source>
<translation>Se mostrare l'indirizzo MyBroCoin nella transazione o meno.</translation>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation>&Mostra gli indirizzi nella lista delle transazioni</translation>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation>&Annulla</translation>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation>&Applica</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+53"/>
<source>default</source>
<translation>default</translation>
</message>
<message>
<location line="+130"/>
<source>Confirm options reset</source>
<translation>Conferma ripristino opzioni</translation>
</message>
<message>
<location line="+1"/>
<source>Some settings may require a client restart to take effect.</source>
<translation>Alcune impostazioni potrebbero richiedere il riavvio del client per avere effetto.</translation>
</message>
<message>
<location line="+0"/>
<source>Do you want to proceed?</source>
<translation>Vuoi procedere?</translation>
</message>
<message>
<location line="+42"/>
<location line="+9"/>
<source>Warning</source>
<translation>Attenzione</translation>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting MyBroCoin.</source>
<translation>L'impostazione avrà effetto dopo il riavvio di MyBroCoin.</translation>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation>L'indirizzo proxy che hai fornito è invalido.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Modulo</translation>
</message>
<message>
<location line="+50"/>
<location line="+166"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the MyBroCoin network after a connection is established, but this process has not completed yet.</source>
<translation>Le informazioni visualizzate potrebbero non essere aggiornate. Il tuo portafogli si sincronizza automaticamente con la rete MyBroCoin una volta stabilita la connessione, ma questo processo non è ancora stato completato.</translation>
</message>
<message>
<location line="-124"/>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation>Non confermato:</translation>
</message>
<message>
<location line="-78"/>
<source>Wallet</source>
<translation>Portamonete</translation>
</message>
<message>
<location line="+107"/>
<source>Immature:</source>
<translation>Immaturo:</translation>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation>Saldo generato dal mining non ancora maturato</translation>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Transazioni recenti</b></translation>
</message>
<message>
<location line="-101"/>
<source>Your current balance</source>
<translation>Saldo attuale</translation>
</message>
<message>
<location line="+29"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation>Totale delle transazioni in corso di conferma, che non sono ancora incluse nel saldo attuale</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation>fuori sincrono</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start mariobroscoin: click-to-pay handler</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation>Finestra di dialogo del codice QR</translation>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation>Richiedi pagamento</translation>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation>Importo:</translation>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation>Etichetta:</translation>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation>Messaggio:</translation>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation>&Salva come...</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation>Errore nella codifica URI nel codice QR</translation>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation>L'importo specificato non è valido, prego verificare.</translation>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>L'URI risulta troppo lungo, prova a ridurre il testo nell'etichetta / messaggio.</translation>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation>Salva codice QR</translation>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation>Immagini PNG (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation>Nome del client</translation>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+339"/>
<source>N/A</source>
<translation>N/D</translation>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation>Versione client</translation>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation>&Informazione</translation>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation>Versione OpenSSL in uso</translation>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation>Tempo di avvio</translation>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>Rete</translation>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation>Numero connessioni</translation>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation>Nel testnet</translation>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation>Block chain</translation>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation>Numero attuale di blocchi</translation>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation>Numero totale stimato di blocchi</translation>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation>Ora del blocco più recente</translation>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation>&Apri</translation>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation>opzioni riga di comando</translation>
</message>
<message>
<location line="+7"/>
<source>Show the MyBroCoin-Qt help message to get a list with possible MyBroCoin command-line options.</source>
<translation>Mostra il messaggio di aiuto di MyBroCoin-QT per avere la lista di tutte le opzioni della riga di comando di MyBroCoin.</translation>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation>&Mostra</translation>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation>&Console</translation>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation>Data di creazione</translation>
</message>
<message>
<location line="-104"/>
<source>MyBroCoin - Debug window</source>
<translation>MyBroCoin - Finestra debug</translation>
</message>
<message>
<location line="+25"/>
<source>MyBroCoin Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation>File log del Debug</translation>
</message>
<message>
<location line="+7"/>
<source>Open the MyBroCoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation>Apri il file di log del debug di MyBroCoin dalla cartella attuale. Può richiedere alcuni secondi per file di log grandi.</translation>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation>Svuota console</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-30"/>
<source>Welcome to the MyBroCoin RPC console.</source>
<translation>Benvenuto nella console RPC di MyBroCoin</translation>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Usa le frecce direzionali per navigare la cronologia e <b>Ctrl-L</b> per pulire lo schermo.</translation>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Scrivi <b>help</b> per un riassunto dei comandi disponibili</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+124"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Spedisci MyBroCoin</translation>
</message>
<message>
<location line="+50"/>
<source>Send to multiple recipients at once</source>
<translation>Spedisci a diversi beneficiari in una volta sola</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation>&Aggiungi beneficiario</translation>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation>Rimuovi tutti i campi della transazione</translation>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation>Cancella &tutto</translation>
</message>
<message>
<location line="+22"/>
<source>Balance:</source>
<translation>Saldo:</translation>
</message>
<message>
<location line="+10"/>
<source>123.456 BTC</source>
<translation>123,456 BTC</translation>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Conferma la spedizione</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>&Spedisci</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-59"/>
<source><b>%1</b> to %2 (%3)</source>
<translation><b>%1</b> to %2 (%3)</translation>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>Conferma la spedizione di mariobroscoin</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation>Si è sicuri di voler spedire %1?</translation>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation> e </translation>
</message>
<message>
<location line="+23"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>L'indirizzo del beneficiario non è valido, per cortesia controlla.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>L'importo da pagare dev'essere maggiore di 0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation>L'importo è superiore al saldo attuale</translation>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>Il totale è superiore al saldo attuale includendo la commissione %1.</translation>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>Trovato un indirizzo doppio, si può spedire solo una volta a ciascun indirizzo in una singola operazione.</translation>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
<translation>Errore: Creazione transazione fallita!</translation>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Errore: la transazione è stata rifiutata. Ciò accade se alcuni mariobroscoin nel portamonete sono stati già spesi, ad esempio se è stata usata una copia del file wallet.dat e i mariobroscoin sono stati spesi dalla copia ma non segnati come spesi qui.</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation>Modulo</translation>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>&Importo:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>Paga &a:</translation>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>L'indirizzo del beneficiario a cui inviare il pagamento (ad esempio Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Inserisci un'etichetta per questo indirizzo, per aggiungerlo nella rubrica</translation>
</message>
<message>
<location line="-78"/>
<source>&Label:</source>
<translation>&Etichetta</translation>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation>Scegli l'indirizzo dalla rubrica</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Incollare l'indirizzo dagli appunti</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation>Rimuovere questo beneficiario</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a MyBroCoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>Inserisci un indirizzo MyBroCoin (ad esempio Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation>Firme - Firma / Verifica un messaggio</translation>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation>&Firma il messaggio</translation>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>Puoi firmare messaggi con i tuoi indirizzi per dimostrare che sono tuoi. Fai attenzione a non firmare niente di vago, visto che gli attacchi di phishing potrebbero cercare di spingerti a mettere la tua firma su di loro. Firma solo dichiarazioni completamente dettagliate con cui sei d'accordo.</translation>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>L'indirizzo con cui firmare il messaggio (ad esempio Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="+10"/>
<location line="+213"/>
<source>Choose an address from the address book</source>
<translation>Scegli l'indirizzo dalla rubrica</translation>
</message>
<message>
<location line="-203"/>
<location line="+213"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-203"/>
<source>Paste address from clipboard</source>
<translation>Incollare l'indirizzo dagli appunti</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation>Inserisci qui il messaggio che vuoi firmare</translation>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation>Firma</translation>
</message>
<message>
<location line="+27"/>
<source>Copy the current signature to the system clipboard</source>
<translation>Copia la firma corrente nella clipboard</translation>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this MyBroCoin address</source>
<translation>Firma un messaggio per dimostrare di possedere questo indirizzo</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>Firma &messaggio</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation>Reimposta tutti i campi della firma</translation>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation>Cancella &tutto</translation>
</message>
<message>
<location line="-87"/>
<source>&Verify Message</source>
<translation>&Verifica Messaggio</translation>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>L'indirizzo con cui è stato firmato il messaggio (ad esempio Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified MyBroCoin address</source>
<translation>Verifica il messaggio per assicurarsi che sia stato firmato con l'indirizzo MyBroCoin specificato</translation>
</message>
<message>
<location line="+3"/>
<source>Verify &Message</source>
<translation>&Verifica Messaggio</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
<translation>Reimposta tutti i campi della verifica messaggio</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a MyBroCoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>Inserisci un indirizzo MyBroCoin (ad esempio Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation>Clicca "Firma il messaggio" per ottenere la firma</translation>
</message>
<message>
<location line="+3"/>
<source>Enter MyBroCoin signature</source>
<translation>Inserisci firma MyBroCoin</translation>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation>L'indirizzo inserito non è valido.</translation>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation>Per favore controlla l'indirizzo e prova ancora</translation>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation>L'indirizzo mariobroscoin inserito non è associato a nessuna chiave.</translation>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation>Sblocco del portafoglio annullato.</translation>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation>La chiave privata per l'indirizzo inserito non è disponibile.</translation>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation>Firma messaggio fallita.</translation>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation>Messaggio firmato.</translation>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation>Non è stato possibile decodificare la firma.</translation>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation>Per favore controlla la firma e prova ancora.</translation>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation>La firma non corrisponde al sunto del messaggio.</translation>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation>Verifica messaggio fallita.</translation>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation>Messaggio verificato.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+22"/>
<source>The MyBroCoin developers</source>
<translation>Sviluppatori di MyBroCoin</translation>
</message>
<message>
<location line="+1"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+20"/>
<source>Open until %1</source>
<translation>Aperto fino a %1</translation>
</message>
<message>
<location line="+6"/>
<source>%1/offline</source>
<translation>%1/offline</translation>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/non confermato</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 conferme</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation>Stato</translation>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation><numerusform>, trasmesso attraverso %n nodo</numerusform><numerusform>, trasmesso attraverso %n nodi</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation>Sorgente</translation>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation>Generato</translation>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>Da</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation>A</translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation>proprio indirizzo</translation>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>etichetta</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation>Credito</translation>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation><numerusform>matura in %n ulteriore blocco</numerusform><numerusform>matura in altri %n blocchi</numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation>non accettate</translation>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation>Debito</translation>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation>Commissione di transazione</translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation>Importo netto</translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>Messaggio</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation>Commento</translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>ID della transazione</translation>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>Bisogna attendere 120 blocchi prima di spendere i mariobroscoin generati. Quando è stato generato questo blocco, è stato trasmesso alla rete per aggiungerlo alla catena di blocchi. Se non riesce a entrare nella catena, il suo stato cambierà in "non accettato" e non sarà spendibile. Questo può accadere occasionalmente se un altro nodo genera un blocco entro pochi secondi dal tuo.</translation>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation>Informazione di debug</translation>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>Transazione</translation>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation>Input</translation>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Importo</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation>vero</translation>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation>falso</translation>
</message>
<message>
<location line="-209"/>
<source>, has not been successfully broadcast yet</source>
<translation>, non è stato ancora trasmesso con successo</translation>
</message>
<message numerus="yes">
<location line="-35"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>Aperto per %n altro blocco</numerusform><numerusform>Aperto per altri %n blocchi</numerusform></translation>
</message>
<message>
<location line="+70"/>
<source>unknown</source>
<translation>sconosciuto</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Dettagli sulla transazione</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Questo pannello mostra una descrizione dettagliata della transazione</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+225"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Indirizzo</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Importo</translation>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
<translation><numerusform>Aperto per %n altro blocco</numerusform><numerusform>Aperto per altri %n blocchi</numerusform></translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation>Aperto fino a %1</translation>
</message>
<message>
<location line="+3"/>
<source>Offline (%1 confirmations)</source>
<translation>Offline (%1 conferme)</translation>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation>Non confermati (%1 su %2 conferme)</translation>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Confermato (%1 conferme)</translation>
</message>
<message numerus="yes">
<location line="+8"/>
<source>Mined balance will be available when it matures in %n more block(s)</source>
<translation><numerusform>Il saldo generato sarà disponibile quando maturerà in %n altro blocco</numerusform><numerusform>Il saldo generato sarà disponibile quando maturerà in altri %n blocchi</numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Questo blocco non è stato ricevuto da altri nodi e probabilmente non sarà accettato!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>Generati, ma non accettati</translation>
</message>
<message>
<location line="+43"/>
<source>Received with</source>
<translation>Ricevuto tramite</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>Ricevuto da</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Spedito a</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Pagamento a te stesso</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Ottenuto dal mining</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n/d)</translation>
</message>
<message>
<location line="+199"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Stato della transazione. Passare con il mouse su questo campo per vedere il numero di conferme.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Data e ora in cui la transazione è stata ricevuta.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Tipo di transazione.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>Indirizzo di destinazione della transazione.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Importo rimosso o aggiunto al saldo.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+52"/>
<location line="+16"/>
<source>All</source>
<translation>Tutti</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Oggi</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>Questa settimana</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Questo mese</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Il mese scorso</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Quest'anno</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Intervallo...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Ricevuto tramite</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Spedito a</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>A te</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Ottenuto dal mining</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Altro</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Inserisci un indirizzo o un'etichetta da cercare</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Importo minimo</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Copia l'indirizzo</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Copia l'etichetta</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Copia l'importo</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>Modifica l'etichetta</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation>Mostra i dettagli della transazione</translation>
</message>
<message>
<location line="+139"/>
<source>Export Transaction Data</source>
<translation>Esporta i dati della transazione</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Testo CSV (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>Confermato</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Data</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Etichetta</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Indirizzo</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Importo</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation>Errore nell'esportazione</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>Impossibile scrivere sul file %1.</translation>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>Intervallo:</translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>a</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+193"/>
<source>Send Coins</source>
<translation>Spedisci MyBroCoin</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+42"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation>Esporta i dati nella tabella corrente su un file</translation>
</message>
<message>
<location line="+193"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation>Backup fallito</translation>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Backup Successful</source>
<translation>Backup eseguito con successo</translation>
</message>
<message>
<location line="+0"/>
<source>The wallet data was successfully saved to the new location.</source>
<translation>Il portafoglio è stato correttamente salvato nella nuova cartella.</translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+94"/>
<source>MyBroCoin version</source>
<translation>Versione di MyBroCoin</translation>
</message>
<message>
<location line="+102"/>
<source>Usage:</source>
<translation>Utilizzo:</translation>
</message>
<message>
<location line="-29"/>
<source>Send command to -server or mariobroscoind</source>
<translation>Manda il comando a -server o mariobroscoind
</translation>
</message>
<message>
<location line="-23"/>
<source>List commands</source>
<translation>Lista comandi
</translation>
</message>
<message>
<location line="-12"/>
<source>Get help for a command</source>
<translation>Aiuto su un comando
</translation>
</message>
<message>
<location line="+24"/>
<source>Options:</source>
<translation>Opzioni:
</translation>
</message>
<message>
<location line="+24"/>
<source>Specify configuration file (default: mariobroscoin.conf)</source>
<translation>Specifica il file di configurazione (di default: mariobroscoin.conf)
</translation>
</message>
<message>
<location line="+3"/>
<source>Specify pid file (default: mariobroscoind.pid)</source>
<translation>Specifica il file pid (default: mariobroscoind.pid)
</translation>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>Specifica la cartella dati
</translation>
</message>
<message>
<location line="-9"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation>Imposta la dimensione cache del database in megabyte (default: 25)</translation>
</message>
<message>
<location line="-28"/>
<source>Listen for connections on <port> (default: 9333 or testnet: 19333)</source>
<translation>Ascolta le connessioni su <porta> (default: 9333 o testnet: 19333)</translation>
</message>
<message>
<location line="+5"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>Mantieni al massimo <n> connessioni ai peer (default: 125)</translation>
</message>
<message>
<location line="-48"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Connessione ad un nodo per ricevere l'indirizzo del peer, e disconnessione</translation>
</message>
<message>
<location line="+82"/>
<source>Specify your own public address</source>
<translation>Specifica il tuo indirizzo pubblico</translation>
</message>
<message>
<location line="+3"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>Soglia di disconnessione dei peer di cattiva qualità (default: 100)</translation>
</message>
<message>
<location line="-134"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation>Numero di secondi di sospensione che i peer di cattiva qualità devono trascorrere prima di riconnettersi (default: 86400)</translation>
</message>
<message>
<location line="-29"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation>Errore riscontrato durante l'impostazione della porta RPC %u per l'ascolto su IPv4: %s</translation>
</message>
<message>
<location line="+27"/>
<source>Listen for JSON-RPC connections on <port> (default: 9332 or testnet: 19332)</source>
<translation>Attendi le connessioni JSON-RPC su <porta> (default: 9332 or testnet: 19332)</translation>
</message>
<message>
<location line="+37"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>Accetta da linea di comando e da comandi JSON-RPC
</translation>
</message>
<message>
<location line="+76"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>Esegui in background come demone e accetta i comandi
</translation>
</message>
<message>
<location line="+37"/>
<source>Use the test network</source>
<translation>Utilizza la rete di prova
</translation>
</message>
<message>
<location line="-112"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>Accetta connessioni dall'esterno (default: 1 se no -proxy o -connect)</translation>
</message>
<message>
<location line="-80"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=mariobroscoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "MyBroCoin Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation>Errore riscontrato durante l'impostazione della porta RPC %u per l'ascolto su IPv6, tornando su IPv4: %s</translation>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>Collega all'indirizzo indicato e resta sempre in ascolto su questo. Usa la notazione [host]:porta per l'IPv6</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. MyBroCoin is probably already running.</source>
<translation>Non è possibile ottenere i dati sulla cartella %s. Probabilmente MyBroCoin è già in esecuzione.</translation>
</message>
<message>
<location line="+3"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Errore: la transazione è stata rifiutata. Ciò accade se alcuni mariobroscoin nel portamonete sono stati già spesi, ad esempio se è stata usata una copia del file wallet.dat e i mariobroscoin sono stati spesi dalla copia ma non segnati come spesi qui.</translation>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation>Errore: questa transazione necessita di una commissione di almeno %s a causa del suo ammontare, della sua complessità, o dell'uso di fondi recentemente ricevuti!</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Esegui comando quando una transazione del portafoglio cambia (%s in cmd è sostituito da TxID)</translation>
</message>
<message>
<location line="+11"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation>Imposta dimensione massima delle transazioni ad alta priorità/bassa-tassa in bytes (predefinito: 27000)</translation>
</message>
<message>
<location line="+6"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation>Questa versione è una compilazione pre-rilascio - usala a tuo rischio - non utilizzarla per la generazione o per applicazioni di commercio</translation>
</message>
<message>
<location line="+5"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Attenzione: -paytxfee è molto alta. Questa è la commissione che si paga quando si invia una transazione.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation>Attenzione: le transazioni mostrate potrebbero essere sbagliate! Potresti aver bisogno di aggiornare, o altri nodi ne hanno bisogno.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong MyBroCoin will not work properly.</source>
<translation>Attenzione: si prega di controllare che la data del computer e l'ora siano corrette. Se il vostro orologio è sbagliato MyBroCoin non funziona correttamente.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation>Attenzione: errore di lettura di wallet.dat! Tutte le chiave lette correttamente, ma i dati delle transazioni o le voci in rubrica potrebbero mancare o non essere corretti.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation>Attenzione: wallet.dat corrotto, dati salvati! Il wallet.dat originale salvato come wallet.{timestamp}.bak in %s; se il tuo bilancio o le transazioni non sono corrette dovresti ripristinare da un backup.</translation>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation>Tenta di recuperare le chiavi private da un wallet.dat corrotto</translation>
</message>
<message>
<location line="+2"/>
<source>Block creation options:</source>
<translation>Opzioni creazione blocco:</translation>
</message>
<message>
<location line="+5"/>
<source>Connect only to the specified node(s)</source>
<translation>Connetti solo al nodo specificato</translation>
</message>
<message>
<location line="+3"/>
<source>Corrupted block database detected</source>
<translation>Rilevato database blocchi corrotto</translation>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>Scopri proprio indirizzo IP (default: 1 se in ascolto e no -externalip)</translation>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation>Vuoi ricostruire ora il database dei blocchi?</translation>
</message>
<message>
<location line="+2"/>
<source>Error initializing block database</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation>Errore caricamento database blocchi</translation>
</message>
<message>
<location line="+4"/>
<source>Error opening block database</source>
<translation>Errore durante l'apertura del database dei blocchi</translation>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation>Errore: lo spazio libero sul disco è poco!</translation>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation>Errore: portafoglio bloccato, impossibile creare la transazione!</translation>
</message>
<message>
<location line="+1"/>
<source>Error: system error: </source>
<translation>Errore: errore di sistema:</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>Impossibile mettersi in ascolto su una porta. Usa -listen=0 se vuoi usare questa opzione.</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block info</source>
<translation>Lettura informazioni blocco fallita</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block</source>
<translation>Lettura blocco fallita</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to sync block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write block info</source>
<translation>Scrittura informazioni blocco fallita</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block</source>
<translation>Scrittura blocco fallita</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write file info</source>
<translation>Scrittura informazioni file fallita</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write to coin database</source>
<translation>Scrittura nel database dei mariobroscoin fallita</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write transaction index</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Failed to write undo data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation>Trova peer utilizzando la ricerca DNS (predefinito: 1 a meno che non si usi -connect)</translation>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation>Quanti blocchi da controllare all'avvio (predefinito: 288, 0 = tutti)</translation>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-4, default: 3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Not enough file descriptors available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+26"/>
<source>Verifying blocks...</source>
<translation>Verifica blocchi...</translation>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation>Verifica portafoglio...</translation>
</message>
<message>
<location line="-69"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation>Importa blocchi da un file blk000??.dat esterno</translation>
</message>
<message>
<location line="-76"/>
<source>Set the number of script verification threads (up to 16, 0 = auto, <0 = leave that many cores free, default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Information</source>
<translation>Informazione</translation>
</message>
<message>
<location line="+3"/>
<source>Invalid -tor address: '%s'</source>
<translation>Indirizzo -tor non valido: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation>Buffer di ricezione massimo per connessione, <n>*1000 byte (default: 5000)</translation>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation>Buffer di invio massimo per connessione, <n>*1000 byte (default: 1000)</translation>
</message>
<message>
<location line="+2"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation>Connetti solo a nodi nella rete <net> (IPv4, IPv6 o Tor)</translation>
</message>
<message>
<location line="+2"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation>Produci informazioni extra utili al debug. Implica tutte le altre opzioni -debug*</translation>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation>Genera informazioni extra utili al debug della rete</translation>
</message>
<message>
<location line="+2"/>
<source>Prepend debug output with timestamp</source>
<translation>Anteponi all'output di debug una marca temporale</translation>
</message>
<message>
<location line="+5"/>
<source>SSL options: (see the MyBroCoin Wiki for SSL setup instructions)</source>
<translation>Opzioni SSL: (vedi il wiki di MyBroCoin per le istruzioni di configurazione SSL)</translation>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation>Selezionare la versione del proxy socks da usare (4-5, default: 5)</translation>
</message>
<message>
<location line="+3"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Invia le informazioni di trace/debug alla console invece che al file debug.log</translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation>Invia le informazioni di trace/debug al debugger</translation>
</message>
<message>
<location line="+5"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation>Imposta dimensione massima del blocco in bytes (predefinito: 250000)</translation>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation>Imposta dimensione minima del blocco in bytes (predefinito: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>Riduci il file debug.log all'avvio del client (predefinito: 1 se non impostato -debug)</translation>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation>Specifica il timeout di connessione in millisecondi (default: 5000)</translation>
</message>
<message>
<location line="+4"/>
<source>System error: </source>
<translation>Errore di sistema:</translation>
</message>
<message>
<location line="+4"/>
<source>Transaction amount too small</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation>Usa UPnP per mappare la porta di ascolto (default: 0)</translation>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>Usa UPnP per mappare la porta di ascolto (default: 1 quando in ascolto)</translation>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation>Usa un proxy per raggiungere servizi nascosti di tor (predefinito: uguale a -proxy)</translation>
</message>
<message>
<location line="+2"/>
<source>Username for JSON-RPC connections</source>
<translation>Nome utente per connessioni JSON-RPC
</translation>
</message>
<message>
<location line="+4"/>
<source>Warning</source>
<translation>Attenzione</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>Attenzione: questa versione è obsoleta, aggiornamento necessario!</translation>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the databases using -reindex to change -txindex</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation>wallet.dat corrotto, salvataggio fallito</translation>
</message>
<message>
<location line="-50"/>
<source>Password for JSON-RPC connections</source>
<translation>Password per connessioni JSON-RPC
</translation>
</message>
<message>
<location line="-67"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>Consenti connessioni JSON-RPC dall'indirizzo IP specificato
</translation>
</message>
<message>
<location line="+76"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>Inviare comandi al nodo in esecuzione su <ip> (default: 127.0.0.1)
</translation>
</message>
<message>
<location line="-120"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Esegui il comando quando il miglior block cambia(%s nel cmd è sostituito dall'hash del blocco)</translation>
</message>
<message>
<location line="+147"/>
<source>Upgrade wallet to latest format</source>
<translation>Aggiorna il wallet all'ultimo formato</translation>
</message>
<message>
<location line="-21"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>Impostare la quantità di chiavi di riserva a <n> (default: 100)
</translation>
</message>
<message>
<location line="-12"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Ripeti analisi della catena dei blocchi per cercare le transazioni mancanti dal portamonete
</translation>
</message>
<message>
<location line="+35"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Utilizzare OpenSSL (https) per le connessioni JSON-RPC
</translation>
</message>
<message>
<location line="-26"/>
<source>Server certificate file (default: server.cert)</source>
<translation>File certificato del server (default: server.cert)
</translation>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>Chiave privata del server (default: server.pem)
</translation>
</message>
<message>
<location line="-151"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation>Cifrari accettabili (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)
</translation>
</message>
<message>
<location line="+165"/>
<source>This help message</source>
<translation>Questo messaggio di aiuto
</translation>
</message>
<message>
<location line="+6"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation>Impossibile effettuare il bind a %s su questo computer (il bind ha restituito l'errore %d, %s)</translation>
</message>
<message>
<location line="-91"/>
<source>Connect through socks proxy</source>
<translation>Connessione tramite socks proxy</translation>
</message>
<message>
<location line="-10"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Consenti ricerche DNS per -addnode, -seednode e -connect
</translation>
</message>
<message>
<location line="+55"/>
<source>Loading addresses...</source>
<translation>Caricamento indirizzi...</translation>
</message>
<message>
<location line="-35"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Errore caricamento wallet.dat: Wallet corrotto</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of MyBroCoin</source>
<translation>Errore caricamento wallet.dat: il wallet richiede una versione nuova di MyBroCoin</translation>
</message>
<message>
<location line="+93"/>
<source>Wallet needed to be rewritten: restart MyBroCoin to complete</source>
<translation>Il portamonete deve essere riscritto: riavviare MyBroCoin per completare</translation>
</message>
<message>
<location line="-95"/>
<source>Error loading wallet.dat</source>
<translation>Errore caricamento wallet.dat</translation>
</message>
<message>
<location line="+28"/>
<source>Invalid -proxy address: '%s'</source>
<translation>Indirizzo -proxy non valido: '%s'</translation>
</message>
<message>
<location line="+56"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Rete sconosciuta specificata in -onlynet: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation>Versione -socks proxy sconosciuta richiesta: %i</translation>
</message>
<message>
<location line="-96"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation>Impossibile risolvere -bind address: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>Impossibile risolvere indirizzo -externalip: '%s'</translation>
</message>
<message>
<location line="+44"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Importo non valido per -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation>Importo non valido</translation>
</message>
<message>
<location line="-6"/>
<source>Insufficient funds</source>
<translation>Fondi insufficienti</translation>
</message>
<message>
<location line="+10"/>
<source>Loading block index...</source>
<translation>Caricamento dell'indice del blocco...</translation>
</message>
<message>
<location line="-57"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Aggiungi un nodo a cui connettersi e prova a mantenere aperta la connessione</translation>
</message>
<message>
<location line="-25"/>
<source>Unable to bind to %s on this computer. MyBroCoin is probably already running.</source>
<translation>Impossibile effettuare il bind a %s su questo computer. Probabilmente MyBroCoin è già in esecuzione.</translation>
</message>
<message>
<location line="+64"/>
<source>Fee per KB to add to transactions you send</source>
<translation>Commissione per KB da aggiungere alle transazioni in uscita</translation>
</message>
<message>
<location line="+19"/>
<source>Loading wallet...</source>
<translation>Caricamento portamonete...</translation>
</message>
<message>
<location line="-52"/>
<source>Cannot downgrade wallet</source>
<translation>Non è possibile retrocedere il wallet</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot write default address</source>
<translation>Non è possibile scrivere l'indirizzo di default</translation>
</message>
<message>
<location line="+64"/>
<source>Rescanning...</source>
<translation>Ripetere la scansione...</translation>
</message>
<message>
<location line="-57"/>
<source>Done loading</source>
<translation>Caricamento completato</translation>
</message>
<message>
<location line="+82"/>
<source>To use the %s option</source>
<translation>Per usare l'opzione %s</translation>
</message>
<message>
<location line="-74"/>
<source>Error</source>
<translation>Errore</translation>
</message>
<message>
<location line="-31"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation>Devi impostare rpcpassword=<password> nel file di configurazione:
%s
Se il file non esiste, crealo con permessi di lettura riservati al solo proprietario.</translation>
</message>
</context>
</TS><|fim▁end|> | <location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Il portamonete è <b>cifrato</b> e attualmente <b>bloccato</b></translation> |
<|file_name|>LogEntryView.js<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*/
CLASS({
name: 'LogEntryView',
package: 'foam.flow',
extends: 'foam.flow.Element',
constants: { ELEMENT_NAME: 'log-entry' },
properties: [
{
name: 'data',
// type: 'foam.flow.LogEntry'
}
],
templates: [
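// Renders the entry number plus its contents, using the entry's mode (log/warn/error) as the tag name so the CSS below can colour it.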
function toInnerHTML() {/*
<num>{{this.data.id}}</num><{{{this.data.mode}}}>{{this.data.contents}}</{{{this.data.mode}}}>
*/},
function CSS() {/*
log-entry {
display: flex;
}
log-entry > num {
min-width: 35px;<|fim▁hole|> display: inline-block;
text-align: right;
padding-right: 13px;
font-weight: bold;
-webkit-touch-callout: none;
-webkit-user-select: none;
-khtml-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
background: #E0E0E0;
}
log-entry > log, log-entry > warn, log-entry > error {
padding-left: 4px;
white-space: pre-wrap;
}
log-entry > log {
color: #333;
}
log-entry > warn {
color: #CC9900;
}
log-entry > error {
color: #C00;
}
*/}
]
});<|fim▁end|> | max-width: 35px; |
<|file_name|>FirstBlockLinePhylipFault.java<|end_file_name|><|fim▁begin|><|fim▁hole|>
import org.jiserte.bioformats.readers.faults.AlignmentReadingFault;
public class FirstBlockLinePhylipFault extends AlignmentReadingFault {
public FirstBlockLinePhylipFault() {
super();
this.setMessage("Sequences in the first block of data must have a description of 10 characters and then the sequence.");
}
}<|fim▁end|> | package org.jiserte.bioformats.readers.phylip; |
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|><|fim▁hole|>It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Courseware.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()<|fim▁end|> | """
WSGI config for Courseware project.
|
<|file_name|>sans_serif_italic.rs<|end_file_name|><|fim▁begin|>/// Transform a character to it's mathematical sans-serif italic
/// equivalent.
pub fn math_sans_serif_italic(c: char) -> Option<char> {
match c {
// Latin capital letters.
'A' => Some('𝘈'),
'B' => Some('𝘉'),
'C' => Some('𝘊'),
'D' => Some('𝘋'),
'E' => Some('𝘌'),
'F' => Some('𝘍'),
'G' => Some('𝘎'),
'H' => Some('𝘏'),
'I' => Some('𝘐'),
'J' => Some('𝘑'),
'K' => Some('𝘒'),
'L' => Some('𝘓'),
'M' => Some('𝘔'),
'N' => Some('𝘕'),
'O' => Some('𝘖'),
'P' => Some('𝘗'),
'Q' => Some('𝘘'),
'R' => Some('𝘙'),
'S' => Some('𝘚'),
'T' => Some('𝘛'),
'U' => Some('𝘜'),
'V' => Some('𝘝'),
'W' => Some('𝘞'),
'X' => Some('𝘟'),
'Y' => Some('𝘠'),
'Z' => Some('𝘡'),
// Latin small letters.
'a' => Some('𝘢'),
'b' => Some('𝘣'),
'c' => Some('𝘤'),
'd' => Some('𝘥'),<|fim▁hole|> 'h' => Some('𝘩'),
'i' => Some('𝘪'),
'j' => Some('𝘫'),
'k' => Some('𝘬'),
'l' => Some('𝘭'),
'm' => Some('𝘮'),
'n' => Some('𝘯'),
'o' => Some('𝘰'),
'p' => Some('𝘱'),
'q' => Some('𝘲'),
'r' => Some('𝘳'),
's' => Some('𝘴'),
't' => Some('𝘵'),
'u' => Some('𝘶'),
'v' => Some('𝘷'),
'w' => Some('𝘸'),
'x' => Some('𝘹'),
'y' => Some('𝘺'),
'z' => Some('𝘻'),
// No equivalence.
_ => None,
}
}<|fim▁end|> | 'e' => Some('𝘦'),
'f' => Some('𝘧'),
'g' => Some('𝘨'), |
<|file_name|>test_qt3dinput.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
import pytest
from qtpy import PYQT5, PYSIDE2
@pytest.mark.skipif(not (PYQT5 or PYSIDE2), reason="Only available in Qt5 bindings")
def test_qt3dinput():
"""Test the qtpy.Qt3DInput namespace"""
Qt3DInput = pytest.importorskip("qtpy.Qt3DInput")
assert Qt3DInput.QAxisAccumulator is not None
assert Qt3DInput.QInputSettings is not None
assert Qt3DInput.QAnalogAxisInput is not None
assert Qt3DInput.QAbstractAxisInput is not None
assert Qt3DInput.QMouseHandler is not None
assert Qt3DInput.QButtonAxisInput is not None
assert Qt3DInput.QInputSequence is not None
assert Qt3DInput.QWheelEvent is not None
assert Qt3DInput.QActionInput is not None
assert Qt3DInput.QKeyboardDevice is not None
assert Qt3DInput.QMouseDevice is not None
assert Qt3DInput.QAxis is not None
assert Qt3DInput.QInputChord is not None
assert Qt3DInput.QMouseEvent is not None
assert Qt3DInput.QKeyboardHandler is not None
assert Qt3DInput.QKeyEvent is not None
assert Qt3DInput.QAbstractActionInput is not None
assert Qt3DInput.QInputAspect is not None
assert Qt3DInput.QLogicalDevice is not None
assert Qt3DInput.QAction is not None<|fim▁hole|><|fim▁end|> | assert Qt3DInput.QAbstractPhysicalDevice is not None
assert Qt3DInput.QAxisSetting is not None |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>from django.shortcuts import get_object_or_404<|fim▁hole|>from apps.canvas_auth.models import User
from canvas.api_decorators import api_decorator
from canvas.metrics import Metrics
from canvas.models import Comment
from canvas.view_guards import require_user
urlpatterns = []
api = api_decorator(urlpatterns)
@api('hide_comment')
@require_user
def hide_comment(request, comment_id):
comment = get_object_or_404(Comment, pk=comment_id)
request.user.redis.hidden_comments.hide_comment(comment)
Metrics.downvote_action.record(request, comment=comment.id)
Metrics.hide_comment.record(request)
@api('hide_thread')
@require_user
def hide_thread(request, comment_id):
"""
`comment_id` may be the thread OP or any reply in it.
Also downvotes.
"""
comment = get_object_or_404(Comment, pk=comment_id)
request.user.redis.hidden_threads.hide_thread(comment)
sticker_count = comment.downvote(request.user, ip=request.META['REMOTE_ADDR'])
Metrics.downvote_action.record(request, count=sticker_count, comment=comment.id)
Metrics.hide_thread.record(request)<|fim▁end|> | |
<|file_name|>metro.py<|end_file_name|><|fim▁begin|>#####################################################################
#
# metro.py
#
# Copyright (c) 2016, Eran Egozy
#
# Released under the MIT License (http://opensource.org/licenses/MIT)
#
#####################################################################
from clock import kTicksPerQuarter, quantize_tick_up
class Metronome(object):
"""Plays a steady click every beat.
"""
def __init__(self, sched, synth, channel = 0, patch=(128, 0), pitch = 60):
super(Metronome, self).__init__()
self.sched = sched
self.synth = synth
self.channel = channel
self.patch = patch
self.pitch = pitch
self.beat_len = kTicksPerQuarter
# run-time variables
self.on_cmd = None
self.off_cmd = None
self.playing = False
def start(self):<|fim▁hole|>
# set up the correct sound (program change)
self.synth.program(self.channel, self.patch[0], self.patch[1])
# find the tick of the next beat, and make it "beat aligned"
now = self.sched.get_tick()
next_beat = quantize_tick_up(now, self.beat_len)
# now, post the _noteon function (and remember this command)
self.on_cmd = self.sched.post_at_tick(next_beat, self._noteon)
def stop(self):
if not self.playing:
return
self.playing = False
# in case there is a note on hanging, turn it off immediately
if self.off_cmd:
self.off_cmd.execute()
# cancel anything pending in the future.
self.sched.remove(self.on_cmd)
self.sched.remove(self.off_cmd)
# reset these so we don't have a reference to old commands.
self.on_cmd = None
self.off_cmd = None
def toggle(self):
if self.playing:
self.stop()
else:
self.start()
def _noteon(self, tick, ignore):
# play the note right now:
self.synth.noteon(self.channel, self.pitch, 100)
# post the note off for half a beat later:
self.off_cmd = self.sched.post_at_tick(tick + self.beat_len/2, self._noteoff, self.pitch)
# schedule the next noteon for one beat later
next_beat = tick + self.beat_len
self.on_cmd = self.sched.post_at_tick(next_beat, self._noteon)
def _noteoff(self, tick, pitch):
# just turn off the currently sounding note.
self.synth.noteoff(self.channel, pitch)<|fim▁end|> | if self.playing:
return
self.playing = True |
<|file_name|>SpaceScene.go<|end_file_name|><|fim▁begin|>package game
import (
"fmt"
"github.com/vova616/GarageEngine/engine"
//"github.com/vova616/GarageEngine/engine/audio"
//"github.com/vova616/GarageEngine/engine/audio/ibxm"
//"github.com/vova616/GarageEngine/engine/audio/wav"
"github.com/vova616/GarageEngine/engine/components"
_ "image/jpeg"
_ "image/png"
//"gl"
"strconv"
"time"
//"strings"
//"math"
"github.com/vova616/GarageEngine/spaceCookies/server"
"github.com/vova616/chipmunk"
"github.com/vova616/chipmunk/vect"
//"image"
//"image/color"
"encoding/json"
"math/rand"
"os"
)
type GameScene struct {
*engine.SceneData
Layer1 *engine.GameObject
Layer2 *engine.GameObject
Layer3 *engine.GameObject
Layer4 *engine.GameObject
}
var (
GameSceneGeneral *GameScene
cir *engine.Texture
boxt *engine.Texture
cookie *engine.GameObject
defender *engine.GameObject
missle *Missle
Player *engine.GameObject
PlayerShip *ShipController
Explosion *engine.GameObject
PowerUpGO *engine.GameObject
Wall *engine.GameObject
atlas *engine.ManagedAtlas
atlasSpace *engine.ManagedAtlas
atlasPowerUp *engine.ManagedAtlas
backgroung *engine.Texture
ArialFont *engine.Font
ArialFont2 *engine.Font
Players map[server.ID]*engine.GameObject = make(map[server.ID]*engine.GameObject)
queenDead = false
//fireSound audio.AudioClip
)
const (
MissleTag = "Missle"
CookieTag = "Cookie"
)
var SpaceShip_A = "Ship"
var Explosion_ID engine.ID
var PowerUps_ID engine.ID
const Missle_A = 334
const HP_A = 123
const HPGUI_A = 124
const Queen_A = 666
const Jet_A = 125
func CheckError(err error) bool {
if err != nil {
fmt.Println(err)
return true
}
return false
}
func LoadTextures() {
atlas = engine.NewManagedAtlas(2048, 1024)
atlasSpace = engine.NewManagedAtlas(1024, 1024)
atlasPowerUp = engine.NewManagedAtlas(256, 256)
var e error
CheckError(atlas.LoadImageID("./data/spaceCookies/Ship1.png", SpaceShip_A))
CheckError(atlas.LoadImageID("./data/spaceCookies/missile.png", Missle_A))
e, Explosion_ID = atlas.LoadGroupSheet("./data/spaceCookies/Explosion.png", 128, 128, 6*8)
CheckError(e)
CheckError(atlas.LoadImageID("./data/spaceCookies/HealthBar.png", HP_A))
CheckError(atlas.LoadImageID("./data/spaceCookies/HealthBarGUI.png", HPGUI_A))
CheckError(atlas.LoadImageID("./data/spaceCookies/Queen.png", Queen_A))
CheckError(atlas.LoadImageID("./data/spaceCookies/Jet.png", Jet_A))
atlas.BuildAtlas()
atlas.BuildMipmaps()
atlas.SetFiltering(engine.MipMapLinearNearest, engine.Nearest)
atlas.Texture.SetReadOnly()
boxt, e = engine.LoadTexture("./data/spaceCookies/wall.png")
boxt.BuildMipmaps()
boxt.SetFiltering(engine.MipMapLinearNearest, engine.Nearest)
backgroung, e = engine.LoadTexture("./data/spaceCookies/background.png")
CheckError(e)
cir, e = engine.LoadTexture("./data/spaceCookies/Cookie.png")
CheckError(e)
cir.BuildMipmaps()
cir.SetFiltering(engine.MipMapLinearNearest, engine.Nearest)
backgroung.BuildMipmaps()
backgroung.SetFiltering(engine.MipMapLinearNearest, engine.Nearest)
CheckError(atlasSpace.LoadGroup("./data/spaceCookies/Space/"))
atlasSpace.BuildAtlas()
atlasSpace.BuildMipmaps()
atlasSpace.SetFiltering(engine.MipMapLinearNearest, engine.Nearest)
atlasSpace.Texture.SetReadOnly()
e, PowerUps_ID = atlasPowerUp.LoadGroupSheet("./data/spaceCookies/powerups.png", 61, 61, 3*4)
CheckError(e)
atlasPowerUp.BuildAtlas()
atlasPowerUp.SetFiltering(engine.Linear, engine.Linear)
ArialFont, e = engine.NewFont("./data/Fonts/arial.ttf", 48)
if e != nil {
panic(e)
}
ArialFont.Texture.SetReadOnly()
ArialFont2, e = engine.NewFont("./data/Fonts/arial.ttf", 24)
if e != nil {
panic(e)
}
ArialFont2.Texture.SetReadOnly()
}
func SpawnMainPlayer(spawnPlayer server.SpawnPlayer) {
Health := engine.NewGameObject("HP")
Health.Transform().SetParent2(GameSceneGeneral.Camera.GameObject())
Health.Transform().SetPositionf(-float32(engine.Width)/2+150, -float32(engine.Height)/2+50)
HealthGUI := engine.NewGameObject("HPGUI")
HealthGUI.AddComponent(engine.NewSprite2(atlas.Texture, engine.IndexUV(atlas, HPGUI_A)))
HealthGUI.Transform().SetParent2(Health)
HealthGUI.Transform().SetDepth(3)
HealthGUI.Transform().SetPositionf(0, 0)
HealthGUI.Transform().SetScalef(50, 50)
HealthBar := engine.NewGameObject("HealthBar")
HealthBar.Transform().SetParent2(Health)
HealthBar.Transform().SetPositionf(-82, 0)
HealthBar.Transform().SetScalef(100, 50)
uvHP := engine.IndexUV(atlas, HP_A)
HealthBarGUI := engine.NewGameObject("HealthBarGUI")
HealthBarGUI.Transform().SetParent2(HealthBar)
HealthBarGUI.AddComponent(engine.NewSprite2(atlas.Texture, uvHP))
HealthBarGUI.Transform().SetScalef(0.52, 1)
HealthBarGUI.Transform().SetDepth(2)
HealthBarGUI.Transform().SetPositionf((uvHP.Ratio/2)*HealthBarGUI.Transform().Scale().X, 0)
JetFire := engine.NewGameObject("Jet")
JetFire.AddComponent(engine.NewSprite2(atlas.Texture, engine.IndexUV(atlas, Jet_A)))
Player.AddComponent(engine.NewSprite2(atlas.Texture, engine.IndexUV(atlas, SpaceShip_A)))
PlayerShip = Player.AddComponent(NewShipController()).(*ShipController)
Player.Transform().SetWorldPositionf(spawnPlayer.PlayerTransform.X, spawnPlayer.PlayerTransform.Y)
Player.Transform().SetWorldRotationf(spawnPlayer.PlayerTransform.Rotation)
Player.Transform().SetWorldScalef(100, 100)
Player.AddComponent(components.NewSmoothFollow(nil, 2, 200))
shipHP := float32(1000)
PlayerShip.HPBar = HealthBar
PlayerShip.JetFire = JetFire
PlayerShip.Missle = missle
Player.AddComponent(NewDestoyable(shipHP, 1))
//as := audio.NewAudioSource(fireSound)
//Player.AddComponent(as)
//PlayerShip.FireSource = as
//as.Pause()
}
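// SpawnPlayer creates (or reuses) a ship for a remote player and places it at the transform received from the server.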
func SpawnPlayer(spawnPlayer server.SpawnPlayer) {
newPlayer, exists := Players[spawnPlayer.PlayerInfo.PlayerID]
if !exists {
newPlayer = engine.NewGameObject(spawnPlayer.PlayerInfo.Name)
}
newPlayer.Transform().SetParent2(GameSceneGeneral.Layer2)
newPlayer.Transform().SetWorldPositionf(spawnPlayer.PlayerTransform.X, spawnPlayer.PlayerTransform.Y)
newPlayer.Transform().SetWorldRotationf(spawnPlayer.PlayerTransform.Rotation)
newPlayer.Transform().SetWorldScalef(100, 100)
newPlayer.AddComponent(engine.NewSprite2(atlas.Texture, engine.IndexUV(atlas, SpaceShip_A)))
if !exists {
Players[spawnPlayer.PlayerInfo.PlayerID] = newPlayer
}
}
func (s *GameScene) Load() {
Players = make(map[server.ID]*engine.GameObject)
LoadTextures()
engine.SetTitle("Space Cookies")
queenDead = false
//var e error
//fireSound, e = wav.NewClip("./data/laser5.wav")
//if e != nil {
// fmt.Println(e)
//}
rand.Seed(time.Now().UnixNano())
GameSceneGeneral = s
s.Camera = engine.NewCamera()
cam := engine.NewGameObject("Camera")
cam.AddComponent(s.Camera)
cam.Transform().SetScalef(1, 1)
gui := engine.NewGameObject("GUI")
Layer1 := engine.NewGameObject("Layer1")
Layer2 := engine.NewGameObject("Layer2")
Layer3 := engine.NewGameObject("Layer3")
Layer4 := engine.NewGameObject("Layer3")
s.Layer1 = Layer1
s.Layer2 = Layer2
s.Layer3 = Layer3
s.Layer4 = Layer4
mouse := engine.NewGameObject("Mouse")
mouse.AddComponent(engine.NewMouse())
mouse.AddComponent(NewMouseDebugger())
mouse.Transform().SetParent2(cam)
FPSDrawer := engine.NewGameObject("FPS")
FPSDrawer.Transform().SetParent2(cam)
txt := FPSDrawer.AddComponent(components.NewUIText(ArialFont2, "")).(*components.UIText)
fps := FPSDrawer.AddComponent(engine.NewFPS()).(*engine.FPS)
fps.SetAction(func(fps float64) {
txt.SetString("FPS: " + strconv.FormatFloat(fps, 'f', 2, 32))
})
txt.SetAlign(engine.AlignLeft)
FPSDrawer.Transform().SetPositionf(-float32(engine.Width)/2+20, +float32(engine.Height)/2-20)
FPSDrawer.Transform().SetScalef(20, 20)
/*
label := engine.NewGameObject("Label")
label.Transform().SetParent2(cam)
label.Transform().SetPositionf(20, float32(engine.Height)-40)
label.Transform().SetScalef(20, 20)
txt2 := label.AddComponent(components.NewUIText(ArialFont2, "Input: ")).(*components.UIText)
txt2.SetFocus(true)
txt2.SetWritable(true)
txt2.SetAlign(engine.AlignLeft)
*/
//SPACCCEEEEE
engine.Space.Gravity.Y = 0
engine.Space.Iterations = 10
uvs, ind := engine.AnimatedGroupUVs(atlas, Explosion_ID)
Explosion = engine.NewGameObject("Explosion")
Explosion.AddComponent(engine.NewSprite3(atlas.Texture, uvs))
Explosion.Sprite.BindAnimations(ind)
Explosion.Sprite.AnimationSpeed = 25
Explosion.Sprite.AnimationEndCallback = func(sprite *engine.Sprite) {
sprite.GameObject().Destroy()
}
Explosion.Transform().SetScalef(30, 30)
Explosion.Transform().SetDepth(1)
missleGameObject := engine.NewGameObject("Missle")
missleGameObject.AddComponent(engine.NewSprite2(atlas.Texture, engine.IndexUV(atlas, Missle_A)))
missleGameObject.AddComponent(engine.NewPhysics(false))
missleGameObject.Transform().SetScalef(20, 20)
missleGameObject.AddComponent(NewDamageDealer(50))
missleGameObject.Physics.Shape.IsSensor = true
missle = NewMissle(30000)
missleGameObject.AddComponent(missle)
missle.Explosion = Explosion
ds := NewDestoyable(0, 1)
ds.SetDestroyTime(1)
missleGameObject.AddComponent(ds)
ship := engine.NewGameObject("Ship")
Player = ship
Player.Transform().SetParent2(Layer2)
Player.AddComponent(MyClient)
/*
Health := engine.NewGameObject("HP")
Health.Transform().SetParent2(cam)
Health.Transform().SetPositionf(150, 50)
HealthGUI := engine.NewGameObject("HPGUI")
HealthGUI.AddComponent(engine.NewSprite2(atlas.Texture, engine.IndexUV(atlas, HPGUI_A)))
HealthGUI.Transform().SetParent2(Health)
HealthGUI.Transform().SetScalef(50, 50)
HealthBar := engine.NewGameObject("HealthBar")
HealthBar.Transform().SetParent2(Health)
HealthBar.Transform().SetPositionf(-82, 0)
HealthBar.Transform().SetScalef(100, 50)
uvHP := engine.IndexUV(atlas, HP_A)
HealthBarGUI := engine.NewGameObject("HealthBarGUI")
HealthBarGUI.Transform().SetParent2(HealthBar)
HealthBarGUI.AddComponent(engine.NewSprite2(atlas.Texture, uvHP))
HealthBarGUI.Transform().SetScalef(0.52, 1)
HealthBarGUI.Transform().SetPositionf((uvHP.Ratio/2)*HealthBarGUI.Transform().Scale().X, 0)
JetFire := engine.NewGameObject("Jet")
JetFire.AddComponent(engine.NewSprite2(atlas.Texture, engine.IndexUV(atlas, Jet_A)))
ship.AddComponent(engine.NewSprite2(atlas.Texture, engine.IndexUV(atlas, SpaceShip_A)))
PlayerShip = ship.AddComponent(NewShipController()).(*ShipController)
ship.Transform().SetParent2(Layer2)
ship.Transform().SetPositionf(400, 200)
ship.Transform().SetScalef(100, 100)
shipHP := float32(1000)
PlayerShip.HPBar = HealthBar
PlayerShip.JetFire = JetFire
PlayerShip.Missle = missle
ship.AddComponent(NewDestoyable(shipHP, 1))
*/
cookie = engine.NewGameObject("Cookie")
cookie.AddComponent(engine.NewSprite(cir))
cookie.AddComponent(NewDestoyable(100, 2))
cookie.AddComponent(NewDamageDealer(20))
cookie.AddComponent(NewEnemeyAI(Player, Enemey_Cookie))
cookie.Transform().SetScalef(50, 50)
cookie.Transform().SetPositionf(400, 400)
cookie.AddComponent(engine.NewPhysicsShape(false, chipmunk.NewCircle(vect.Vect{0, 0}, 25)))
cookie.Tag = CookieTag
defender = engine.NewGameObject("Box")
ds = NewDestoyable(30, 3)
ds.SetDestroyTime(5)
defender.AddComponent(ds)
defender.AddComponent(engine.NewSprite(boxt))
defender.Tag = CookieTag
defender.Transform().SetScalef(50, 50)
phx := defender.AddComponent(engine.NewPhysics(false)).(*engine.Physics)
phx.Body.SetMass(2.5)
phx.Body.SetMoment(phx.Shape.Moment(2.5))
phx.Shape.SetFriction(0.5)
//phx.Shape.Group = 2
phx.Shape.SetElasticity(0.5)
QueenCookie := engine.NewGameObject("Cookie")
QueenCookie.AddComponent(engine.NewSprite2(atlas.Texture, engine.IndexUV(atlas, Queen_A)))
QueenCookie.AddComponent(NewDestoyable(5000, 2))
QueenCookie.AddComponent(NewDamageDealer(200))
QueenCookie.AddComponent(NewEnemeyAI(Player, Enemey_Boss))
QueenCookie.Transform().SetParent2(Layer2)
QueenCookie.Transform().SetScalef(300, 300)
QueenCookie.Transform().SetPositionf(2000, 2000)
QueenCookie.AddComponent(engine.NewPhysicsShape(false, chipmunk.NewCircle(vect.Vect{0, 0}, 25)))
QueenCookie.Tag = CookieTag
staticCookie := engine.NewGameObject("Cookie")
staticCookie.AddComponent(engine.NewSprite(cir))
staticCookie.Transform().SetScalef(400, 400)
staticCookie.Transform().SetPositionf(400, 400)
staticCookie.AddComponent(NewDestoyable(float32(engine.Inf), 2))
staticCookie.AddComponent(engine.NewPhysicsShape(true, chipmunk.NewCircle(vect.Vect{0, 0}, 200)))
staticCookie.Physics.Shape.SetElasticity(0)
staticCookie.Physics.Body.SetMass(999999999999)
staticCookie.Physics.Body.SetMoment(staticCookie.Physics.Shape.Moment(999999999999))
staticCookie.Tag = CookieTag
uvs, ind = engine.AnimatedGroupUVs(atlasSpace, "s")
Background := engine.NewGameObject("Background")
Background.AddComponent(engine.NewSprite3(atlasSpace.Texture, uvs))
Background.Sprite.BindAnimations(ind)
Background.Sprite.SetAnimation("s")
Background.Sprite.AnimationSpeed = 0
Background.Transform().SetScalef(50, 50)
Background.Transform().SetPositionf(400, 400)
uvs, ind = engine.AnimatedGroupUVs(atlasPowerUp, PowerUps_ID)
PowerUpGO = engine.NewGameObject("Background")
//PowerUpGO.Transform().SetParent2(Layer2)
PowerUpGO.AddComponent(engine.NewSprite3(atlasPowerUp.Texture, uvs))
PowerUpGO.AddComponent(engine.NewPhysics(false))
PowerUpGO.Physics.Shape.IsSensor = true
PowerUpGO.Sprite.BindAnimations(ind)
PowerUpGO.Sprite.SetAnimation(PowerUps_ID)
PowerUpGO.Sprite.AnimationSpeed = 0
index := (rand.Int() % 6) + 6
PowerUpGO.Sprite.SetAnimationIndex(int(index))
PowerUpGO.Transform().SetScalef(61, 61)
PowerUpGO.Transform().SetPositionf(0, 0)
background := engine.NewGameObject("Background")
background.AddComponent(engine.NewSprite(backgroung))
background.Transform().SetScalef(800, 800)
background.Transform().SetPositionf(0, 0)
background.Transform().SetDepth(-6)
background.Transform().SetParent2(cam)
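// Scatter randomly scaled and rotated background sprites over the play area.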
for i := 0; i < 300; i++ {
c := Background.Clone()
c.Transform().SetParent2(Layer4)
c.Transform().SetDepth(-5)
size := 20 + rand.Float32()*50
p := engine.Vector{(rand.Float32() * 5000) - 1000, (rand.Float32() * 5000) - 1000, 1}
index := rand.Int() % 7
Background.Sprite.SetAnimationIndex(int(index))
c.Transform().SetRotationf(rand.Float32() * 360)
c.Transform().SetPosition(p)
c.Transform().SetScalef(size, size)
}
for i := 0; i < 600; i++ {
c := cookie.Clone()
//c.Tag = CookieTag
c.Transform().SetParent2(Layer2)
size := 40 + rand.Float32()*100
p := engine.Vector{(rand.Float32() * 4000), (rand.Float32() * 4000), 1}
if p.X < 1100 && p.Y < 800 {
p.X += 1100
p.Y += 800
}
c.Transform().SetPosition(p)
c.Transform().SetScalef(size, size)
}
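// Fence the play area on all four sides with rows of indestructible static cookies.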
Wall = engine.NewGameObject("Wall")
Wall.Transform().SetParent2(Layer2)
for i := 0; i < (4000/400)+2; i++ {
c := staticCookie.Clone()
c.Transform().SetParent2(Wall)
p := engine.Vector{float32(i) * 400, -200, 1}
c.Transform().SetPosition(p)
c.Transform().SetScalef(400, 400)
}
for i := 0; i < (4000/400)+2; i++ {
c := staticCookie.Clone()
c.Transform().SetParent2(Wall)
p := engine.Vector{float32(i) * 400, 4200, 1}
c.Transform().SetPosition(p)
c.Transform().SetScalef(400, 400)
}
for i := 0; i < (4000/400)+2; i++ {
c := staticCookie.Clone()
c.Transform().SetParent2(Wall)
p := engine.Vector{-200, float32(i) * 400, 1}
c.Transform().SetPosition(p)
c.Transform().SetScalef(400, 400)
}
for i := 0; i < (4000/400)+2; i++ {
c := staticCookie.Clone()
c.Transform().SetParent2(Wall)
p := engine.Vector{4200, float32(i) * 400, 1}
c.Transform().SetPosition(p)
c.Transform().SetScalef(400, 400)
}
//cam.AddComponent(audio.NewAudioListener())
//clip, e := ibxm.NewClip("./data/GameSong.xm")
//if e != nil {
// panic(e)
//}
//music := engine.NewGameObject("GameSong")
//as := audio.NewAudioSource(clip)
//music.AddComponent(as)
//as.SetLooping(true)
//as.SetGain(0.3)
s.AddGameObject(cam)
s.AddGameObject(gui)
s.AddGameObject(Layer1)
s.AddGameObject(Layer2)
s.AddGameObject(Layer3)
s.AddGameObject(Layer4)
//s.AddGameObject(music)
//s.AddGameObject(shadowShader)
fmt.Println("GameScene loaded")
}
func (s *GameScene) New() engine.Scene {
gs := new(GameScene)
gs.SceneData = engine.NewScene("GameScene")
return gs
}
func (s *GameScene) OldLoad() {
LoadTextures()
queenDead = false
rand.Seed(time.Now().UnixNano())
GameSceneGeneral = s
s.Camera = engine.NewCamera()
cam := engine.NewGameObject("Camera")
cam.AddComponent(s.Camera)
cam.Transform().SetScalef(1, 1)
gui := engine.NewGameObject("GUI")
Layer1 := engine.NewGameObject("Layer1")
Layer2 := engine.NewGameObject("Layer2")
Layer3 := engine.NewGameObject("Layer3")
Layer4 := engine.NewGameObject("Layer3")
s.Layer1 = Layer1
s.Layer2 = Layer2
s.Layer3 = Layer3
s.Layer4 = Layer4
mouse := engine.NewGameObject("Mouse")
mouse.AddComponent(engine.NewMouse())
mouse.AddComponent(NewMouseDebugger())
mouse.Transform().SetParent2(cam)
FPSDrawer := engine.NewGameObject("FPS")
FPSDrawer.Transform().SetParent2(cam)
txt := FPSDrawer.AddComponent(components.NewUIText(ArialFont2, "")).(*components.UIText)
fps := FPSDrawer.AddComponent(engine.NewFPS()).(*engine.FPS)
fps.SetAction(func(fps float64) {
txt.SetString("FPS: " + strconv.FormatFloat(fps, 'f', 2, 32))
})
txt.SetAlign(engine.AlignLeft)
FPSDrawer.Transform().SetPositionf(20, float32(engine.Height)-20)
FPSDrawer.Transform().SetScalef(20, 20)
label := engine.NewGameObject("Label")
label.Transform().SetParent2(cam)
label.Transform().SetPositionf(20, float32(engine.Height)-40)
label.Transform().SetScalef(20, 20)
txt2 := label.AddComponent(components.NewUIText(ArialFont2, "Input: ")).(*components.UIText)
txt2.SetFocus(true)
txt2.SetWritable(true)
txt2.SetAlign(engine.AlignLeft)
<|fim▁hole|> //SPACCCEEEEE
engine.Space.Gravity.Y = 0
engine.Space.Iterations = 10
Health := engine.NewGameObject("HP")
Health.Transform().SetParent2(cam)
Health.Transform().SetPositionf(150, 50)
HealthGUI := engine.NewGameObject("HPGUI")
HealthGUI.AddComponent(engine.NewSprite2(atlas.Texture, engine.IndexUV(atlas, HPGUI_A)))
HealthGUI.Transform().SetParent2(Health)
HealthGUI.Transform().SetScalef(50, 50)
HealthBar := engine.NewGameObject("HealthBar")
HealthBar.Transform().SetParent2(Health)
HealthBar.Transform().SetPositionf(-82, 0)
HealthBar.Transform().SetScalef(100, 50)
uvHP := engine.IndexUV(atlas, HP_A)
HealthBarGUI := engine.NewGameObject("HealthBarGUI")
HealthBarGUI.Transform().SetParent2(HealthBar)
HealthBarGUI.AddComponent(engine.NewSprite2(atlas.Texture, uvHP))
HealthBarGUI.Transform().SetScalef(0.52, 1)
HealthBarGUI.Transform().SetPositionf((uvHP.Ratio/2)*HealthBarGUI.Transform().Scale().X, 0)
JetFire := engine.NewGameObject("Jet")
JetFire.AddComponent(engine.NewSprite2(atlas.Texture, engine.IndexUV(atlas, Jet_A)))
ship := engine.NewGameObject("Ship")
Player = ship
ship.AddComponent(engine.NewSprite2(atlas.Texture, engine.IndexUV(atlas, SpaceShip_A)))
PlayerShip = ship.AddComponent(NewShipController()).(*ShipController)
ship.AddComponent(components.NewSmoothFollow(nil, 0, 50))
ship.Transform().SetParent2(Layer2)
ship.Transform().SetPositionf(400, 200)
ship.Transform().SetScalef(100, 100)
shipHP := float32(1000)
PlayerShip.HPBar = HealthBar
PlayerShip.JetFire = JetFire
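// Tunable gameplay settings, loaded from game.dat below; the file is created with these defaults on first run.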
settings := struct {
Ship *ShipController
PowerUpChance *int
PowerUpRepairChance *int
ShipHP *float32
Debug *bool
}{
PlayerShip,
&PowerUpChance,
&PowerUpRepairChance,
&shipHP,
&engine.Debug,
}
f, e := os.Open("./data/spaceCookies/game.dat")
if e != nil {
f, e = os.Create("./data/spaceCookies/game.dat")
if e != nil {
fmt.Println(e)
}
defer f.Close()
encoder := json.NewEncoder(f)
encoder.Encode(settings)
} else {
defer f.Close()
}
decoder := json.NewDecoder(f)
e = decoder.Decode(&settings)
if e != nil {
fmt.Println(e)
}
ship.AddComponent(NewDestoyable(shipHP, 1))
uvs, ind := engine.AnimatedGroupUVs(atlas, Explosion_ID)
Explosion = engine.NewGameObject("Explosion")
Explosion.AddComponent(engine.NewSprite3(atlas.Texture, uvs))
Explosion.Sprite.BindAnimations(ind)
Explosion.Sprite.AnimationSpeed = 25
Explosion.Sprite.AnimationEndCallback = func(sprite *engine.Sprite) {
sprite.GameObject().Destroy()
}
Explosion.Transform().SetScalef(30, 30)
missle := engine.NewGameObject("Missle")
missle.AddComponent(engine.NewSprite2(atlas.Texture, engine.IndexUV(atlas, Missle_A)))
missle.AddComponent(engine.NewPhysics(false))
missle.Transform().SetScalef(20, 20)
missle.AddComponent(NewDamageDealer(50))
m := NewMissle(30000)
missle.AddComponent(m)
PlayerShip.Missle = m
m.Explosion = Explosion
ds := NewDestoyable(0, 1)
ds.SetDestroyTime(1)
missle.AddComponent(ds)
cookie = engine.NewGameObject("Cookie")
cookie.AddComponent(engine.NewSprite(cir))
cookie.AddComponent(NewDestoyable(100, 2))
cookie.AddComponent(NewDamageDealer(20))
cookie.AddComponent(NewEnemeyAI(Player, Enemey_Cookie))
cookie.Transform().SetScalef(50, 50)
cookie.Transform().SetPositionf(400, 400)
cookie.AddComponent(engine.NewPhysicsShape(false, chipmunk.NewCircle(vect.Vect{0, 0}, 25)))
cookie.Tag = CookieTag
defender = engine.NewGameObject("Box")
ds = NewDestoyable(30, 3)
ds.SetDestroyTime(5)
defender.AddComponent(ds)
defender.AddComponent(engine.NewSprite(boxt))
defender.Tag = CookieTag
defender.Transform().SetScalef(50, 50)
phx := defender.AddComponent(engine.NewPhysics(false)).(*engine.Physics)
phx.Body.SetMass(2.5)
phx.Body.SetMoment(phx.Shape.Moment(2.5))
phx.Shape.SetFriction(0.5)
//phx.Shape.Group = 2
phx.Shape.SetElasticity(0.5)
QueenCookie := engine.NewGameObject("Cookie")
QueenCookie.AddComponent(engine.NewSprite2(atlas.Texture, engine.IndexUV(atlas, Queen_A)))
QueenCookie.AddComponent(NewDestoyable(5000, 2))
QueenCookie.AddComponent(NewDamageDealer(200))
QueenCookie.AddComponent(NewEnemeyAI(Player, Enemey_Boss))
QueenCookie.Transform().SetParent2(Layer2)
QueenCookie.Transform().SetScalef(300, 300)
QueenCookie.Transform().SetPositionf(999999, 999999)
QueenCookie.AddComponent(engine.NewPhysicsShape(false, chipmunk.NewCircle(vect.Vect{0, 0}, 25)))
QueenCookie.Tag = CookieTag
staticCookie := engine.NewGameObject("Cookie")
staticCookie.AddComponent(engine.NewSprite(cir))
staticCookie.Transform().SetScalef(400, 400)
staticCookie.Transform().SetPositionf(400, 400)
staticCookie.AddComponent(NewDestoyable(float32(engine.Inf), 2))
staticCookie.AddComponent(engine.NewPhysicsShape(true, chipmunk.NewCircle(vect.Vect{0, 0}, 200)))
staticCookie.Physics.Shape.SetElasticity(0)
staticCookie.Physics.Body.SetMass(999999999999)
staticCookie.Physics.Body.SetMoment(staticCookie.Physics.Shape.Moment(999999999999))
staticCookie.Tag = CookieTag
uvs, ind = engine.AnimatedGroupUVs(atlasSpace, "s")
Background := engine.NewGameObject("Background")
Background.AddComponent(engine.NewSprite3(atlasSpace.Texture, uvs))
Background.Sprite.BindAnimations(ind)
Background.Sprite.SetAnimation("s")
Background.Sprite.AnimationSpeed = 0
Background.Transform().SetScalef(50, 50)
Background.Transform().SetPositionf(400, 400)
uvs, ind = engine.AnimatedGroupUVs(atlasPowerUp, PowerUps_ID)
PowerUpGO = engine.NewGameObject("Background")
//PowerUpGO.Transform().SetParent2(Layer2)
PowerUpGO.AddComponent(engine.NewSprite3(atlasPowerUp.Texture, uvs))
PowerUpGO.AddComponent(engine.NewPhysics(false))
PowerUpGO.Physics.Shape.IsSensor = true
PowerUpGO.Sprite.BindAnimations(ind)
PowerUpGO.Sprite.SetAnimation(PowerUps_ID)
PowerUpGO.Sprite.AnimationSpeed = 0
index := (rand.Int() % 6) + 6
PowerUpGO.Sprite.SetAnimationIndex(int(index))
PowerUpGO.Transform().SetScalef(61, 61)
PowerUpGO.Transform().SetPositionf(0, 0)
background := engine.NewGameObject("Background")
background.AddComponent(engine.NewSprite(backgroung))
background.AddComponent(NewBackground(background.Sprite))
background.Sprite.Render = false
//background.Transform().SetScalef(float32(backgroung.Height()), float32(backgroung.Height()), 1)
background.Transform().SetScalef(800, 800)
background.Transform().SetPositionf(0, 0)
for i := 0; i < 300; i++ {
c := Background.Clone()
c.Transform().SetParent2(Layer4)
size := 20 + rand.Float32()*50
p := engine.Vector{(rand.Float32() * 5000) - 1000, (rand.Float32() * 5000) - 1000, 1}
index := rand.Int() % 7
Background.Sprite.SetAnimationIndex(int(index))
c.Transform().SetRotationf(rand.Float32() * 360)
c.Transform().SetPosition(p)
c.Transform().SetScalef(size, size)
}
for i := 0; i < 600; i++ {
c := cookie.Clone()
//c.Tag = CookieTag
c.Transform().SetParent2(Layer2)
size := 40 + rand.Float32()*100
p := engine.Vector{(rand.Float32() * 4000), (rand.Float32() * 4000), 1}
if p.X < 1100 && p.Y < 800 {
p.X += 1100
p.Y += 800
}
c.Transform().SetPosition(p)
c.Transform().SetScalef(size, size)
}
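// Build the boundary wall from clones of the static cookie.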
Wall = engine.NewGameObject("Wall")
Wall.Transform().SetParent2(Layer2)
for i := 0; i < (4000/400)+2; i++ {
c := staticCookie.Clone()
c.Transform().SetParent2(Wall)
p := engine.Vector{float32(i) * 400, -200, 1}
c.Transform().SetPosition(p)
c.Transform().SetScalef(400, 400)
}
for i := 0; i < (4000/400)+2; i++ {
c := staticCookie.Clone()
c.Transform().SetParent2(Wall)
p := engine.Vector{float32(i) * 400, 4200, 1}
c.Transform().SetPosition(p)
c.Transform().SetScalef(400, 400)
}
for i := 0; i < (4000/400)+2; i++ {
c := staticCookie.Clone()
c.Transform().SetParent2(Wall)
p := engine.Vector{-200, float32(i) * 400, 1}
c.Transform().SetPosition(p)
c.Transform().SetScalef(400, 400)
}
for i := 0; i < (4000/400)+2; i++ {
c := staticCookie.Clone()
c.Transform().SetParent2(Wall)
p := engine.Vector{4200, float32(i) * 400, 1}
c.Transform().SetPosition(p)
c.Transform().SetScalef(400, 400)
}
s.AddGameObject(cam)
s.AddGameObject(gui)
s.AddGameObject(Layer1)
s.AddGameObject(Layer2)
s.AddGameObject(Layer3)
s.AddGameObject(Layer4)
s.AddGameObject(background)
//s.AddGameObject(shadowShader)
fmt.Println("GameScene loaded")
}<|fim▁end|> | |
<|file_name|>syntax-extension-source-utils.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// This test is brittle!
// ignore-pretty - the pretty tests lose path information, breaking include!
pub mod m1 {
pub mod m2 {
pub fn where_am_i() -> String {
(module_path!()).to_string()
}
}
}
macro_rules! indirect_line { () => ( line!() ) }
pub fn main() {
assert_eq!(line!(), 25);
assert!((column!() == 4));
assert_eq!(indirect_line!(), 27);
assert!((file!().ends_with("syntax-extension-source-utils.rs")));
assert_eq!(stringify!((2*3) + 5).to_string(), "( 2 * 3 ) + 5".to_string());
assert!(include!("syntax-extension-source-utils-files/includeme.\
fragment").to_string()
== "victory robot 6".to_string());
assert!(<|fim▁hole|> .as_slice()
.starts_with("/* this is for "));
assert!(
include_bytes!("syntax-extension-source-utils-files/includeme.fragment")
[1] == (42 as u8)); // '*'
// The Windows tests are wrapped in an extra module for some reason
assert!((m1::m2::where_am_i().ends_with("m1::m2")));
assert!(match (45, "( 2 * 3 ) + 5") {
(line!(), stringify!((2*3) + 5)) => true,
_ => false
})
}<|fim▁end|> | include_str!("syntax-extension-source-utils-files/includeme.\
fragment").to_string() |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![feature(globs)]
extern crate graphics;
extern crate piston;
// extern crate sdl2_game_window;
extern crate glfw_game_window;
// use Window = sdl2_game_window::GameWindowSDL2;
use Window = glfw_game_window::GameWindowGLFW;
use piston::{
Game,
GameIteratorSettings,
GameWindowSettings,
UpdateArgs,
RenderArgs,
KeyPressArgs,
KeyReleaseArgs,
MousePressArgs,
MouseReleaseArgs,
MouseMoveArgs,
MouseRelativeMoveArgs,
MouseScrollArgs,
};
pub struct App {
_blowup: bool,
_screenshot: bool,
_premult: bool
}
impl App {
/// Creates a new application.
pub fn new() -> App {
App {
_blowup: false,
_screenshot: false,
_premult: false
}
}
}
impl Game for App {
/// Perform tasks for loading before showing anything.
fn load(&mut self) {}
fn update(&mut self, _args: &UpdateArgs) {}
fn render(&mut self, _args: &RenderArgs) {}
fn key_press(&mut self, _args: &KeyPressArgs) {}
fn key_release(&mut self, _args: &KeyReleaseArgs) {}
fn mouse_press(&mut self, _args: &MousePressArgs) {}
fn mouse_release(&mut self, _args: &MouseReleaseArgs) {}
fn mouse_move(&mut self, _args: &MouseMoveArgs) {}
/// Moved mouse relative, not bounded by cursor.
fn mouse_relative_move(&mut self, _args: &MouseRelativeMoveArgs) {}
fn mouse_scroll(&mut self, _args: &MouseScrollArgs) {}
}<|fim▁hole|>fn main() {
let mut window = Window::new(
GameWindowSettings {
title: "NanoMorph Demo".to_string(),
size: [300, 300],
fullscreen: false,
exit_on_esc: true,
}
);
let mut app = App::new();
let game_iter_settings = GameIteratorSettings {
updates_per_second: 120,
max_frames_per_second: 60,
};
app.run(&mut window, &game_iter_settings);
}<|fim▁end|> | |
<|file_name|>test_hashes.py<|end_file_name|><|fim▁begin|>import json
import argparse
import logging
import glob
# Logging Information
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
<|fim▁hole|>formatter = logging.Formatter('%(levelname)s: %(message)s')
fh = logging.FileHandler('test_hashes.log')
fh.setLevel(logging.DEBUG)
fh.setFormatter(formatter)
logger.addHandler(fh)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(formatter)
logger.addHandler(ch)
parser = argparse.ArgumentParser()
parser.add_argument("hash_num", help="file that we want to verify")
args = parser.parse_args()
hashes = set()
hash_num = args.hash_num
logger.info("Verifying consistency for VirusShare_00" + str(hash_num).zfill(3))
logger.debug("Generating hashes from ../hashes/VirusShare_00" + str(hash_num).zfill(3) + ".md5")
with open(("../hashes/VirusShare_00" + str(hash_num).zfill(3) + ".md5"),'r') as file:
for line in file.readlines()[6:]:
hashes.add(line.strip())
for filename in glob.glob("../analyses/VirusShare_00" + str(hash_num).zfill(3) + ".*"):
logger.debug("Removing hashes from " + filename)
with open(filename,'r') as file:
for line in file.readlines():
hashes.remove(json.loads(line.strip())["md5"])
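# Any hashes left over were never seen in an analysis file, so the output is incomplete.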
if len(hashes) == 0:
logger.info("VirusShare_00" + str(hash_num).zfill(3) + ".ldjson is consistent with hashfile")
else:
logger.error("VirusShare_00" + str(hash_num).zfill(3) + ".ldjson is inconsistent with hashfile")<|fim▁end|> | |
<|file_name|>fix_agmethods2.py<|end_file_name|><|fim▁begin|>'''
run with: python ten2eleven.py -f agmethods2 test_dummy_old_MDA_code.py
Author: Tyler Reddy
'''
from lib2to3.fixer_base import BaseFix
from lib2to3.fixer_util import Name, Call, LParen, RParen, ArgList, Dot
from lib2to3 import pytree
class FixAgmethods2(BaseFix):<|fim▁hole|> 'improper')>
parens=trailer< '(' ')' >
tail=any*>
"""
def transform(self, node, results):
head = results['head']
method = results['method'][0]
tail = results['tail']
syms = self.syms
method_name = method.value
if method_name == 'torsion':
method_name = 'dihedral'
head = [n.clone() for n in head]
tail = [n.clone() for n in tail]
args = head + [pytree.Node(syms.trailer, [Dot(), Name(method_name, prefix = method.prefix), Dot(), Name('value'), LParen(), RParen()])]
new = pytree.Node(syms.power, args)
return new<|fim▁end|> |
PATTERN = """
power< head =any+
trailer< dot = '.' method=('bond'|'angle'|'torsion'| |
<|file_name|>product_store.go<|end_file_name|><|fim▁begin|>package postgres
import "github.com/shijuvar/gokit/examples/http-app/pkg/domain"
// ProductStore provides persistence logic for "products" table
type ProductStore struct {
Store DataStore
}
// ToDO: Write CRUD operations here
// Create creates a new Product
func (productStore ProductStore) Create(product domain.Product) (domain.Product, error) {<|fim▁hole|> return domain.Product{}, nil
}<|fim▁end|> | // ToDo: Write the code here |
<|file_name|>escape.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Escaping/unescaping methods for HTML, JSON, URLs, and others.
Also includes a few other miscellaneous string manipulation functions that
have crept in over time.
"""
from __future__ import absolute_import, division, print_function
import json
import re
from tornado.util import PY3, unicode_type, basestring_type
if PY3:
from urllib.parse import parse_qs as _parse_qs
import html.entities as htmlentitydefs
import urllib.parse as urllib_parse
unichr = chr
else:
from urlparse import parse_qs as _parse_qs
import htmlentitydefs
import urllib as urllib_parse
try:
import typing # noqa
except ImportError:
pass
_XHTML_ESCAPE_RE = re.compile('[&<>"\']')
_XHTML_ESCAPE_DICT = {'&': '&', '<': '<', '>': '>', '"': '"',
'\'': '''}
def xhtml_escape(value):
"""Escapes a string so it is valid within HTML or XML.
Escapes the characters ``<``, ``>``, ``"``, ``'``, and ``&``.
When used in attribute values the escaped strings must be enclosed
in quotes.
.. versionchanged:: 3.2
Added the single quote to the list of escaped characters.
"""
return _XHTML_ESCAPE_RE.sub(lambda match: _XHTML_ESCAPE_DICT[match.group(0)],
to_basestring(value))
def xhtml_unescape(value):
"""Un-escapes an XML-escaped string."""
return re.sub(r"&(#?)(\w+?);", _convert_entity, _unicode(value))
# The fact that json_encode wraps json.dumps is an implementation detail.
# Please see https://github.com/tornadoweb/tornado/pull/706
# before sending a pull request that adds **kwargs to this function.
def json_encode(value):
"""JSON-encodes the given Python object."""
# JSON permits but does not require forward slashes to be escaped.
# This is useful when json data is emitted in a <script> tag
# in HTML, as it prevents </script> tags from prematurely terminating
# the javascript. Some json libraries do this escaping by default,
# although python's standard library does not, so we do it here.
# http://stackoverflow.com/questions/1580647/json-why-are-forward-slashes-escaped
return json.dumps(value).replace("</", "<\\/")
def json_decode(value):
"""Returns Python objects for the given JSON string."""
return json.loads(to_basestring(value))
def squeeze(value):
"""Replace all sequences of whitespace chars with a single space."""
return re.sub(r"[\x00-\x20]+", " ", value).strip()
def url_escape(value, plus=True):
"""Returns a URL-encoded version of the given value.
If ``plus`` is true (the default), spaces will be represented
as "+" instead of "%20". This is appropriate for query strings
but not for the path component of a URL. Note that this default
is the reverse of Python's urllib module.
<|fim▁hole|> .. versionadded:: 3.1
The ``plus`` argument
"""
quote = urllib_parse.quote_plus if plus else urllib_parse.quote
return quote(utf8(value))
# python 3 changed things around enough that we need two separate
# implementations of url_unescape. We also need our own implementation
# of parse_qs since python 3's version insists on decoding everything.
if not PY3:
def url_unescape(value, encoding='utf-8', plus=True):
"""Decodes the given value from a URL.
The argument may be either a byte or unicode string.
If encoding is None, the result will be a byte string. Otherwise,
the result is a unicode string in the specified encoding.
If ``plus`` is true (the default), plus signs will be interpreted
as spaces (literal plus signs must be represented as "%2B"). This
is appropriate for query strings and form-encoded values but not
for the path component of a URL. Note that this default is the
reverse of Python's urllib module.
.. versionadded:: 3.1
The ``plus`` argument
"""
unquote = (urllib_parse.unquote_plus if plus else urllib_parse.unquote)
if encoding is None:
return unquote(utf8(value))
else:
return unicode_type(unquote(utf8(value)), encoding)
parse_qs_bytes = _parse_qs
else:
def url_unescape(value, encoding='utf-8', plus=True):
"""Decodes the given value from a URL.
The argument may be either a byte or unicode string.
If encoding is None, the result will be a byte string. Otherwise,
the result is a unicode string in the specified encoding.
If ``plus`` is true (the default), plus signs will be interpreted
as spaces (literal plus signs must be represented as "%2B"). This
is appropriate for query strings and form-encoded values but not
for the path component of a URL. Note that this default is the
reverse of Python's urllib module.
.. versionadded:: 3.1
The ``plus`` argument
"""
if encoding is None:
if plus:
# unquote_to_bytes doesn't have a _plus variant
value = to_basestring(value).replace('+', ' ')
return urllib_parse.unquote_to_bytes(value)
else:
unquote = (urllib_parse.unquote_plus if plus
else urllib_parse.unquote)
return unquote(to_basestring(value), encoding=encoding)
def parse_qs_bytes(qs, keep_blank_values=False, strict_parsing=False):
"""Parses a query string like urlparse.parse_qs, but returns the
values as byte strings.
Keys still become type str (interpreted as latin1 in python3!)
because it's too painful to keep them as byte strings in
python3 and in practice they're nearly always ascii anyway.
"""
# This is gross, but python3 doesn't give us another way.
# Latin1 is the universal donor of character encodings.
result = _parse_qs(qs, keep_blank_values, strict_parsing,
encoding='latin1', errors='strict')
encoded = {}
for k, v in result.items():
encoded[k] = [i.encode('latin1') for i in v]
return encoded
_UTF8_TYPES = (bytes, type(None))
def utf8(value):
# type: (typing.Union[bytes,unicode_type,None])->typing.Union[bytes,None]
"""Converts a string argument to a byte string.
If the argument is already a byte string or None, it is returned unchanged.
Otherwise it must be a unicode string and is encoded as utf8.
"""
if isinstance(value, _UTF8_TYPES):
return value
if not isinstance(value, unicode_type):
raise TypeError(
"Expected bytes, unicode, or None; got %r" % type(value)
)
return value.encode("utf-8")
_TO_UNICODE_TYPES = (unicode_type, type(None))
def to_unicode(value):
"""Converts a string argument to a unicode string.
If the argument is already a unicode string or None, it is returned
unchanged. Otherwise it must be a byte string and is decoded as utf8.
"""
if isinstance(value, _TO_UNICODE_TYPES):
return value
if not isinstance(value, bytes):
raise TypeError(
"Expected bytes, unicode, or None; got %r" % type(value)
)
return value.decode("utf-8")
# to_unicode was previously named _unicode not because it was private,
# but to avoid conflicts with the built-in unicode() function/type
_unicode = to_unicode
# When dealing with the standard library across python 2 and 3 it is
# sometimes useful to have a direct conversion to the native string type
if str is unicode_type:
native_str = to_unicode
else:
native_str = utf8
_BASESTRING_TYPES = (basestring_type, type(None))
def to_basestring(value):
"""Converts a string argument to a subclass of basestring.
In python2, byte and unicode strings are mostly interchangeable,
so functions that deal with a user-supplied argument in combination
with ascii string constants can use either and should return the type
the user supplied. In python3, the two types are not interchangeable,
so this method is needed to convert byte strings to unicode.
"""
if isinstance(value, _BASESTRING_TYPES):
return value
if not isinstance(value, bytes):
raise TypeError(
"Expected bytes, unicode, or None; got %r" % type(value)
)
return value.decode("utf-8")
def recursive_unicode(obj):
"""Walks a simple data structure, converting byte strings to unicode.
Supports lists, tuples, and dictionaries.
"""
if isinstance(obj, dict):
return dict((recursive_unicode(k), recursive_unicode(v)) for (k, v) in obj.items())
elif isinstance(obj, list):
return list(recursive_unicode(i) for i in obj)
elif isinstance(obj, tuple):
return tuple(recursive_unicode(i) for i in obj)
elif isinstance(obj, bytes):
return to_unicode(obj)
else:
return obj
# I originally used the regex from
# http://daringfireball.net/2010/07/improved_regex_for_matching_urls
# but it gets all exponential on certain patterns (such as too many trailing
# dots), causing the regex matcher to never return.
# This regex should avoid those problems.
# Use to_unicode instead of tornado.util.u - we don't want backslashes getting
# processed as escapes.
_URL_RE = re.compile(to_unicode(r"""\b((?:([\w-]+):(/{1,3})|www[.])(?:(?:(?:[^\s&()]|&amp;|&quot;)*(?:[^!"#$%&'()*+,.:;<=>?@\[\]^`{|}~\s]))|(?:\((?:[^\s&()]|&amp;|&quot;)*\)))+)"""))
def linkify(text, shorten=False, extra_params="",
require_protocol=False, permitted_protocols=["http", "https"]):
"""Converts plain text into HTML with links.
For example: ``linkify("Hello http://tornadoweb.org!")`` would return
``Hello <a href="http://tornadoweb.org">http://tornadoweb.org</a>!``
Parameters:
* ``shorten``: Long urls will be shortened for display.
* ``extra_params``: Extra text to include in the link tag, or a callable
taking the link as an argument and returning the extra text
e.g. ``linkify(text, extra_params='rel="nofollow" class="external"')``,
or::
def extra_params_cb(url):
if url.startswith("http://example.com"):
return 'class="internal"'
else:
return 'class="external" rel="nofollow"'
linkify(text, extra_params=extra_params_cb)
* ``require_protocol``: Only linkify urls which include a protocol. If
this is False, urls such as www.facebook.com will also be linkified.
* ``permitted_protocols``: List (or set) of protocols which should be
linkified, e.g. ``linkify(text, permitted_protocols=["http", "ftp",
"mailto"])``. It is very unsafe to include protocols such as
``javascript``.
"""
if extra_params and not callable(extra_params):
extra_params = " " + extra_params.strip()
def make_link(m):
url = m.group(1)
proto = m.group(2)
if require_protocol and not proto:
return url # not protocol, no linkify
if proto and proto not in permitted_protocols:
return url # bad protocol, no linkify
href = m.group(1)
if not proto:
href = "http://" + href # no proto specified, use http
if callable(extra_params):
params = " " + extra_params(href).strip()
else:
params = extra_params
# clip long urls. max_len is just an approximation
max_len = 30
if shorten and len(url) > max_len:
before_clip = url
if proto:
proto_len = len(proto) + 1 + len(m.group(3) or "") # +1 for :
else:
proto_len = 0
parts = url[proto_len:].split("/")
if len(parts) > 1:
# Grab the whole host part plus the first bit of the path
# The path is usually not that interesting once shortened
# (no more slug, etc), so it really just provides a little
# extra indication of shortening.
url = url[:proto_len] + parts[0] + "/" + \
parts[1][:8].split('?')[0].split('.')[0]
if len(url) > max_len * 1.5: # still too long
url = url[:max_len]
if url != before_clip:
amp = url.rfind('&')
# avoid splitting html char entities
if amp > max_len - 5:
url = url[:amp]
url += "..."
if len(url) >= len(before_clip):
url = before_clip
else:
# full url is visible on mouse-over (for those who don't
# have a status bar, such as Safari by default)
params += ' title="%s"' % href
return u'<a href="%s"%s>%s</a>' % (href, params, url)
# First HTML-escape so that our strings are all safe.
# The regex is modified to avoid character entities other than &amp; so
# that we won't pick up &quot;, etc.
text = _unicode(xhtml_escape(text))
return _URL_RE.sub(make_link, text)
def _convert_entity(m):
if m.group(1) == "#":
try:
if m.group(2)[:1].lower() == 'x':
return unichr(int(m.group(2)[1:], 16))
else:
return unichr(int(m.group(2)))
except ValueError:
return "&#%s;" % m.group(2)
try:
return _HTML_UNICODE_MAP[m.group(2)]
except KeyError:
return "&%s;" % m.group(2)
def _build_unicode_map():
unicode_map = {}
for name, value in htmlentitydefs.name2codepoint.items():
unicode_map[name] = unichr(value)
return unicode_map
_HTML_UNICODE_MAP = _build_unicode_map()<|fim▁end|> | |
<|file_name|>translation-service-tests.ts<|end_file_name|><|fim▁begin|>/// <reference path="../include.d.ts" />
import * as should from "should";
import * as mockFs from "mock-fs";
import { TranslationService } from "../../src/services/translation-service";
describe('TranslationService', function() {<|fim▁hole|> describe('languages', function() {
it('displays correct language', async function() {
// Arrange
mockTranslations();
const service = await initServiceAsync('de');
// Act
const result = service.translate('title');
// Assert
should(result).be.equal('Das ist der titel');
});
it('displays fallback language', async function() {
// Arrange
mockTranslations();
const service = await initServiceAsync('de');
// Act
const result = service.translate('subtitle');
// Assert
should(result).be.equal('This is the subtitle');
});
});
describe('layouts without translations', function() {
it('does not throw when only fallback file exists', async function() {
// Arrange
mockFs({
'document': {
'i18n': {
'en.json': `{
"title": "This is the title"
}`
}
}
});
// Act
const service = await initServiceAsync('de');
// Assert
const result = service.translate('title');
should(result).be.equal('This is the title');
});
it('does not throw when no translation file exists', async function() {
// Arrange
mockFs({ });
// Act
const service = await initServiceAsync('de');
// Assert
const result = service.translate('title');
should(result).be.equal('title');
});
});
describe('custom translations', function() {
it('should overwrite existing key', async function() {
// Arrange
mockTranslations();
const service = await initServiceAsync('de');
// Act
service.overwriteTranslations({
title: 'My custom translation'
})
// Assert
const result = service.translate('title');
should(result).be.equal('My custom translation');
});
it('should extend non existing keys (with fallback)', async function() {
// Arrange
mockTranslations();
const service = await initServiceAsync('de');
// Act
service.overwriteTranslations({
subtitle: 'My custom translation'
})
// Assert
const result = service.translate('subtitle');
should(result).be.equal('My custom translation');
const result2 = service.translate('title');
should(result2).be.equal('Das ist der titel');
});
it('should extend non existing keys (without fallback)', async function() {
// Arrange
mockTranslations();
const service = await initServiceAsync('de');
// Act
service.overwriteTranslations({
invalid: 'My custom translation'
})
// Assert
const result = service.translate('invalid');
should(result).be.equal('My custom translation');
});
it('should work deep', async function() {
// Arrange
mockTranslations();
const service = await initServiceAsync('de');
// Act
service.overwriteTranslations({
child: {
title: 'My custom translation'
}
})
// Assert
const result = service.translate('child.title');
should(result).be.equal('My custom translation');
});
});
});
export function mockTranslations() {
mockFs({
'document': {
'i18n': {
'en.json': `{
"title": "This is the title",
"subtitle": "This is the subtitle",
"pageOf": "Page {{current}} of {{max}}",
"count": "singular",
"count_plural": "plural"
}`,
'de.json': `{
"title": "Das ist der titel"
}`
}
}
});
}
export async function initServiceAsync(language: string = 'en') {
const service = new TranslationService('document', language);
await service.initAsync();
return service;
}<|fim▁end|> | |
<|file_name|>styles.py<|end_file_name|><|fim▁begin|>"""
InaSAFE Disaster risk assessment tool developed by AusAid **Messaging styles.**
Contact : [email protected]
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
Style constants for use with messaging. Example usage::
from messaging.styles import PROGRESS_UPDATE_STYLE
m.ImportantText(myTitle, **PROGRESS_UPDATE_STYLE)
This will result in some standardised styling being applied to the important
text element.
"""
__author__ = '[email protected]'
__revision__ = '$Format:%H$'
__date__ = '06/06/2013'
__copyright__ = ('Copyright 2012, Australia Indonesia Facility for '
'Disaster Reduction')
# These all apply to heading elements
PROGRESS_UPDATE_STYLE = {
'level': 5,
'icon': 'icon-cog icon-white',
'style_class': 'info'}
INFO_STYLE = {
'level': 5,
'icon': 'icon-info-sign icon-white',
'style_class': 'info'}
WARNING_STYLE = {
'level': 5,
'icon': 'icon-warning-sign icon-white',
'style_class': 'warning'}
<|fim▁hole|> 'style_class': 'suggestion'}
PROBLEM_STYLE = {
'level': 5,
'icon': 'icon-remove-sign icon-white',
'style_class': 'warning'}
DETAILS_STYLE = {
'level': 5,
'icon': 'icon-list icon-white',
'style_class': 'problem'}
SMALL_ICON_STYLE = {
'attributes': 'style="width: 24px; height: 24px;"',
}
TRACEBACK_STYLE = {
'level': 5,
'icon': 'icon-info-sign icon-white',
'style_class': 'inverse',
'attributes': 'onclick="toggleTracebacks();"'}
TRACEBACK_ITEMS_STYLE = {
'style_class': 'traceback-detail',
}
# This is typically a text element or its derivatives
KEYWORD_STYLE = {
# not working unless you turn css on and off again using inspector
#'style_class': 'label label-success'
}<|fim▁end|> | SUGGESTION_STYLE = {
'level': 5,
'icon': 'icon-comment icon-white', |
<|file_name|>hot-keyword-api.service.spec.ts<|end_file_name|><|fim▁begin|>/* tslint:disable:no-unused-variable */
import { TestBed, async, inject } from '@angular/core/testing';
import { HotKeywordApiService } from './hot-keyword-api.service';
describe('HotKeywordApiService', () => {
beforeEach(() => {
TestBed.configureTestingModule({
providers: [HotKeywordApiService]<|fim▁hole|> expect(service).toBeTruthy();
}));
});<|fim▁end|> | });
});
it('should ...', inject([HotKeywordApiService], (service: HotKeywordApiService) => { |
<|file_name|>problem1.rs<|end_file_name|><|fim▁begin|>/* Enun
If we list all the natural numbers below 10 that are multiples of 3 or 5,
we get 3, 5, 6 and 9. The sum of these multiples is 23.
Find the sum of all the multiples of 3 or 5 below 1000.
*/
fn main(){
let mut s = 0;
for i in (1..1000).filter(|x| (x%3 == 0)||(x%5 == 0)){
s = &i + s;<|fim▁hole|>}<|fim▁end|> | }
println!("sum:{}", s); |
<|file_name|>node.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Helper classes.
"""
"""
Pynu - Python Node Utilities
Copyright (c) 2014 Juho Vepsäläinen
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import re
class NodeContainer(object):
def __init__(self, owner, name, complementary_name):
super(NodeContainer, self).__init__()
self._nodes = list()
self.owner = owner
self.name = name
self.complementary_name = complementary_name
def __getitem__(self, key):
return self._nodes[key]
def __eq__(self, other):
"""Checks if container contents are equal to other.
>>> node1, node2, node3 = Node(), Node(), Node()
>>>
>>> assert node1.children == None
>>>
>>> node1.children = node3
>>> node2.children = node3
>>>
>>> assert node1.children == [node3, ]
>>> assert node1.children == node2.children
"""
if len(self._nodes) == 0 and other is None:
return True
return self._nodes == other
def __ne__(self, other):
"""Checks if container contents are not equal to other.
>>> node1, node2, node3 = Node(), Node(), Node()
>>>
>>> node1.children = node3
>>> node2.children = node3
>>>
>>> assert node1.children != [node2, ]
>>> assert node1.children != node3.children
"""
return not self == other
def __len__(self):
return len(self._nodes)
def _set_content(self, content):
"""Sets content of the container.
>>> node1, node2 = Node(), Node()
>>> node1.children._set_content(node2)
>>>
>>> assert node1.children == [node2, ]
>>> assert node2.parents == [node1, ]
>>>
>>> node3 = Node()
>>> node1.children._set_content(node3)
>>>
>>> assert node1.children == [node3, ]
>>> assert node2.parents == None
"""
self.empty()
if hasattr(content, '__iter__'):
self.append(*content)
else:
self.append(content)
def empty(self):
"""Empties container content.
>>> node1, node2 = Node(), Node()
>>>
>>> node1.children = node2
>>> node1.children.empty()
>>>
>>> assert len(node1.children) == 0
>>> assert len(node2.parents) == 0
"""
for item in self._nodes:
self._nodes.remove(item)
complementary_items = getattr(item,
self.complementary_name)
complementary_items.remove(self.owner)
def append(self, *items):
"""Appends given items to container.
Regular case
>>> node1, node2 = Node(), Node()
>>>
>>> node1.children = node2
>>>
>>> assert node1.children[0] == node2
>>> assert node2.parents[0] == node1
Cycles are allowed by default
>>> node1.parents.append(node2)
>>>
>>> assert node2.children[0] == node1
>>> assert node1.parents[0] == node2
Append multiple times
>>> node1, node2 = Node(), Node()
>>> node1.children.append(node2)
>>> node1.children.append(node2)
>>>
>>> assert node1.children[0] == node2
>>> assert node2.parents[0] == node1
>>> assert len(node1.children) == 1
>>> assert len(node2.parents) == 1
Append multiple at once
>>> node1, node2, node3 = Node(), Node(), Node()
>>>
>>> node1.children = (node2, node3)
>>>
>>> assert len(node1.children) == 2
>>> assert node2 in node1.children
>>> assert node3 in node1.children
"""
for item in items:
if item not in self._nodes:
self._nodes.append(item)
complementary_items = getattr(item,
self.complementary_name)
complementary_items.append(self.owner)
def remove(self, *items):
"""Removes given items from container.
Regular case
>>> node1, node2 = Node(), Node()
>>>
>>> node1.children = node2
>>> node1.children.remove(node2)
>>>
>>> assert len(node1.children) == 0
>>> assert len(node2.parents) == 0
Remove multiple times
>>> node1, node2 = Node(), Node()
>>>
>>> node1.parents = node2
>>> node1.parents.remove(node2)
>>> node1.parents.remove(node2)
>>> node1.parents.remove(node2)
>>>
>>> assert len(node1.parents) == 0
>>> assert len(node2.children) == 0
Remove multiple at once
>>> node1, node2, node3 = Node(), Node(), Node()
>>>
>>> node1.children = (node2, node3)
>>> node1.children.remove(node2, node3)
>>>
>>> assert len(node1.children) == 0
"""
for item in items:
if item in self:
self._nodes.remove(item)
complementary_items = getattr(item,
self.complementary_name)
complementary_items.remove(self.owner)
def find(self, **kvargs):
"""Finds nodes matching to given rules. The idea is that the method
seeks based on the type of the container. For example in case
"node.parents.find" is invoked, it goes through all parents beginning
from the parents of the given node.
Default case
>>> node1, node2, node3, node4 = Node(), Node(), Node(), Node()
>>>
>>> node1.children = (node2, node3)
>>> node3.parents.append(node4)
>>>
>>> node1.name = 'joe'
>>> node1.value = 13
>>> node2.color = 'blue'
>>> node3.color = 'black'
>>> node4.value = 13
Single argument, single result
>>> assert node2.parents.find(name='joe') == node1
>>> assert node1.children.find(color='blue') == node2
<|fim▁hole|> Multiple arguments, single result
>>> assert node2.parents.find(name='joe', value=13) == node1
Regex argument (match anything except newline)
>>> assert node2.parents.find(name='.') == node1
Regex argument (match from beginning)
>>> assert node1.children.find(color='^bl') == [node2, node3]
No result
>>> assert node2.parents.find(color='red') == None
Cyclic case
>>> node1, node2 = Node(), Node()
>>>
>>> node1.children = node2
>>> node2.children = node1
>>>
>>> node1.name = 'joe'
>>> node2.name = 'jack'
Single argument, single result
>>> assert node1.children.find(name='joe') == node1
>>> assert node1.children.find(name='jack') == node2
"""
found_nodes = self._recursion(kvargs, [], [])
if len(found_nodes) > 0:
return found_nodes[0] if len(found_nodes) == 1 else found_nodes
def _recursion(self, search_clauses, found_nodes, visited_nodes):
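# Depth-first walk over linked nodes; visited_nodes prevents revisiting nodes in cyclic graphs.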
visited_nodes.append(self.owner)
for node in self._nodes:
try:
if self._all_match(node, search_clauses):
found_nodes.append(node)
except AttributeError:
pass
if node not in visited_nodes:
node_container = getattr(node, self.name)
node_container._recursion(search_clauses, found_nodes,
visited_nodes)
return found_nodes
def _all_match(self, node, search_clauses):
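# A node matches only if every search clause is satisfied; string clauses are treated as regular expressions.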
for wanted_attribute, wanted_value in search_clauses.items():
attribute_value = getattr(node, wanted_attribute)
if isinstance(wanted_value, str):
matched = re.match(wanted_value, attribute_value)
else:
matched = wanted_value == attribute_value
if not matched:
return False
return True
class Node(object):
_children_container = NodeContainer
_children_name = 'children'
_parents_container = NodeContainer
_parents_name = 'parents'
def __init__(self):
def set_container(container, name, complementary_name):
setattr(self, name, container(self, name, complementary_name))
set_container(self._children_container, self._children_name,
self._parents_name)
set_container(self._parents_container, self._parents_name,
self._children_name)
def __setattr__(self, name, value):
""" Assignment of children/parents resets previous content and creates
needed links to nodes. Otherwise setting attributes works as expected.
Simple assignment
>>> node1, node2 = Node(), Node()
>>> node1.children = node2
>>>
>>> assert node1.children[0] == node2
>>> assert node2.parents[0] == node1
Tuple assignment
>>> node1, node2, node3 = Node(), Node(), Node()
>>> node1.children = (node2, node3)
>>>
>>> assert node1.children[0] == node2
>>> assert node2.parents[0] == node1
>>> assert node1.children[1] == node3
>>> assert node3.parents[0] == node1
Assign value to an attribute
>>> node = Node()
>>>
>>> node.value = 13
>>> assert node.value == 13
"""
def container_template(container_name):
if hasattr(self, container_name):
container = getattr(self, container_name)
container._set_content(value)
else:
super(Node, self).__setattr__(name, value)
if name in (self._children_name, self._parents_name):
container_template(name)
else:
super(Node, self).__setattr__(name, value)<|fim▁end|> | Single argument, multiple results
>>> assert node3.parents.find(value=13) == [node1, node4]
|
<|file_name|>rewriter.cc<|end_file_name|><|fim▁begin|>// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/parsing/rewriter.h"
#include "src/ast/ast.h"
#include "src/ast/scopes.h"
#include "src/parsing/parse-info.h"
#include "src/parsing/parser.h"
namespace v8 {
namespace internal {
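// Rewrites value-producing statements so their completion value is assigned to the ".result" temporary.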
class Processor final : public AstVisitor<Processor> {
public:<|fim▁hole|> result_assigned_(false),
replacement_(nullptr),
is_set_(false),
breakable_(false),
zone_(ast_value_factory->zone()),
closure_scope_(closure_scope),
factory_(ast_value_factory) {
DCHECK_EQ(closure_scope, closure_scope->GetClosureScope());
InitializeAstVisitor(isolate);
}
Processor(Parser* parser, DeclarationScope* closure_scope, Variable* result,
AstValueFactory* ast_value_factory)
: result_(result),
result_assigned_(false),
replacement_(nullptr),
is_set_(false),
breakable_(false),
zone_(ast_value_factory->zone()),
closure_scope_(closure_scope),
factory_(ast_value_factory) {
DCHECK_EQ(closure_scope, closure_scope->GetClosureScope());
InitializeAstVisitor(parser->stack_limit());
}
void Process(ZoneList<Statement*>* statements);
bool result_assigned() const { return result_assigned_; }
Zone* zone() { return zone_; }
DeclarationScope* closure_scope() { return closure_scope_; }
AstNodeFactory* factory() { return &factory_; }
// Returns ".result = value"
Expression* SetResult(Expression* value) {
result_assigned_ = true;
VariableProxy* result_proxy = factory()->NewVariableProxy(result_);
return factory()->NewAssignment(Token::ASSIGN, result_proxy, value,
kNoSourcePosition);
}
// Inserts '.result = undefined' in front of the given statement.
Statement* AssignUndefinedBefore(Statement* s);
private:
Variable* result_;
// We are not tracking result usage via the result_'s use
// counts (we leave the accurate computation to the
// usage analyzer). Instead we simply remember if
// there was ever an assignment to result_.
bool result_assigned_;
// When visiting a node, we "return" a replacement for that node in
// [replacement_]. In many cases this will just be the original node.
Statement* replacement_;
// To avoid storing to .result all the time, we eliminate some of
// the stores by keeping track of whether or not we're sure .result
// will be overwritten anyway. This is a bit more tricky than what I
// was hoping for.
bool is_set_;
bool breakable_;
class BreakableScope final {
public:
explicit BreakableScope(Processor* processor, bool breakable = true)
: processor_(processor), previous_(processor->breakable_) {
processor->breakable_ = processor->breakable_ || breakable;
}
~BreakableScope() { processor_->breakable_ = previous_; }
private:
Processor* processor_;
bool previous_;
};
Zone* zone_;
DeclarationScope* closure_scope_;
AstNodeFactory factory_;
// Node visitors.
#define DEF_VISIT(type) void Visit##type(type* node);
AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
void VisitIterationStatement(IterationStatement* stmt);
DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
};
Statement* Processor::AssignUndefinedBefore(Statement* s) {
Expression* result_proxy = factory()->NewVariableProxy(result_);
Expression* undef = factory()->NewUndefinedLiteral(kNoSourcePosition);
Expression* assignment = factory()->NewAssignment(Token::ASSIGN, result_proxy,
undef, kNoSourcePosition);
Block* b = factory()->NewBlock(NULL, 2, false, kNoSourcePosition);
b->statements()->Add(
factory()->NewExpressionStatement(assignment, kNoSourcePosition), zone());
b->statements()->Add(s, zone());
return b;
}
void Processor::Process(ZoneList<Statement*>* statements) {
// If we're in a breakable scope (named block, iteration, or switch), we walk
// all statements. The last value producing statement before the break needs
// to assign to .result. If we're not in a breakable scope, only the last
// value producing statement in the block assigns to .result, so we can stop
// early.
for (int i = statements->length() - 1; i >= 0 && (breakable_ || !is_set_);
--i) {
Visit(statements->at(i));
statements->Set(i, replacement_);
}
}
void Processor::VisitBlock(Block* node) {
// An initializer block is the rewritten form of a variable declaration
// with initialization expressions. The initializer block contains the
// list of assignments corresponding to the initialization expressions.
// While unclear from the spec (ECMA-262, 3rd., 12.2), the value of
// a variable declaration with initialization expression is 'undefined'
// with some JS VMs: For instance, using smjs, print(eval('var x = 7'))
// returns 'undefined'. To obtain the same behavior with v8, we need
// to prevent rewriting in that case.
if (!node->ignore_completion_value()) {
BreakableScope scope(this, node->labels() != nullptr);
Process(node->statements());
}
replacement_ = node;
}
void Processor::VisitExpressionStatement(ExpressionStatement* node) {
// Rewrite : <x>; -> .result = <x>;
if (!is_set_) {
node->set_expression(SetResult(node->expression()));
is_set_ = true;
}
replacement_ = node;
}
void Processor::VisitIfStatement(IfStatement* node) {
// Rewrite both branches.
bool set_after = is_set_;
Visit(node->then_statement());
node->set_then_statement(replacement_);
bool set_in_then = is_set_;
is_set_ = set_after;
Visit(node->else_statement());
node->set_else_statement(replacement_);
replacement_ = set_in_then && is_set_ ? node : AssignUndefinedBefore(node);
is_set_ = true;
}
void Processor::VisitIterationStatement(IterationStatement* node) {
// The statement may have to produce a value, so always assign undefined
// before.
// TODO(verwaest): Omit it if we know that there's no break/continue leaving
// it early.
DCHECK(breakable_ || !is_set_);
BreakableScope scope(this);
Visit(node->body());
node->set_body(replacement_);
replacement_ = AssignUndefinedBefore(node);
is_set_ = true;
}
void Processor::VisitDoWhileStatement(DoWhileStatement* node) {
VisitIterationStatement(node);
}
void Processor::VisitWhileStatement(WhileStatement* node) {
VisitIterationStatement(node);
}
void Processor::VisitForStatement(ForStatement* node) {
VisitIterationStatement(node);
}
void Processor::VisitForInStatement(ForInStatement* node) {
VisitIterationStatement(node);
}
void Processor::VisitForOfStatement(ForOfStatement* node) {
VisitIterationStatement(node);
}
void Processor::VisitTryCatchStatement(TryCatchStatement* node) {
// Rewrite both try and catch block.
bool set_after = is_set_;
Visit(node->try_block());
node->set_try_block(static_cast<Block*>(replacement_));
bool set_in_try = is_set_;
is_set_ = set_after;
Visit(node->catch_block());
node->set_catch_block(static_cast<Block*>(replacement_));
replacement_ = is_set_ && set_in_try ? node : AssignUndefinedBefore(node);
is_set_ = true;
}
void Processor::VisitTryFinallyStatement(TryFinallyStatement* node) {
// Only rewrite finally if it could contain 'break' or 'continue'. Always
// rewrite try.
if (breakable_) {
// Only set result before a 'break' or 'continue'.
is_set_ = true;
Visit(node->finally_block());
node->set_finally_block(replacement_->AsBlock());
// Save .result value at the beginning of the finally block and restore it
// at the end again: ".backup = .result; ...; .result = .backup"
// This is necessary because the finally block does not normally contribute
// to the completion value.
CHECK_NOT_NULL(closure_scope());
Variable* backup = closure_scope()->NewTemporary(
factory()->ast_value_factory()->dot_result_string());
Expression* backup_proxy = factory()->NewVariableProxy(backup);
Expression* result_proxy = factory()->NewVariableProxy(result_);
Expression* save = factory()->NewAssignment(
Token::ASSIGN, backup_proxy, result_proxy, kNoSourcePosition);
Expression* restore = factory()->NewAssignment(
Token::ASSIGN, result_proxy, backup_proxy, kNoSourcePosition);
node->finally_block()->statements()->InsertAt(
0, factory()->NewExpressionStatement(save, kNoSourcePosition), zone());
node->finally_block()->statements()->Add(
factory()->NewExpressionStatement(restore, kNoSourcePosition), zone());
}
Visit(node->try_block());
node->set_try_block(replacement_->AsBlock());
replacement_ = is_set_ ? node : AssignUndefinedBefore(node);
is_set_ = true;
}
void Processor::VisitSwitchStatement(SwitchStatement* node) {
// The statement may have to produce a value, so always assign undefined
// before.
// TODO(verwaest): Omit it if we know that there's no break/continue leaving
// it early.
DCHECK(breakable_ || !is_set_);
BreakableScope scope(this);
// Rewrite statements in all case clauses.
ZoneList<CaseClause*>* clauses = node->cases();
for (int i = clauses->length() - 1; i >= 0; --i) {
CaseClause* clause = clauses->at(i);
Process(clause->statements());
}
replacement_ = AssignUndefinedBefore(node);
is_set_ = true;
}
void Processor::VisitContinueStatement(ContinueStatement* node) {
is_set_ = false;
replacement_ = node;
}
void Processor::VisitBreakStatement(BreakStatement* node) {
is_set_ = false;
replacement_ = node;
}
void Processor::VisitWithStatement(WithStatement* node) {
Visit(node->statement());
node->set_statement(replacement_);
replacement_ = is_set_ ? node : AssignUndefinedBefore(node);
is_set_ = true;
}
void Processor::VisitSloppyBlockFunctionStatement(
SloppyBlockFunctionStatement* node) {
Visit(node->statement());
node->set_statement(replacement_);
replacement_ = node;
}
void Processor::VisitEmptyStatement(EmptyStatement* node) {
replacement_ = node;
}
void Processor::VisitReturnStatement(ReturnStatement* node) {
is_set_ = true;
replacement_ = node;
}
void Processor::VisitDebuggerStatement(DebuggerStatement* node) {
replacement_ = node;
}
// Expressions are never visited.
#define DEF_VISIT(type) \
void Processor::Visit##type(type* expr) { UNREACHABLE(); }
EXPRESSION_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
// Declarations are never visited.
#define DEF_VISIT(type) \
void Processor::Visit##type(type* expr) { UNREACHABLE(); }
DECLARATION_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
// Assumes code has been parsed. Mutates the AST, so the AST should not
// continue to be used in the case of failure.
bool Rewriter::Rewrite(ParseInfo* info) {
FunctionLiteral* function = info->literal();
DCHECK_NOT_NULL(function);
Scope* scope = function->scope();
DCHECK_NOT_NULL(scope);
if (!scope->is_script_scope() && !scope->is_eval_scope()) return true;
DeclarationScope* closure_scope = scope->GetClosureScope();
ZoneList<Statement*>* body = function->body();
if (!body->is_empty()) {
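// The script's completion value is accumulated in a temporary ".result"
// variable, which is returned at the end if any statement assigned to it.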
Variable* result = closure_scope->NewTemporary(
info->ast_value_factory()->dot_result_string());
// The name string must be internalized at this point.
info->ast_value_factory()->Internalize(info->isolate());
DCHECK(!result->name().is_null());
Processor processor(info->isolate(), closure_scope, result,
info->ast_value_factory());
processor.Process(body);
// Internalize any values created during rewriting.
info->ast_value_factory()->Internalize(info->isolate());
if (processor.HasStackOverflow()) return false;
if (processor.result_assigned()) {
int pos = kNoSourcePosition;
VariableProxy* result_proxy =
processor.factory()->NewVariableProxy(result, pos);
Statement* result_statement =
processor.factory()->NewReturnStatement(result_proxy, pos);
body->Add(result_statement, info->zone());
}
}
return true;
}
bool Rewriter::Rewrite(Parser* parser, DeclarationScope* closure_scope,
DoExpression* expr, AstValueFactory* factory) {
Block* block = expr->block();
DCHECK_EQ(closure_scope, closure_scope->GetClosureScope());
DCHECK(block->scope() == nullptr ||
block->scope()->GetClosureScope() == closure_scope);
ZoneList<Statement*>* body = block->statements();
VariableProxy* result = expr->result();
Variable* result_var = result->var();
if (!body->is_empty()) {
Processor processor(parser, closure_scope, result_var, factory);
processor.Process(body);
if (processor.HasStackOverflow()) return false;
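// If nothing assigned to .result, explicitly complete the do-expression with undefined.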
if (!processor.result_assigned()) {
AstNodeFactory* node_factory = processor.factory();
Expression* undef = node_factory->NewUndefinedLiteral(kNoSourcePosition);
Statement* completion = node_factory->NewExpressionStatement(
processor.SetResult(undef), expr->position());
body->Add(completion, factory->zone());
}
}
return true;
}
} // namespace internal
} // namespace v8<|fim▁end|> | Processor(Isolate* isolate, DeclarationScope* closure_scope, Variable* result,
AstValueFactory* ast_value_factory)
: result_(result), |
<|file_name|>unique-fn-ret.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn f() -> ~int {
~100<|fim▁hole|> assert!(f() == ~100);
}<|fim▁end|> | }
pub fn main() { |
<|file_name|>base.py<|end_file_name|><|fim▁begin|>"""
Django settings for lwc project.
Generated by 'django-admin startproject' using Django 1.9.7.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '7fm_f66p8e!p%o=sr%d&cue(%+bh@@j_y6*b3d@t^c5%i8)1)2'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
#Share url
SHARER_URL = "http://127.0.0.1:8000/?ref="
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'joins',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'lwc.middleware.ReferMiddleware',
]
ROOT_URLCONF = 'lwc.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'lwc.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}<|fim▁hole|>}
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static', 'static_root')
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static', 'static_dirs'),
)
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'static', 'media')<|fim▁end|> | |
<|file_name|>BoundSheetRecord.cpp<|end_file_name|><|fim▁begin|>// Generated from /POI/java/org/apache/poi/hssf/record/BoundSheetRecord.java
#include <org/apache/poi/hssf/record/BoundSheetRecord.hpp>
#include <java/lang/ArrayStoreException.hpp>
#include <java/lang/NullPointerException.hpp>
#include <java/lang/String.hpp>
#include <java/lang/StringBuffer.hpp>
#include <java/util/Arrays.hpp>
#include <java/util/Comparator.hpp>
#include <java/util/List.hpp>
#include <org/apache/poi/hssf/record/BoundSheetRecord_1.hpp>
#include <org/apache/poi/hssf/record/Record.hpp>
#include <org/apache/poi/hssf/record/RecordBase.hpp>
#include <org/apache/poi/hssf/record/RecordInputStream.hpp>
#include <org/apache/poi/hssf/record/StandardRecord.hpp>
#include <org/apache/poi/ss/util/WorkbookUtil.hpp>
#include <org/apache/poi/util/BitField.hpp>
#include <org/apache/poi/util/BitFieldFactory.hpp>
#include <org/apache/poi/util/HexDump.hpp>
#include <org/apache/poi/util/LittleEndian.hpp>
#include <org/apache/poi/util/LittleEndianConsts.hpp>
#include <org/apache/poi/util/LittleEndianOutput.hpp>
#include <org/apache/poi/util/StringUtil.hpp>
#include <Array.hpp>
#include <ObjectArray.hpp>
#include <SubArray.hpp>
template<typename ComponentType, typename... Bases> struct SubArray;
namespace poi
{
namespace hssf
{
namespace record
{
typedef ::SubArray< ::poi::hssf::record::RecordBase, ::java::lang::ObjectArray > RecordBaseArray;
typedef ::SubArray< ::poi::hssf::record::Record, RecordBaseArray > RecordArray;
typedef ::SubArray< ::poi::hssf::record::StandardRecord, RecordArray > StandardRecordArray;
typedef ::SubArray< ::poi::hssf::record::BoundSheetRecord, StandardRecordArray > BoundSheetRecordArray;
} // record
} // hssf
} // poi
template<typename T>
static T* npc(T* t)
{
if(!t) throw new ::java::lang::NullPointerException();
return t;
}
poi::hssf::record::BoundSheetRecord::BoundSheetRecord(const ::default_init_tag&)
: super(*static_cast< ::default_init_tag* >(0))
{
clinit();
}
poi::hssf::record::BoundSheetRecord::BoundSheetRecord(::java::lang::String* sheetname)
: BoundSheetRecord(*static_cast< ::default_init_tag* >(0))
{
ctor(sheetname);
}
poi::hssf::record::BoundSheetRecord::BoundSheetRecord(RecordInputStream* in)
: BoundSheetRecord(*static_cast< ::default_init_tag* >(0))
{
ctor(in);
}
constexpr int16_t poi::hssf::record::BoundSheetRecord::sid;
poi::util::BitField*& poi::hssf::record::BoundSheetRecord::hiddenFlag()
{
clinit();
return hiddenFlag_;
}
poi::util::BitField* poi::hssf::record::BoundSheetRecord::hiddenFlag_;
poi::util::BitField*& poi::hssf::record::BoundSheetRecord::veryHiddenFlag()
{
clinit();
return veryHiddenFlag_;
}
poi::util::BitField* poi::hssf::record::BoundSheetRecord::veryHiddenFlag_;
void poi::hssf::record::BoundSheetRecord::ctor(::java::lang::String* sheetname)
{
super::ctor();
field_2_option_flags = 0;
setSheetname(sheetname);
}
void poi::hssf::record::BoundSheetRecord::ctor(RecordInputStream* in)
{
super::ctor();
auto buf = new ::int8_tArray(::poi::util::LittleEndianConsts::INT_SIZE);
npc(in)->readPlain(buf, int32_t(0), npc(buf)->length);
field_1_position_of_BOF = ::poi::util::LittleEndian::getInt(buf);
field_2_option_flags = npc(in)->readUShort();
auto field_3_sheetname_length = npc(in)->readUByte();
field_4_isMultibyteUnicode = npc(in)->readByte();
if(isMultibyte()) {
field_5_sheetname = npc(in)->readUnicodeLEString(field_3_sheetname_length);
} else {
field_5_sheetname = npc(in)->readCompressedUnicode(field_3_sheetname_length);
}
}
void poi::hssf::record::BoundSheetRecord::setPositionOfBof(int32_t pos)
{
field_1_position_of_BOF = pos;
}
void poi::hssf::record::BoundSheetRecord::setSheetname(::java::lang::String* sheetName)
{
::poi::ss::util::WorkbookUtil::validateSheetName(sheetName);
field_5_sheetname = sheetName;
field_4_isMultibyteUnicode = ::poi::util::StringUtil::hasMultibyte(sheetName) ? int32_t(1) : int32_t(0);
}
int32_t poi::hssf::record::BoundSheetRecord::getPositionOfBof()
{
return field_1_position_of_BOF;
}
bool poi::hssf::record::BoundSheetRecord::isMultibyte()
{
return (field_4_isMultibyteUnicode & int32_t(1)) != 0;
}
java::lang::String* poi::hssf::record::BoundSheetRecord::getSheetname()
{
return field_5_sheetname;
}
java::lang::String* poi::hssf::record::BoundSheetRecord::toString()
{
auto buffer = new ::java::lang::StringBuffer();
npc(buffer)->append(u"[BOUNDSHEET]\n"_j);
npc(npc(npc(buffer)->append(u" .bof = "_j))->append(::poi::util::HexDump::intToHex(getPositionOfBof())))->append(u"\n"_j);
npc(npc(npc(buffer)->append(u" .options = "_j))->append(::poi::util::HexDump::shortToHex(field_2_option_flags)))->append(u"\n"_j);
npc(npc(npc(buffer)->append(u" .unicodeflag= "_j))->append(::poi::util::HexDump::byteToHex(field_4_isMultibyteUnicode)))->append(u"\n"_j);
npc(npc(npc(buffer)->append(u" .sheetname = "_j))->append(field_5_sheetname))->append(u"\n"_j);
npc(buffer)->append(u"[/BOUNDSHEET]\n"_j);
return npc(buffer)->toString();
}
int32_t poi::hssf::record::BoundSheetRecord::getDataSize()
{
return int32_t(8) + npc(field_5_sheetname)->length() * (isMultibyte() ? int32_t(2) : int32_t(1));
}
<|fim▁hole|> auto name = field_5_sheetname;
npc(out)->writeByte(npc(name)->length());
npc(out)->writeByte(field_4_isMultibyteUnicode);
if(isMultibyte()) {
::poi::util::StringUtil::putUnicodeLE(name, out);
} else {
::poi::util::StringUtil::putCompressedUnicode(name, out);
}
}
int16_t poi::hssf::record::BoundSheetRecord::getSid()
{
return sid;
}
bool poi::hssf::record::BoundSheetRecord::isHidden()
{
return npc(hiddenFlag_)->isSet(field_2_option_flags);
}
void poi::hssf::record::BoundSheetRecord::setHidden(bool hidden)
{
field_2_option_flags = npc(hiddenFlag_)->setBoolean(field_2_option_flags, hidden);
}
bool poi::hssf::record::BoundSheetRecord::isVeryHidden()
{
return npc(veryHiddenFlag_)->isSet(field_2_option_flags);
}
void poi::hssf::record::BoundSheetRecord::setVeryHidden(bool veryHidden)
{
field_2_option_flags = npc(veryHiddenFlag_)->setBoolean(field_2_option_flags, veryHidden);
}
poi::hssf::record::BoundSheetRecordArray* poi::hssf::record::BoundSheetRecord::orderByBofPosition(::java::util::List* boundSheetRecords)
{
clinit();
auto bsrs = new BoundSheetRecordArray(npc(boundSheetRecords)->size());
npc(boundSheetRecords)->toArray_(static_cast< ::java::lang::ObjectArray* >(bsrs));
::java::util::Arrays::sort(bsrs, BOFComparator_);
return bsrs;
}
java::util::Comparator*& poi::hssf::record::BoundSheetRecord::BOFComparator()
{
clinit();
return BOFComparator_;
}
java::util::Comparator* poi::hssf::record::BoundSheetRecord::BOFComparator_;
extern java::lang::Class *class_(const char16_t *c, int n);
java::lang::Class* poi::hssf::record::BoundSheetRecord::class_()
{
static ::java::lang::Class* c = ::class_(u"org.apache.poi.hssf.record.BoundSheetRecord", 43);
return c;
}
void poi::hssf::record::BoundSheetRecord::clinit()
{
super::clinit();
static bool in_cl_init = false;
struct clinit_ {
clinit_() {
in_cl_init = true;
hiddenFlag_ = ::poi::util::BitFieldFactory::getInstance(1);
veryHiddenFlag_ = ::poi::util::BitFieldFactory::getInstance(2);
BOFComparator_ = new BoundSheetRecord_1();
}
};
if(!in_cl_init) {
static clinit_ clinit_instance;
}
}
int32_t poi::hssf::record::BoundSheetRecord::serialize(int32_t offset, ::int8_tArray* data)
{
return super::serialize(offset, data);
}
int8_tArray* poi::hssf::record::BoundSheetRecord::serialize()
{
return super::serialize();
}
java::lang::Class* poi::hssf::record::BoundSheetRecord::getClass0()
{
return class_();
}<|fim▁end|> | void poi::hssf::record::BoundSheetRecord::serialize(::poi::util::LittleEndianOutput* out)
{
npc(out)->writeInt(getPositionOfBof());
npc(out)->writeShort(field_2_option_flags); |
<|file_name|>videojs_languages.js<|end_file_name|><|fim▁begin|>'use strict';
var path = require('path');
var util = require('util');
module.exports = function(grunt) {
grunt.registerMultiTask('vjslanguages', 'A Grunt plugin for compiling VideoJS language assets.', function() {
var createLanguageFile = function(languageName, languageData, jsFilePath) {
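// Emit a JS file that registers the translations with Video.js via videojs.addLanguage(name, data).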
var jsTemplate = 'videojs.addLanguage("' + languageName + '",' + JSON.stringify(languageData,null,' ') + ');';
grunt.file.write(jsFilePath, jsTemplate);
grunt.log.writeln('- [' + languageName +'] Language Built. File "' + jsFilePath + '" created.');
};
this.files.forEach(function(f) {<|fim▁hole|> if(util.isArray(f.src)){
for(var i =0; i < f.src.length; i++) {
languageName = path.basename(f.src[i], '.json');
languageData = grunt.file.readJSON(f.src[i]);
jsFilePath = path.join(f.dest, languageName + '.js');
createLanguageFile(languageName, languageData, jsFilePath);
}
}
// Singular File Case
else {
languageName = path.basename(f.src, '.json');
languageData = grunt.file.readJSON(f.src);
jsFilePath = path.join(f.dest, languageName + '.js');
createLanguageFile(languageName, languageData, jsFilePath);
}
});
});
};<|fim▁end|> | var languageName, languageData, jsFilePath;
// Multiple Files Case |
<|file_name|>CopyProgressDlg.cpp<|end_file_name|><|fim▁begin|>/*____________________________________________________________________________
ExifPro Image Viewer
Copyright (C) 2000-2015 Michael Kowalski
____________________________________________________________________________*/
// CopyProgressDlg.cpp : implementation file
//
#include "stdafx.h"
#include "resource.h"
#include "CopyProgressDlg.h"
// CopyProgressDlg dialog
CopyProgressDlg::CopyProgressDlg(CWnd* parent /*=NULL*/)
: CDialog(CopyProgressDlg::IDD, parent)
<|fim▁hole|>}
CopyProgressDlg::~CopyProgressDlg()
{
}
void CopyProgressDlg::DoDataExchange(CDataExchange* DX)
{
CDialog::DoDataExchange(DX);
DDX_Control(DX, IDC_ANIMATION, animation_);
}
BEGIN_MESSAGE_MAP(CopyProgressDlg, CDialog)
END_MESSAGE_MAP()
// CopyProgressDlg message handlers
BOOL CopyProgressDlg::OnInitDialog()
{
CDialog::OnInitDialog();
animation_.Open(IDR_COPY_ANIM);
return TRUE; // return TRUE unless you set the focus to a control
// EXCEPTION: OCX Property Pages should return FALSE
}<|fim▁end|> | {
|
<|file_name|>two_gram_model.py<|end_file_name|><|fim▁begin|># ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have purchased from
# Numenta, Inc. a separate commercial license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""Module containing the two gram OPF model implementation. """
import collections
import itertools
import logging
from nupic import encoders<|fim▁hole|>
class TwoGramModel(model.Model):
"""Two-gram benchmark model."""
def __init__(self, inferenceType=InferenceType.TemporalNextStep,
encoderParams=()):
""" Two-gram model constructor.
inferenceType: An opfutils.InferenceType value that specifies what type of
inference (i.e. TemporalNextStep, Classification, etc.)
encoderParams: Sequence of encoder params dictionaries.
"""
super(TwoGramModel, self).__init__(inferenceType)
self._logger = opfutils.initLogger(self)
self._reset = False
self._hashToValueDict = dict()
self._learningEnabled = True
self._encoder = encoders.MultiEncoder(encoderParams)
self._fieldNames = self._encoder.getScalarNames()
self._prevValues = [None] * len(self._fieldNames)
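# One two-gram dict per field, mapping previous bucket index -> {next bucket index: count}.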
self._twoGramDicts = [dict() for _ in xrange(len(self._fieldNames))]
def run(self, inputRecord):
"""Run one iteration of this model.
Args:
inputRecord: A record object formatted according to
nupic.data.FileSource.getNext() result format.
Returns:
A ModelResult named tuple (see opfutils.py). The contents of
ModelResult.inferences depends on the specific inference type of this
model, which can be queried by getInferenceType().
TODO: Implement getInferenceType()?
"""
results = super(TwoGramModel, self).run(inputRecord)
# Set up the lists of values, defaults, and encoded values.
values = [inputRecord[k] for k in self._fieldNames]
defaults = ['' if type(v) == str else 0 for v in values]
inputFieldEncodings = self._encoder.encodeEachField(inputRecord)
inputBuckets = self._encoder.getBucketIndices(inputRecord)
results.sensorInput = opfutils.SensorInput(
dataRow=values, dataEncodings=inputFieldEncodings,
sequenceReset=int(self._reset))
# Keep track of the last value associated with each encoded value so that
# predictions can be returned in the original value format.
for value, bucket in itertools.izip(values, inputBuckets):
self._hashToValueDict[bucket] = value
# Update the two-gram dict if learning is enabled.
for bucket, prevValue, twoGramDict in itertools.izip(
inputBuckets, self._prevValues, self._twoGramDicts):
if self._learningEnabled and not self._reset:
if prevValue not in twoGramDict:
twoGramDict[prevValue] = collections.defaultdict(int)
twoGramDict[prevValue][bucket] += 1
# Populate the results.inferences dict with the predictions and encoded
# predictions.
predictions = []
encodedPredictions = []
for bucket, twoGramDict, default, fieldName in (
itertools.izip(inputBuckets, self._twoGramDicts, defaults,
self._fieldNames)):
if bucket in twoGramDict:
probabilities = twoGramDict[bucket].items()
prediction = self._hashToValueDict[
max(probabilities, key=lambda x: x[1])[0]]
predictions.append(prediction)
encodedPredictions.append(self._encoder.encodeField(fieldName,
prediction))
else:
predictions.append(default)
encodedPredictions.append(self._encoder.encodeField(fieldName,
default))
results.inferences = dict()
results.inferences[opfutils.InferenceElement.prediction] = predictions
results.inferences[opfutils.InferenceElement.encodings] = encodedPredictions
self._prevValues = inputBuckets
self._reset = False
return results
def finishLearning(self):
"""Places the model in a permanent "finished learning" mode.
Once called, the model will not be able to learn from subsequent input
records. Learning may not be resumed on a given instance of the model once
this is called as the implementation may optimize itself by pruning data
structures that are necessary for learning.
"""
self._learningEnabled = False
def setFieldStatistics(self,fieldStats):
"""
This method is used for the data source to communicate to the
model any statistics that it knows about the fields
Since the two-gram has no use for this information, this is a no-op
"""
pass
def getFieldInfo(self):
"""Returns the metadata specifying the format of the model's output.
The result may be different than the list of
nupic.data.fieldmeta.FieldMetaInfo objects supplied at initialization due
to the transcoding of some input fields into meta- fields, such as
datetime -> dayOfWeek, timeOfDay, etc.
"""
fieldTypes = self._encoder.getDecoderOutputFieldTypes()
assert len(self._fieldNames) == len(fieldTypes)
return tuple(fieldmeta.FieldMetaInfo(*args) for args in
itertools.izip(
self._fieldNames, fieldTypes,
itertools.repeat(fieldmeta.FieldMetaSpecial.none)))
def getRuntimeStats(self):
"""Get the runtime statistics specific to the model.
I.E. activeCellOverlapAvg
Returns:
A dict mapping statistic names to values.
"""
# TODO: Add debugging stats.
return dict()
def _getLogger(self):
"""Get the logger created by this subclass.
Returns:
A logging.Logger object. Should not be None.
"""
return self._logger
def resetSequenceStates(self):
"""Called to indicate the start of a new sequence.
The next call to run should not perform learning.
"""
self._reset = True
def __getstate__(self):
del self._logger
return self.__dict__
def __setstate__(self):
self._logger = opfutils.initLogger(self)<|fim▁end|> | from nupic.data import fieldmeta
from nupic.frameworks.opf import model
from nupic.frameworks.opf import opfutils
from opfutils import InferenceType |
<|file_name|>core.rs<|end_file_name|><|fim▁begin|>use std::cmp::min;
use crate::direction::Orientation;
use crate::event::{AnyCb, Event, EventResult, Key, MouseButton, MouseEvent};
use crate::printer::Printer;
use crate::rect::Rect;
use crate::theme::ColorStyle;
use crate::view::{ScrollStrategy, Selector, SizeCache};
use crate::with::With;
use crate::Vec2;
use crate::XY;
/// Describes an item with a scroll core.
///
/// This trait is used to represent "something that can scroll".
/// All it needs is an accessible core.
///
/// See the various methods in the [`scroll`](crate::view::scroll) module.
pub trait Scroller {
/// Returns a mutable access to the scroll core.
fn get_scroller_mut(&mut self) -> &mut Core;
/// Returns an immutable access to the scroll core.
fn get_scroller(&self) -> &Core;
}
/// Implements the `Scroller` trait for any type.
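///
/// For example (hypothetical view): given `struct MyView { core: scroll::Core }`,
/// `impl_scroller!(MyView::core);` generates a `Scroller` impl returning `&mut self.core` / `&self.core`.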
#[macro_export]
macro_rules! impl_scroller {
($class:ident :: $core:ident) => {
impl $crate::view::scroll::Scroller for $class {
fn get_scroller_mut(
&mut self,
) -> &mut $crate::view::scroll::Core {
&mut self.$core
}
fn get_scroller(&self) -> &$crate::view::scroll::Core {
&self.$core
}
}
};
($class:ident < $($args:tt),* > :: $core:ident) => {
impl <$( $args ),* > $crate::view::scroll::Scroller for $class<$($args),*> {
fn get_scroller_mut(
&mut self,
) -> &mut $crate::view::scroll::Core {
&mut self.$core
}
fn get_scroller(&self) -> &$crate::view::scroll::Core {
&self.$core
}
}
};
}
/// Core system for scrolling views.
///
/// This is the lowest-level element handling scroll logic.
///
/// Higher-level abstractions are probably what you're after.
///
/// In particular, see also [`ScrollView`](crate::views::ScrollView).
#[derive(Debug)]
pub struct Core {
/// This is the size the child thinks we're giving him.
inner_size: Vec2,
/// Offset into the inner view.
///
/// Our `(0,0)` will be inner's `offset`
offset: Vec2,
/// What was the size available to print the child last time?
///
/// Excludes any potential scrollbar.
last_available: Vec2,
/// Are we scrollable in each direction?
enabled: XY<bool>,
/// Should we show scrollbars?
///
/// Even if this is true, no scrollbar will be printed if we don't need to
/// scroll.
///
/// TODO: have an option to always show the scrollbar.
/// TODO: have an option to show scrollbar on top/left.
show_scrollbars: bool,
/// How much padding should be between content and scrollbar?
///
/// scrollbar_padding.x is the horizontal padding before the vertical scrollbar.
scrollbar_padding: Vec2,
/// Initial position of the cursor when dragging.
thumb_grab: Option<(Orientation, usize)>,
/// We keep the cache here so it can be busted when we change the content.
size_cache: Option<XY<SizeCache<bool>>>,
/// Defines how to update the offset when the view size changes.
scroll_strategy: ScrollStrategy,
}
impl Default for Core {
fn default() -> Self {
Self::new()
}
}
impl Core {
/// Creates a new `Core`.
pub fn new() -> Self {
Core {
inner_size: Vec2::zero(),
offset: Vec2::zero(),
last_available: Vec2::zero(),
enabled: XY::new(false, true),
show_scrollbars: true,
scrollbar_padding: Vec2::new(1, 0),
thumb_grab: None,
size_cache: None,
scroll_strategy: ScrollStrategy::KeepRow,
}
}
/// Returns a sub-printer ready to draw the content.
pub fn sub_printer<'a, 'b>(
&self,
printer: &Printer<'a, 'b>,
) -> Printer<'a, 'b> {
// Draw scrollbar?
let size = self.available_size();
// Draw the scrollbars
if self.get_show_scrollbars() {
let scrolling = self.is_scrolling();
let lengths = self.scrollbar_thumb_lengths();
let offsets = self.scrollbar_thumb_offsets(lengths);
let line_c = XY::new("-", "|");
let color = if printer.focused {
ColorStyle::highlight()
} else {
ColorStyle::highlight_inactive()
};
XY::zip5(lengths, offsets, size, line_c, Orientation::pair())
.run_if(
scrolling,
|(length, offset, size, c, orientation)| {
let start = printer
.size
.saturating_sub((1, 1))
.with_axis(orientation, 0);
let offset = orientation.make_vec(offset, 0);
printer.print_line(orientation, start, size, c);
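// Draw the thumb as a blank cell while this scrollbar is being dragged, and filled otherwise.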
let thumb_c = if self
.thumb_grab
.map(|(o, _)| o == orientation)
.unwrap_or(false)
{
" "
} else {
"▒"
};
printer.with_color(color, |printer| {
printer.print_line(
orientation,
start + offset,
length,
thumb_c,
);
});
},
);
// Draw the X between the two scrollbars.
if scrolling.both() {
printer.print(printer.size.saturating_sub((1, 1)), "╳");
}
}
// Draw content
printer
.cropped(size)
.content_offset(self.offset)
.inner_size(self.inner_size)
}
/// Returns `true` if `event` should be processed by the content.
///
/// This also updates `event` so that it is relative to the content.
pub fn is_event_inside(&self, event: &mut Event) -> bool {
if let Event::Mouse {
ref mut position,
ref offset,
..
} = event
{
// For mouse events, check if it falls inside the available area
let inside = position
.checked_sub(offset)
.map(|p| p.fits_in(self.available_size()))
.unwrap_or(false);
*position = *position + self.offset;
inside
} else {
// For key events, assume it's inside by default.
true
}
}
/// Handle an event after processing by the content.
pub fn on_inner_event(
&mut self,
event: Event,
inner_result: EventResult,
important_area: Rect,
) -> EventResult {
match inner_result {
EventResult::Ignored => {
// The view ignored the event, so we're free to use it.
// If it's an arrow, try to scroll in the given direction.
// If it's a mouse scroll, try to scroll as well.
// Also allow Ctrl+arrow to move the view,
// without affecting the selection.
match event {
Event::Mouse {
event: MouseEvent::WheelUp,
..
} if self.enabled.y && self.offset.y > 0 => {
self.offset.y = self.offset.y.saturating_sub(3);
}
Event::Mouse {
event: MouseEvent::WheelDown,
..
} if self.enabled.y
&& (self.offset.y + self.available_size().y
< self.inner_size.y) =>
{
self.offset.y = min(
self.inner_size
.y
.saturating_sub(self.available_size().y),
self.offset.y + 3,
);
}
Event::Mouse {
event: MouseEvent::Press(MouseButton::Left),
position,
offset,
} if self.show_scrollbars
&& position
.checked_sub(offset)
.map(|position| self.start_drag(position))
.unwrap_or(false) =>
{
// Just consume the event.
}
Event::Mouse {
event: MouseEvent::Hold(MouseButton::Left),
position,
offset,
} if self.show_scrollbars => {
let position = position.saturating_sub(offset);
self.drag(position);
}
Event::Mouse {
event: MouseEvent::Release(MouseButton::Left),
..
} => {
self.release_grab();
}
Event::Key(Key::Home) if self.enabled.any() => {
self.offset =
self.enabled.select_or(Vec2::zero(), self.offset);
}
Event::Key(Key::End) if self.enabled.any() => {
let max_offset = self
.inner_size
.saturating_sub(self.available_size());
self.offset =
self.enabled.select_or(max_offset, self.offset);
}
Event::Ctrl(Key::Up) | Event::Key(Key::Up)
if self.enabled.y && self.offset.y > 0 =>
{
self.offset.y -= 1;
}
Event::Key(Key::PageUp)
if self.enabled.y && self.offset.y > 0 =>
{
self.offset.y = self.offset.y.saturating_sub(5);
}
Event::Key(Key::PageDown)
if self.enabled.y
&& (self.offset.y + self.available_size().y
< self.inner_size.y) =>
{
self.offset.y += 5;
}
Event::Ctrl(Key::Down) | Event::Key(Key::Down)
if self.enabled.y
&& (self.offset.y + self.available_size().y
< self.inner_size.y) =>
{
self.offset.y += 1;
}
Event::Ctrl(Key::Left) | Event::Key(Key::Left)
if self.enabled.x && self.offset.x > 0 =>
{
self.offset.x -= 1;
}
Event::Ctrl(Key::Right) | Event::Key(Key::Right)
if self.enabled.x
&& (self.offset.x + self.available_size().x
< self.inner_size.x) =>
{
self.offset.x += 1;
}
_ => return EventResult::Ignored,
};
// We just scrolled manually, so reset the scroll strategy.
self.scroll_strategy = ScrollStrategy::KeepRow;
// TODO: return callback on_scroll?
EventResult::Consumed(None)
}
other => {
// The view consumed the event. Maybe something changed?
self.scroll_to_rect(important_area);
other
}
}
}
/// Specifies the size given in a layout phase.
pub(crate) fn set_last_size(
&mut self,
last_size: Vec2,
scrolling: XY<bool>,
) {
self.last_available = last_size.saturating_sub(
scrolling
.swap()
.select_or(self.scrollbar_padding + (1, 1), Vec2::zero()),
);
}
/// Specifies the size allocated to the content.
pub(crate) fn set_inner_size(&mut self, inner_size: Vec2) {
self.inner_size = inner_size;
}
/// Rebuild the cache with the given parameters.
pub(crate) fn build_cache(
&mut self,
self_size: Vec2,
last_size: Vec2,
scrolling: XY<bool>,
) {
self.size_cache =
Some(SizeCache::build_extra(self_size, last_size, scrolling));
}
/// Makes sure the viewport is within the content.
pub(crate) fn update_offset(&mut self) {
// Keep the offset in the valid range.
self.offset = self
.offset
.or_min(self.inner_size.saturating_sub(self.available_size()));
// Possibly update the offset if we're following a specific strategy.
self.adjust_scroll();
}
/// Returns `true` if we should relayout, no matter the content.
///
/// Even if this returns `false`, the content itself might still need to relayout.
pub fn needs_relayout(&self) -> bool {
self.size_cache.is_none()
}
/// Performs `View::call_on_any()`
pub fn call_on_any<'a, F>(
&mut self,
selector: &Selector<'_>,
cb: AnyCb<'a>,
inner_call_on_any: F,
) where
F: FnOnce(&Selector, AnyCb),
{
inner_call_on_any(selector, cb)
}
/// Performs `View::focus_view()`
pub fn focus_view<F>(
&mut self,
selector: &Selector<'_>,
inner_focus_view: F,
) -> Result<(), ()>
where
F: FnOnce(&Selector) -> Result<(), ()>,
{
inner_focus_view(selector)
}
/// Returns the viewport in the inner content.
pub fn content_viewport(&self) -> Rect {
Rect::from_size(self.offset, self.available_size())
}
/// Defines the way scrolling is adjusted on content or size change.
///
/// The scroll strategy defines how the scrolling position is adjusted
/// when the size of the view or the content change.
///
/// It is reset to `ScrollStrategy::KeepRow` whenever the user scrolls
/// manually.
pub fn set_scroll_strategy(&mut self, strategy: ScrollStrategy) {
self.scroll_strategy = strategy;
self.adjust_scroll();
}
/// Defines the way scrolling is adjusted on content or size change.
///
/// Chainable variant.
pub fn scroll_strategy(self, strategy: ScrollStrategy) -> Self {
self.with(|s| s.set_scroll_strategy(strategy))
}
/// Sets the padding between content and scrollbar.
pub fn set_scrollbar_padding<V: Into<Vec2>>(
&mut self,
scrollbar_padding: V,
) {
self.scrollbar_padding = scrollbar_padding.into();
}
/// Sets the padding between content and scrollbar.
///
/// Chainable variant.
pub fn scrollbar_padding<V: Into<Vec2>>(
self,<|fim▁hole|> }
/// Returns the padding between content and scrollbar.
pub fn get_scrollbar_padding(&self) -> Vec2 {
self.scrollbar_padding
}
/// For each axis, returns `true` if this view can scroll.
///
/// For example, a vertically-scrolling view will return
/// `XY { x: false, y: true }`.
pub fn is_enabled(&self) -> XY<bool> {
self.enabled
}
/// Control whether scroll bars are visible.
///
/// Defaults to `true`.
pub fn set_show_scrollbars(&mut self, show_scrollbars: bool) {
self.show_scrollbars = show_scrollbars;
}
/// Control whether scroll bars are visible.
///
/// Chainable variant
pub fn show_scrollbars(self, show_scrollbars: bool) -> Self {
self.with(|s| s.set_show_scrollbars(show_scrollbars))
}
/// Returns `true` if we will show scrollbars when needed.
///
/// Scrollbars are always hidden when not needed.
pub fn get_show_scrollbars(&self) -> bool {
self.show_scrollbars
}
/// Returns the size given to the content on the last layout phase.
pub fn inner_size(&self) -> Vec2 {
self.inner_size
}
/// Sets the scroll offset to the given value
pub fn set_offset<S>(&mut self, offset: S)
where
S: Into<Vec2>,
{
let max_offset = self.inner_size.saturating_sub(self.available_size());
self.offset = offset.into().or_min(max_offset);
}
/// Controls whether this view can scroll vertically.
///
/// Defaults to `true`.
pub fn set_scroll_y(&mut self, enabled: bool) {
self.enabled.y = enabled;
self.invalidate_cache();
}
/// Controls whether this view can scroll horizontally.
///
/// Defaults to `false`.
pub fn set_scroll_x(&mut self, enabled: bool) {
self.enabled.x = enabled;
self.invalidate_cache();
}
/// Controls whether this view can scroll vertically.
///
/// Defaults to `true`.
///
/// Chainable variant.
pub fn scroll_y(self, enabled: bool) -> Self {
self.with(|s| s.set_scroll_y(enabled))
}
/// Controls whether this view can scroll horizontally.
///
/// Defaults to `false`.
///
/// Chainable variant.
pub fn scroll_x(self, enabled: bool) -> Self {
self.with(|s| s.set_scroll_x(enabled))
}
/// Try to keep the given `rect` in view.
pub fn keep_in_view(&mut self, rect: Rect) {
let min = rect.bottom_right().saturating_sub(self.available_size());
let max = rect.top_left();
let (min, max) = (Vec2::min(min, max), Vec2::max(min, max));
self.offset = self.offset.or_min(max).or_max(min);
}
/// Scrolls until the given rect is in view.
pub fn scroll_to_rect(&mut self, important_area: Rect) {
// The furthest top-left we can go
let top_left = (important_area.bottom_right() + (1, 1))
.saturating_sub(self.available_size());
// The furthest bottom-right we can go
let bottom_right = important_area.top_left();
// "top_left < bottom_right" is NOT guaranteed
// if the child is larger than the view.
let offset_min = Vec2::min(top_left, bottom_right);
let offset_max = Vec2::max(top_left, bottom_right);
self.offset = self.offset.or_max(offset_min).or_min(offset_max);
}
/// Scroll until the given point is visible.
pub fn scroll_to(&mut self, pos: Vec2) {
// The furthest top-left we can go
let min = pos.saturating_sub(self.available_size());
// How far to the bottom-right we can go
let max = pos;
self.offset = self.offset.or_min(max).or_max(min);
}
/// Scroll until the given column is visible.
pub fn scroll_to_x(&mut self, x: usize) {
if x >= self.offset.x + self.available_size().x {
self.offset.x = 1 + x - self.available_size().x;
} else if x < self.offset.x {
self.offset.x = x;
}
}
/// Scroll until the given row is visible.
pub fn scroll_to_y(&mut self, y: usize) {
if y >= self.offset.y + self.available_size().y {
self.offset.y = 1 + y - self.available_size().y;
} else if y < self.offset.y {
self.offset.y = y;
}
}
/// Programmatically scroll to the top of the view.
pub fn scroll_to_top(&mut self) {
let curr_x = self.offset.x;
self.set_offset((curr_x, 0));
}
/// Programmatically scroll to the bottom of the view.
pub fn scroll_to_bottom(&mut self) {
let max_y = self.inner_size.saturating_sub(self.available_size()).y;
let curr_x = self.offset.x;
self.set_offset((curr_x, max_y));
}
/// Programmatically scroll to the leftmost side of the view.
pub fn scroll_to_left(&mut self) {
let curr_y = self.offset.y;
self.set_offset((0, curr_y));
}
/// Programmatically scroll to the rightmost side of the view.
pub fn scroll_to_right(&mut self) {
let max_x = self.inner_size.saturating_sub(self.available_size()).x;
let curr_y = self.offset.y;
self.set_offset((max_x, curr_y));
}
/// Clears the cache.
fn invalidate_cache(&mut self) {
self.size_cache = None;
}
/// Returns for each axis if we are scrolling.
pub fn is_scrolling(&self) -> XY<bool> {
self.inner_size.zip_map(self.available_size(), |i, s| i > s)
}
/// Stops grabbing the scrollbar.
fn release_grab(&mut self) {
self.thumb_grab = None;
}
/// Returns the size taken by the scrollbars.
///
/// Will be zero in axis where we're not scrolling.
///
/// The scrollbar_size().x will be the horizontal space taken by the vertical scrollbar.
pub fn scrollbar_size(&self) -> Vec2 {
self.is_scrolling()
.swap()
.select_or(self.scrollbar_padding + (1, 1), Vec2::zero())
}
/// Returns the size available for the child view.
fn available_size(&self) -> Vec2 {
self.last_available
}
/// Returns the last size given by `layout`.
pub fn last_outer_size(&self) -> Vec2 {
self.available_size() + self.scrollbar_size()
}
/// Starts scrolling from the cursor position.
///
/// Returns `true` if the event was consumed.
fn start_drag(&mut self, position: Vec2) -> bool {
// For each scrollbar, how far it is.
let scrollbar_pos = self.last_outer_size().saturating_sub((1, 1));
let lengths = self.scrollbar_thumb_lengths();
let offsets = self.scrollbar_thumb_offsets(lengths);
let available = self.available_size();
// This is true for Y if we grabbed the vertical scrollbar
// More specifically, we need both (for instance for the vertical bar):
// * To be in the right column: X == scrollbar_pos
// * To be in the right range: Y < available
let grabbed = position
.zip_map(scrollbar_pos, |p, s| p == s)
.swap()
.and(position.zip_map(available, |p, a| p < a));
// Iterate on axises, and keep the one we grabbed.
if let Some((orientation, pos, length, offset)) =
XY::zip4(Orientation::pair(), position, lengths, offsets)
.keep(grabbed.and(self.enabled))
.into_iter()
.filter_map(|x| x)
.next()
{
if pos >= offset && pos < offset + length {
// We grabbed the thumb! Now scroll from that position.
self.thumb_grab = Some((orientation, pos - offset));
} else {
// We hit the scrollbar, outside of the thumb.
// Let's move the middle there.
self.thumb_grab = Some((orientation, (length - 1) / 2));
self.drag(position);
}
return true;
}
false
}
/// Called when a mouse drag is detected.
fn drag(&mut self, position: Vec2) {
// Only do something if we grabbed something before.
if let Some((orientation, grab)) = self.thumb_grab {
self.scroll_to_thumb(
orientation,
position.get(orientation).saturating_sub(grab),
);
}
}
fn scroll_to_thumb(&mut self, orientation: Orientation, thumb_pos: usize) {
let lengths = self.scrollbar_thumb_lengths();
let available = self.available_size();
// We want self.scrollbar_thumb_offsets() to be thumb_pos
// steps * self.o / (self.inner + 1 - available) = thumb_pos
// self.o = thumb_pos * (self.inner + 1 - available) / (available + 1 - lengths)
// The new offset is:
// thumb_pos * (content + 1 - available) / (available + 1 - thumb size)
let extra =
(available + (1, 1)).saturating_sub(lengths).or_max((1, 1));
// We're dividing by this value, so make sure it's positive!
assert!(extra > Vec2::zero());
let new_offset =
((self.inner_size + (1, 1)).saturating_sub(available) * thumb_pos)
.div_up(extra);
let max_offset = self.inner_size.saturating_sub(self.available_size());
self.offset
.set_axis_from(orientation, &new_offset.or_min(max_offset));
}
/// Tries to apply the cache to the current constraint.
///
/// Returns the cached value if it works, or `None`.
pub(crate) fn try_cache(
&self,
constraint: Vec2,
) -> Option<(Vec2, Vec2, XY<bool>)> {
self.size_cache.and_then(|cache| {
if cache.zip_map(constraint, SizeCache::accept).both() {
Some((
self.inner_size,
cache.map(|c| c.value),
cache.map(|c| c.extra),
))
} else {
None
}
})
}
fn scrollbar_thumb_lengths(&self) -> Vec2 {
let available = self.available_size();
// The length should be (visible / total) * visible
(available * available / self.inner_size.or_max((1, 1))).or_max((1, 1))
}
fn scrollbar_thumb_offsets(&self, lengths: Vec2) -> Vec2 {
let available = self.available_size();
// The number of steps is 1 + the "extra space"
let steps = (available + (1, 1)).saturating_sub(lengths);
let max_offset = self.inner_size.saturating_sub(available) + (1, 1);
steps * self.offset / max_offset
}
/// Apply the scrolling strategy to the current scroll position.
fn adjust_scroll(&mut self) {
match self.scroll_strategy {
ScrollStrategy::StickToTop => self.scroll_to_top(),
ScrollStrategy::StickToBottom => self.scroll_to_bottom(),
ScrollStrategy::KeepRow => (),
}
}
}<|fim▁end|> | scrollbar_padding: V,
) -> Self {
self.with(|s| s.set_scrollbar_padding(scrollbar_padding)) |
<|file_name|>app.module.ts<|end_file_name|><|fim▁begin|>import {NgModule, ErrorHandler} from '@angular/core';
import {IonicApp, IonicModule, IonicErrorHandler} from 'ionic-angular';
import {MyApp} from './app.component';
import {Home} from '../pages/Home/Home';
import {GamingPage} from '../pages/gaming/gaming';
import {Page2} from '../pages/page2/page2';
import {BasicModal} from '../components/basic-modal';
@NgModule({
declarations: [<|fim▁hole|> Home,
GamingPage,
Page2,
BasicModal
],
imports: [
IonicModule.forRoot(MyApp, {mode: 'md'})
],
bootstrap: [IonicApp],
entryComponents: [
MyApp,
Home,
GamingPage,
Page2,
BasicModal
],
providers: [{provide: ErrorHandler, useClass: IonicErrorHandler}]
})
export class AppModule {
}<|fim▁end|> | MyApp, |
<|file_name|>txn.go<|end_file_name|><|fim▁begin|>// @author Couchbase <[email protected]>
// @copyright 2014 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package common
import (
"fmt"
"sync"
)
type Txnid uint64
type TxnState struct {
epoch uint64
counter uint64
mutex sync.Mutex
curTxnid Txnid
}
func NewTxnState() *TxnState {
state := new(TxnState)
state.epoch = 0
state.counter = 0
state.curTxnid = 0
return state
}
func (t *TxnState) GetNextTxnId() Txnid {
t.mutex.Lock()
defer t.mutex.Unlock()
// Increment the counter. If it would overflow, panic.
if t.counter == uint64(MAX_COUNTER) {<|fim▁hole|> panic(fmt.Sprintf("Counter overflows for epoch %d", t.epoch))
}
t.counter++
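// A Txnid packs the epoch into the high 32 bits and the counter into the low 32 bits
// (see GetEpoch/GetCounter below).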
epoch := uint64(t.epoch << 32)
newTxnid := Txnid(epoch + t.counter)
// t.curTxnid is initialized using the LastLoggedTxid in the local repository. So if this node becomes master,
// we want to make sure that the new txid is larger than the one that we saw before.
if t.curTxnid >= newTxnid {
// Assertion. This is to ensure integrity of the system. Wrong txnid can result in corruption.
panic(fmt.Sprintf("GetNextTxnId(): Assertion: New Txnid %d is smaller than or equal to old txnid %d", newTxnid, t.curTxnid))
}
t.curTxnid = newTxnid
return t.curTxnid
}
// Return true if txid2 is logically next in sequence from txid1.
// If txid2 and txid1 have different epoch, then only check if
// txid2 has a larger epoch. Otherwise, compare the counter such
// that txid2 is txid1 + 1
func IsNextInSequence(new, old Txnid) bool {
if new.GetEpoch() > old.GetEpoch() {
return true
}
if new.GetEpoch() == old.GetEpoch() &&
uint32(old.GetCounter()) != MAX_COUNTER &&
new == old+1 {
return true
}
return false
}
func (t *TxnState) SetEpoch(newEpoch uint32) {
t.mutex.Lock()
defer t.mutex.Unlock()
if t.epoch >= uint64(newEpoch) {
// Assertion. This is to ensure integrity of the system. We do not support epoch rollover yet.
panic(fmt.Sprintf("SetEpoch(): Assertion: New Epoch %d is smaller than or equal to old epoch %d", newEpoch, t.epoch))
}
t.epoch = uint64(newEpoch)
t.counter = 0
}
func (t *TxnState) InitCurrentTxnid(txnid Txnid) {
t.mutex.Lock()
defer t.mutex.Unlock()
if txnid > t.curTxnid {
t.curTxnid = txnid
}
}
func (id Txnid) GetEpoch() uint64 {
v := uint64(id)
return (v >> 32)
}
func (id Txnid) GetCounter() uint64 {
v := uint32(id)
return uint64(v)
}
//
// Compare function to compare epoch1 with epoch2
//
// return common.EQUAL if epoch1 is the same as epoch2
// return common.MORE_RECENT if epoch1 is more recent
// return common.LESS_RECENT if epoch1 is less recent
//
// This is just to prepare in the future if we support
// rolling over the epoch (but it will also require
// changes to comparing txnid as well).
//
func CompareEpoch(epoch1, epoch2 uint32) CompareResult {
if epoch1 == epoch2 {
return EQUAL
}
if epoch1 > epoch2 {
return MORE_RECENT
}
return LESS_RECENT
}
//
// Compare epoch1 and epoch2. If epoch1 is equal or more recent, return
// the next more recent epoch value. If epoch1 is less recent than
// epoch2, return epoch2 as it is.
//
func CompareAndIncrementEpoch(epoch1, epoch2 uint32) uint32 {
result := CompareEpoch(epoch1, epoch2)
if result == MORE_RECENT || result == EQUAL {
if epoch1 != MAX_EPOCH {
return epoch1 + 1
}
// TODO : Epoch has reached the max value. If we have a leader
// election every second, it will take 135 years to overflow the epoch (32 bits).
// Regardless, we should gracefully roll over the epoch eventually in our
// implementation.
panic("epoch limit is reached")
}
return epoch2
}<|fim▁end|> |